all: Refactor to nonglobal Viper, i18n etc.

This is a final rewrite that removes all the global state in Hugo, which also enables
the use of `t.Parallel` in tests.

Updates #2701
Fixes #3016
Bjørn Erik Pedersen 2017-02-05 10:20:06 +07:00
parent e34af6ee30
commit 93ca7c9e95
99 changed files with 2843 additions and 2458 deletions
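The shape of the change in one line: package-level viper lookups and the global jww logger are replaced by a config.Provider and a logger injected through deps.DepsCfg. A minimal before/after sketch of a typical call site (illustrative only, mirroring the publishDir and logging changes further down in this diff):

// Before: read from the global Viper and log via the package-level jww notepad.
publishDir := helpers.AbsPathify(viper.GetString("publishDir"))
jww.INFO.Println("syncing static files to", publishDir)

// After: read from the injected provider and log via the commandeer's logger.
publishDir := c.PathSpec().AbsPathify(c.Cfg.GetString("publishDir"))
c.Logger.INFO.Println("syncing static files to", publishDir)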


@ -54,7 +54,7 @@ func benchmark(cmd *cobra.Command, args []string) error {
return err return err
} }
c := commandeer{cfg} c := newCommandeer(cfg)
var memProf *os.File var memProf *os.File
if memProfileFile != "" { if memProfileFile != "" {

commands/commandeer.go (new file, 46 lines)

@ -0,0 +1,46 @@
// Copyright 2017 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package commands
import (
"github.com/spf13/hugo/deps"
"github.com/spf13/hugo/helpers"
)
type commandeer struct {
*deps.DepsCfg
pathSpec *helpers.PathSpec
configured bool
}
func (c *commandeer) Set(key string, value interface{}) {
if c.configured {
panic("commandeer cannot be changed")
}
c.Cfg.Set(key, value)
}
// PathSpec lazily creates a new PathSpec, as all the paths must
// be configured before it is created.
func (c *commandeer) PathSpec() *helpers.PathSpec {
c.configured = true
if c.pathSpec == nil {
c.pathSpec = helpers.NewPathSpec(c.Fs, c.Cfg)
}
return c.pathSpec
}
func newCommandeer(cfg *deps.DepsCfg) *commandeer {
return &commandeer{DepsCfg: cfg}
}
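A rough usage sketch of the new type, assuming a *deps.DepsCfg already filled in by InitializeConfig (hypothetical snippet, not part of this diff):

c := newCommandeer(cfg)
c.Set("disableLiveReload", true) // allowed while the configuration is still open
// The first PathSpec() call flips the configured flag and freezes the config.
publishDir := c.PathSpec().AbsPathify(c.Cfg.GetString("publishDir"))
// c.Set("theme", "other") // would now panic: "commandeer cannot be changed"
_ = publishDir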


@ -21,11 +21,8 @@ import (
"github.com/spf13/cast" "github.com/spf13/cast"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/hugo/helpers"
"github.com/spf13/hugo/hugolib" "github.com/spf13/hugo/hugolib"
"github.com/spf13/hugo/parser" "github.com/spf13/hugo/parser"
jww "github.com/spf13/jwalterweatherman"
"github.com/spf13/viper"
) )
var outputDir string var outputDir string
@ -86,7 +83,7 @@ func convertContents(mark rune) error {
return err return err
} }
h, err := hugolib.NewHugoSitesFromConfiguration(cfg) h, err := hugolib.NewHugoSites(*cfg)
if err != nil { if err != nil {
return err return err
} }
@ -104,10 +101,10 @@ func convertContents(mark rune) error {
return errors.New("No source files found") return errors.New("No source files found")
} }
contentDir := helpers.AbsPathify(viper.GetString("contentDir")) contentDir := site.PathSpec.AbsPathify(site.Cfg.GetString("contentDir"))
jww.FEEDBACK.Println("processing", len(site.Source.Files()), "content files") site.Log.FEEDBACK.Println("processing", len(site.Source.Files()), "content files")
for _, file := range site.Source.Files() { for _, file := range site.Source.Files() {
jww.INFO.Println("Attempting to convert", file.LogicalName()) site.Log.INFO.Println("Attempting to convert", file.LogicalName())
page, err := site.NewPage(file.LogicalName()) page, err := site.NewPage(file.LogicalName())
if err != nil { if err != nil {
return err return err
@ -115,12 +112,12 @@ func convertContents(mark rune) error {
psr, err := parser.ReadFrom(file.Contents) psr, err := parser.ReadFrom(file.Contents)
if err != nil { if err != nil {
jww.ERROR.Println("Error processing file:", file.Path()) site.Log.ERROR.Println("Error processing file:", file.Path())
return err return err
} }
metadata, err := psr.Metadata() metadata, err := psr.Metadata()
if err != nil { if err != nil {
jww.ERROR.Println("Error processing file:", file.Path()) site.Log.ERROR.Println("Error processing file:", file.Path())
return err return err
} }
@ -139,7 +136,7 @@ func convertContents(mark rune) error {
page.SetDir(filepath.Join(contentDir, file.Dir())) page.SetDir(filepath.Join(contentDir, file.Dir()))
page.SetSourceContent(psr.Content()) page.SetSourceContent(psr.Content())
if err = page.SetSourceMetaData(metadata, mark); err != nil { if err = page.SetSourceMetaData(metadata, mark); err != nil {
jww.ERROR.Printf("Failed to set source metadata for file %q: %s. For more info see For more info see https://github.com/spf13/hugo/issues/2458", page.FullFilePath(), err) site.Log.ERROR.Printf("Failed to set source metadata for file %q: %s. For more info see For more info see https://github.com/spf13/hugo/issues/2458", page.FullFilePath(), err)
continue continue
} }
@ -153,7 +150,7 @@ func convertContents(mark rune) error {
return fmt.Errorf("Failed to save file %q: %s", page.FullFilePath(), err) return fmt.Errorf("Failed to save file %q: %s", page.FullFilePath(), err)
} }
} else { } else {
jww.FEEDBACK.Println("Unsafe operation not allowed, use --unsafe or set a different output path") site.Log.FEEDBACK.Println("Unsafe operation not allowed, use --unsafe or set a different output path")
} }
} }
} }


@ -27,8 +27,7 @@ import (
"sync" "sync"
"time" "time"
"github.com/spf13/hugo/tpl" "github.com/spf13/hugo/config"
"github.com/spf13/hugo/hugofs" "github.com/spf13/hugo/hugofs"
"github.com/spf13/hugo/parser" "github.com/spf13/hugo/parser"
@ -51,10 +50,6 @@ import (
"github.com/spf13/viper" "github.com/spf13/viper"
) )
type commandeer struct {
deps.DepsCfg
}
// Hugo represents the Hugo sites to build. This variable is exported as it // Hugo represents the Hugo sites to build. This variable is exported as it
// is used by at least one external library (the Hugo caddy plugin). We should // is used by at least one external library (the Hugo caddy plugin). We should
// provide a cleaner external API, but until then, this is it. // provide a cleaner external API, but until then, this is it.
@ -64,7 +59,6 @@ var Hugo *hugolib.HugoSites
// for benchmark testing etc. via the CLI commands. // for benchmark testing etc. via the CLI commands.
func Reset() error { func Reset() error {
Hugo = nil Hugo = nil
viper.Reset()
return nil return nil
} }
@ -124,10 +118,10 @@ Complete documentation is available at http://gohugo.io/.`,
return err return err
} }
c := commandeer{cfg} c := newCommandeer(cfg)
if buildWatch { if buildWatch {
viper.Set("disableLiveReload", true) cfg.Cfg.Set("disableLiveReload", true)
c.watchConfig() c.watchConfig()
} }
@ -148,16 +142,17 @@ var (
) )
var ( var (
baseURL string baseURL string
cacheDir string cacheDir string
contentDir string contentDir string
layoutDir string layoutDir string
cfgFile string cfgFile string
destination string destination string
logFile string logFile string
theme string theme string
themesDir string themesDir string
source string source string
logI18nWarnings bool
) )
// Execute adds all child commands to the root command HugoCmd and sets flags appropriately. // Execute adds all child commands to the root command HugoCmd and sets flags appropriately.
@ -242,7 +237,7 @@ func initHugoBuildCommonFlags(cmd *cobra.Command) {
cmd.Flags().BoolP("forceSyncStatic", "", false, "Copy all files when static is changed.") cmd.Flags().BoolP("forceSyncStatic", "", false, "Copy all files when static is changed.")
cmd.Flags().BoolP("noTimes", "", false, "Don't sync modification time of files") cmd.Flags().BoolP("noTimes", "", false, "Don't sync modification time of files")
cmd.Flags().BoolP("noChmod", "", false, "Don't sync permission mode of files") cmd.Flags().BoolP("noChmod", "", false, "Don't sync permission mode of files")
cmd.Flags().BoolVarP(&tpl.Logi18nWarnings, "i18n-warnings", "", false, "Print missing translations") cmd.Flags().BoolVarP(&logI18nWarnings, "i18n-warnings", "", false, "Print missing translations")
// Set bash-completion. // Set bash-completion.
// Each flag must first be defined before using the SetAnnotation() call. // Each flag must first be defined before using the SetAnnotation() call.
@ -275,39 +270,56 @@ func init() {
} }
// InitializeConfig initializes a config file with sensible default configuration flags. // InitializeConfig initializes a config file with sensible default configuration flags.
func InitializeConfig(subCmdVs ...*cobra.Command) (deps.DepsCfg, error) { func InitializeConfig(subCmdVs ...*cobra.Command) (*deps.DepsCfg, error) {
var cfg deps.DepsCfg var cfg *deps.DepsCfg = &deps.DepsCfg{}
if err := hugolib.LoadGlobalConfig(source, cfgFile); err != nil { // Init file systems. This may be changed at a later point.
osFs := hugofs.Os
config, err := hugolib.LoadConfig(osFs, source, cfgFile)
if err != nil {
return cfg, err return cfg, err
} }
cfg.Cfg = config
c := newCommandeer(cfg)
for _, cmdV := range append([]*cobra.Command{hugoCmdV}, subCmdVs...) { for _, cmdV := range append([]*cobra.Command{hugoCmdV}, subCmdVs...) {
initializeFlags(cmdV) c.initializeFlags(cmdV)
} }
logger, err := createLogger(cfg.Cfg)
if err != nil {
return cfg, err
}
cfg.Logger = logger
config.Set("logI18nWarnings", logI18nWarnings)
if baseURL != "" { if baseURL != "" {
if !strings.HasSuffix(baseURL, "/") { if !strings.HasSuffix(baseURL, "/") {
baseURL = baseURL + "/" baseURL = baseURL + "/"
} }
viper.Set("baseURL", baseURL) config.Set("baseURL", baseURL)
} }
if !viper.GetBool("relativeURLs") && viper.GetString("baseURL") == "" { if !config.GetBool("relativeURLs") && config.GetString("baseURL") == "" {
jww.ERROR.Println("No 'baseURL' set in configuration or as a flag. Features like page menus will not work without one.") cfg.Logger.ERROR.Println("No 'baseURL' set in configuration or as a flag. Features like page menus will not work without one.")
} }
if theme != "" { if theme != "" {
viper.Set("theme", theme) config.Set("theme", theme)
} }
if themesDir != "" { if themesDir != "" {
viper.Set("themesDir", themesDir) config.Set("themesDir", themesDir)
} }
if destination != "" { if destination != "" {
viper.Set("publishDir", destination) config.Set("publishDir", destination)
} }
var dir string var dir string
@ -316,24 +328,32 @@ func InitializeConfig(subCmdVs ...*cobra.Command) (deps.DepsCfg, error) {
} else { } else {
dir, _ = os.Getwd() dir, _ = os.Getwd()
} }
viper.Set("workingDir", dir) config.Set("workingDir", dir)
cfg.Fs = hugofs.NewFrom(osFs, config)
// Hugo writes the output to memory instead of the disk.
// This is only used for benchmark testing. Cause the content is only visible
// in memory.
if renderToMemory {
c.Fs.Destination = new(afero.MemMapFs)
// Rendering to memoryFS, publish to Root regardless of publishDir.
c.Set("publishDir", "/")
}
if contentDir != "" { if contentDir != "" {
viper.Set("contentDir", contentDir) config.Set("contentDir", contentDir)
} }
if layoutDir != "" { if layoutDir != "" {
viper.Set("layoutDir", layoutDir) config.Set("layoutDir", layoutDir)
} }
if cacheDir != "" { if cacheDir != "" {
viper.Set("cacheDir", cacheDir) config.Set("cacheDir", cacheDir)
} }
// Init file systems. This may be changed at a later point. cacheDir = config.GetString("cacheDir")
cfg.Fs = hugofs.NewDefault()
cacheDir = viper.GetString("cacheDir")
if cacheDir != "" { if cacheDir != "" {
if helpers.FilePathSeparator != cacheDir[len(cacheDir)-1:] { if helpers.FilePathSeparator != cacheDir[len(cacheDir)-1:] {
cacheDir = cacheDir + helpers.FilePathSeparator cacheDir = cacheDir + helpers.FilePathSeparator
@ -343,39 +363,32 @@ func InitializeConfig(subCmdVs ...*cobra.Command) (deps.DepsCfg, error) {
if !isDir { if !isDir {
mkdir(cacheDir) mkdir(cacheDir)
} }
viper.Set("cacheDir", cacheDir) config.Set("cacheDir", cacheDir)
} else { } else {
viper.Set("cacheDir", helpers.GetTempDir("hugo_cache", cfg.Fs.Source)) config.Set("cacheDir", helpers.GetTempDir("hugo_cache", cfg.Fs.Source))
} }
jww.INFO.Println("Using config file:", viper.ConfigFileUsed()) cfg.Logger.INFO.Println("Using config file:", viper.ConfigFileUsed())
themeDir := helpers.GetThemeDir() themeDir := c.PathSpec().GetThemeDir()
if themeDir != "" { if themeDir != "" {
if _, err := cfg.Fs.Source.Stat(themeDir); os.IsNotExist(err) { if _, err := cfg.Fs.Source.Stat(themeDir); os.IsNotExist(err) {
return cfg, newSystemError("Unable to find theme Directory:", themeDir) return cfg, newSystemError("Unable to find theme Directory:", themeDir)
} }
} }
themeVersionMismatch, minVersion := isThemeVsHugoVersionMismatch(cfg.Fs.Source) themeVersionMismatch, minVersion := c.isThemeVsHugoVersionMismatch()
if themeVersionMismatch { if themeVersionMismatch {
jww.ERROR.Printf("Current theme does not support Hugo version %s. Minimum version required is %s\n", cfg.Logger.ERROR.Printf("Current theme does not support Hugo version %s. Minimum version required is %s\n",
helpers.HugoReleaseVersion(), minVersion) helpers.HugoReleaseVersion(), minVersion)
} }
logger, err := createLogger()
if err != nil {
return cfg, err
}
cfg.Logger = logger
return cfg, nil return cfg, nil
} }
func createLogger() (*jww.Notepad, error) { func createLogger(cfg config.Provider) (*jww.Notepad, error) {
var ( var (
logHandle = ioutil.Discard logHandle = ioutil.Discard
outHandle = os.Stdout outHandle = os.Stdout
@ -383,11 +396,11 @@ func createLogger() (*jww.Notepad, error) {
logThreshold = jww.LevelWarn logThreshold = jww.LevelWarn
) )
if verboseLog || logging || (viper.IsSet("logFile") && viper.GetString("logFile") != "") { if verboseLog || logging || (cfg.GetString("logFile") != "") {
var err error var err error
if viper.IsSet("logFile") && viper.GetString("logFile") != "" { if cfg.GetString("logFile") != "" {
path := viper.GetString("logFile") path := cfg.GetString("logFile")
logHandle, err = os.OpenFile(path, os.O_RDWR|os.O_APPEND|os.O_CREATE, 0666) logHandle, err = os.OpenFile(path, os.O_RDWR|os.O_APPEND|os.O_CREATE, 0666)
if err != nil { if err != nil {
return nil, newSystemError("Failed to open log file:", path, err) return nil, newSystemError("Failed to open log file:", path, err)
@ -398,7 +411,7 @@ func createLogger() (*jww.Notepad, error) {
return nil, newSystemError(err) return nil, newSystemError(err)
} }
} }
} else if !quiet && viper.GetBool("verbose") { } else if !quiet && cfg.GetBool("verbose") {
stdoutThreshold = jww.LevelInfo stdoutThreshold = jww.LevelInfo
} }
@ -409,7 +422,7 @@ func createLogger() (*jww.Notepad, error) {
return jww.NewNotepad(stdoutThreshold, logThreshold, outHandle, logHandle, "", log.Ldate|log.Ltime), nil return jww.NewNotepad(stdoutThreshold, logThreshold, outHandle, logHandle, "", log.Ldate|log.Ltime), nil
} }
func initializeFlags(cmd *cobra.Command) { func (c *commandeer) initializeFlags(cmd *cobra.Command) {
persFlagKeys := []string{"verbose", "logFile"} persFlagKeys := []string{"verbose", "logFile"}
flagKeys := []string{ flagKeys := []string{
"cleanDestinationDir", "cleanDestinationDir",
@ -432,21 +445,21 @@ func initializeFlags(cmd *cobra.Command) {
} }
for _, key := range persFlagKeys { for _, key := range persFlagKeys {
setValueFromFlag(cmd.PersistentFlags(), key) c.setValueFromFlag(cmd.PersistentFlags(), key)
} }
for _, key := range flagKeys { for _, key := range flagKeys {
setValueFromFlag(cmd.Flags(), key) c.setValueFromFlag(cmd.Flags(), key)
} }
} }
func setValueFromFlag(flags *flag.FlagSet, key string) { func (c *commandeer) setValueFromFlag(flags *flag.FlagSet, key string) {
if flagChanged(flags, key) { if c.flagChanged(flags, key) {
f := flags.Lookup(key) f := flags.Lookup(key)
viper.Set(key, f.Value.String()) c.Set(key, f.Value.String())
} }
} }
func flagChanged(flags *flag.FlagSet, key string) bool { func (c *commandeer) flagChanged(flags *flag.FlagSet, key string) bool {
flag := flags.Lookup(key) flag := flags.Lookup(key)
if flag == nil { if flag == nil {
return false return false
@ -454,31 +467,23 @@ func flagChanged(flags *flag.FlagSet, key string) bool {
return flag.Changed return flag.Changed
} }
func (c commandeer) watchConfig() { func (c *commandeer) watchConfig() {
viper.WatchConfig() v := c.Cfg.(*viper.Viper)
viper.OnConfigChange(func(e fsnotify.Event) { v.WatchConfig()
jww.FEEDBACK.Println("Config file changed:", e.Name) v.OnConfigChange(func(e fsnotify.Event) {
c.Logger.FEEDBACK.Println("Config file changed:", e.Name)
// Force a full rebuild // Force a full rebuild
utils.CheckErr(c.recreateAndBuildSites(true)) utils.CheckErr(c.recreateAndBuildSites(true))
if !viper.GetBool("disableLiveReload") { if !c.Cfg.GetBool("disableLiveReload") {
// Will block forever trying to write to a channel that nobody is reading if livereload isn't initialized // Will block forever trying to write to a channel that nobody is reading if livereload isn't initialized
livereload.ForceRefresh() livereload.ForceRefresh()
} }
}) })
} }
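Note that watchConfig reaches the file-watching API through a plain type assertion, c.Cfg.(*viper.Viper), which panics if the provider is ever not viper-backed. A defensive variant would use the comma-ok form (a sketch of an alternative, not what this commit does):

if v, ok := c.Cfg.(*viper.Viper); ok {
    v.WatchConfig()
    v.OnConfigChange(func(e fsnotify.Event) {
        c.Logger.FEEDBACK.Println("Config file changed:", e.Name)
        utils.CheckErr(c.recreateAndBuildSites(true))
    })
}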
func (c commandeer) build(watches ...bool) error { func (c *commandeer) build(watches ...bool) error {
// Hugo writes the output to memory instead of the disk.
// This is only used for benchmark testing. Cause the content is only visible
// in memory.
if renderToMemory {
c.Fs.Destination = new(afero.MemMapFs)
// Rendering to memoryFS, publish to Root regardless of publishDir.
viper.Set("publishDir", "/")
}
if err := c.copyStatic(); err != nil { if err := c.copyStatic(); err != nil {
return fmt.Errorf("Error copying static files to %s: %s", helpers.AbsPathify(viper.GetString("publishDir")), err) return fmt.Errorf("Error copying static files to %s: %s", c.PathSpec().AbsPathify(c.Cfg.GetString("publishDir")), err)
} }
watch := false watch := false
if len(watches) > 0 && watches[0] { if len(watches) > 0 && watches[0] {
@ -489,37 +494,35 @@ func (c commandeer) build(watches ...bool) error {
} }
if buildWatch { if buildWatch {
jww.FEEDBACK.Println("Watching for changes in", helpers.AbsPathify(viper.GetString("contentDir"))) c.Logger.FEEDBACK.Println("Watching for changes in", c.PathSpec().AbsPathify(c.Cfg.GetString("contentDir")))
jww.FEEDBACK.Println("Press Ctrl+C to stop") c.Logger.FEEDBACK.Println("Press Ctrl+C to stop")
utils.CheckErr(c.newWatcher(0)) utils.CheckErr(c.newWatcher(0))
} }
return nil return nil
} }
func (c commandeer) getStaticSourceFs() afero.Fs { func (c *commandeer) getStaticSourceFs() afero.Fs {
source := c.Fs.Source source := c.Fs.Source
pathSpec := helpers.NewPathSpec(c.Fs, viper.GetViper()) themeDir, err := c.PathSpec().GetThemeStaticDirPath()
themeDir, err := pathSpec.GetThemeStaticDirPath() staticDir := c.PathSpec().GetStaticDirPath() + helpers.FilePathSeparator
staticDir := helpers.GetStaticDirPath() + helpers.FilePathSeparator
useTheme := true useTheme := true
useStatic := true useStatic := true
if err != nil { if err != nil {
if err != helpers.ErrThemeUndefined { if err != helpers.ErrThemeUndefined {
jww.WARN.Println(err) c.Logger.WARN.Println(err)
} }
useTheme = false useTheme = false
} else { } else {
if _, err := source.Stat(themeDir); os.IsNotExist(err) { if _, err := source.Stat(themeDir); os.IsNotExist(err) {
jww.WARN.Println("Unable to find Theme Static Directory:", themeDir) c.Logger.WARN.Println("Unable to find Theme Static Directory:", themeDir)
useTheme = false useTheme = false
} }
} }
if _, err := source.Stat(staticDir); os.IsNotExist(err) { if _, err := source.Stat(staticDir); os.IsNotExist(err) {
jww.WARN.Println("Unable to find Static Directory:", staticDir) c.Logger.WARN.Println("Unable to find Static Directory:", staticDir)
useStatic = false useStatic = false
} }
@ -528,25 +531,25 @@ func (c commandeer) getStaticSourceFs() afero.Fs {
} }
if !useStatic { if !useStatic {
jww.INFO.Println(themeDir, "is the only static directory available to sync from") c.Logger.INFO.Println(themeDir, "is the only static directory available to sync from")
return afero.NewReadOnlyFs(afero.NewBasePathFs(source, themeDir)) return afero.NewReadOnlyFs(afero.NewBasePathFs(source, themeDir))
} }
if !useTheme { if !useTheme {
jww.INFO.Println(staticDir, "is the only static directory available to sync from") c.Logger.INFO.Println(staticDir, "is the only static directory available to sync from")
return afero.NewReadOnlyFs(afero.NewBasePathFs(source, staticDir)) return afero.NewReadOnlyFs(afero.NewBasePathFs(source, staticDir))
} }
jww.INFO.Println("using a UnionFS for static directory comprised of:") c.Logger.INFO.Println("using a UnionFS for static directory comprised of:")
jww.INFO.Println("Base:", themeDir) c.Logger.INFO.Println("Base:", themeDir)
jww.INFO.Println("Overlay:", staticDir) c.Logger.INFO.Println("Overlay:", staticDir)
base := afero.NewReadOnlyFs(afero.NewBasePathFs(source, themeDir)) base := afero.NewReadOnlyFs(afero.NewBasePathFs(source, themeDir))
overlay := afero.NewReadOnlyFs(afero.NewBasePathFs(source, staticDir)) overlay := afero.NewReadOnlyFs(afero.NewBasePathFs(source, staticDir))
return afero.NewCopyOnWriteFs(base, overlay) return afero.NewCopyOnWriteFs(base, overlay)
} }
func (c commandeer) copyStatic() error { func (c *commandeer) copyStatic() error {
publishDir := helpers.AbsPathify(viper.GetString("publishDir")) + helpers.FilePathSeparator publishDir := c.PathSpec().AbsPathify(c.Cfg.GetString("publishDir")) + helpers.FilePathSeparator
// If root, remove the second '/' // If root, remove the second '/'
if publishDir == "//" { if publishDir == "//" {
@ -557,22 +560,22 @@ func (c commandeer) copyStatic() error {
staticSourceFs := c.getStaticSourceFs() staticSourceFs := c.getStaticSourceFs()
if staticSourceFs == nil { if staticSourceFs == nil {
jww.WARN.Println("No static directories found to sync") c.Logger.WARN.Println("No static directories found to sync")
return nil return nil
} }
syncer := fsync.NewSyncer() syncer := fsync.NewSyncer()
syncer.NoTimes = viper.GetBool("noTimes") syncer.NoTimes = c.Cfg.GetBool("noTimes")
syncer.NoChmod = viper.GetBool("noChmod") syncer.NoChmod = c.Cfg.GetBool("noChmod")
syncer.SrcFs = staticSourceFs syncer.SrcFs = staticSourceFs
syncer.DestFs = c.Fs.Destination syncer.DestFs = c.Fs.Destination
// Now that we are using a unionFs for the static directories // Now that we are using a unionFs for the static directories
// We can effectively clean the publishDir on initial sync // We can effectively clean the publishDir on initial sync
syncer.Delete = viper.GetBool("cleanDestinationDir") syncer.Delete = c.Cfg.GetBool("cleanDestinationDir")
if syncer.Delete { if syncer.Delete {
jww.INFO.Println("removing all files from destination that don't exist in static dirs") c.Logger.INFO.Println("removing all files from destination that don't exist in static dirs")
} }
jww.INFO.Println("syncing static files to", publishDir) c.Logger.INFO.Println("syncing static files to", publishDir)
// because we are using a baseFs (to get the union right). // because we are using a baseFs (to get the union right).
// set sync src to root // set sync src to root
@ -580,37 +583,37 @@ func (c commandeer) copyStatic() error {
} }
// getDirList provides NewWatcher() with a list of directories to watch for changes. // getDirList provides NewWatcher() with a list of directories to watch for changes.
func (c commandeer) getDirList() []string { func (c *commandeer) getDirList() []string {
var a []string var a []string
dataDir := helpers.AbsPathify(viper.GetString("dataDir")) dataDir := c.PathSpec().AbsPathify(c.Cfg.GetString("dataDir"))
i18nDir := helpers.AbsPathify(viper.GetString("i18nDir")) i18nDir := c.PathSpec().AbsPathify(c.Cfg.GetString("i18nDir"))
layoutDir := helpers.AbsPathify(viper.GetString("layoutDir")) layoutDir := c.PathSpec().AbsPathify(c.Cfg.GetString("layoutDir"))
staticDir := helpers.AbsPathify(viper.GetString("staticDir")) staticDir := c.PathSpec().AbsPathify(c.Cfg.GetString("staticDir"))
var themesDir string var themesDir string
if helpers.ThemeSet() { if c.PathSpec().ThemeSet() {
themesDir = helpers.AbsPathify(viper.GetString("themesDir") + "/" + viper.GetString("theme")) themesDir = c.PathSpec().AbsPathify(c.Cfg.GetString("themesDir") + "/" + c.Cfg.GetString("theme"))
} }
walker := func(path string, fi os.FileInfo, err error) error { walker := func(path string, fi os.FileInfo, err error) error {
if err != nil { if err != nil {
if path == dataDir && os.IsNotExist(err) { if path == dataDir && os.IsNotExist(err) {
jww.WARN.Println("Skip dataDir:", err) c.Logger.WARN.Println("Skip dataDir:", err)
return nil return nil
} }
if path == i18nDir && os.IsNotExist(err) { if path == i18nDir && os.IsNotExist(err) {
jww.WARN.Println("Skip i18nDir:", err) c.Logger.WARN.Println("Skip i18nDir:", err)
return nil return nil
} }
if path == layoutDir && os.IsNotExist(err) { if path == layoutDir && os.IsNotExist(err) {
jww.WARN.Println("Skip layoutDir:", err) c.Logger.WARN.Println("Skip layoutDir:", err)
return nil return nil
} }
if path == staticDir && os.IsNotExist(err) { if path == staticDir && os.IsNotExist(err) {
jww.WARN.Println("Skip staticDir:", err) c.Logger.WARN.Println("Skip staticDir:", err)
return nil return nil
} }
@ -619,23 +622,23 @@ func (c commandeer) getDirList() []string {
return nil return nil
} }
jww.ERROR.Println("Walker: ", err) c.Logger.ERROR.Println("Walker: ", err)
return nil return nil
} }
if fi.Mode()&os.ModeSymlink == os.ModeSymlink { if fi.Mode()&os.ModeSymlink == os.ModeSymlink {
link, err := filepath.EvalSymlinks(path) link, err := filepath.EvalSymlinks(path)
if err != nil { if err != nil {
jww.ERROR.Printf("Cannot read symbolic link '%s', error was: %s", path, err) c.Logger.ERROR.Printf("Cannot read symbolic link '%s', error was: %s", path, err)
return nil return nil
} }
linkfi, err := c.Fs.Source.Stat(link) linkfi, err := c.Fs.Source.Stat(link)
if err != nil { if err != nil {
jww.ERROR.Printf("Cannot stat '%s', error was: %s", link, err) c.Logger.ERROR.Printf("Cannot stat '%s', error was: %s", link, err)
return nil return nil
} }
if !linkfi.Mode().IsRegular() { if !linkfi.Mode().IsRegular() {
jww.ERROR.Printf("Symbolic links for directories not supported, skipping '%s'", path) c.Logger.ERROR.Printf("Symbolic links for directories not supported, skipping '%s'", path)
} }
return nil return nil
} }
@ -651,12 +654,12 @@ func (c commandeer) getDirList() []string {
} }
helpers.SymbolicWalk(c.Fs.Source, dataDir, walker) helpers.SymbolicWalk(c.Fs.Source, dataDir, walker)
helpers.SymbolicWalk(c.Fs.Source, helpers.AbsPathify(viper.GetString("contentDir")), walker) helpers.SymbolicWalk(c.Fs.Source, c.PathSpec().AbsPathify(c.Cfg.GetString("contentDir")), walker)
helpers.SymbolicWalk(c.Fs.Source, i18nDir, walker) helpers.SymbolicWalk(c.Fs.Source, i18nDir, walker)
helpers.SymbolicWalk(c.Fs.Source, helpers.AbsPathify(viper.GetString("layoutDir")), walker) helpers.SymbolicWalk(c.Fs.Source, c.PathSpec().AbsPathify(c.Cfg.GetString("layoutDir")), walker)
helpers.SymbolicWalk(c.Fs.Source, staticDir, walker) helpers.SymbolicWalk(c.Fs.Source, staticDir, walker)
if helpers.ThemeSet() { if c.PathSpec().ThemeSet() {
helpers.SymbolicWalk(c.Fs.Source, filepath.Join(themesDir, "layouts"), walker) helpers.SymbolicWalk(c.Fs.Source, filepath.Join(themesDir, "layouts"), walker)
helpers.SymbolicWalk(c.Fs.Source, filepath.Join(themesDir, "static"), walker) helpers.SymbolicWalk(c.Fs.Source, filepath.Join(themesDir, "static"), walker)
helpers.SymbolicWalk(c.Fs.Source, filepath.Join(themesDir, "i18n"), walker) helpers.SymbolicWalk(c.Fs.Source, filepath.Join(themesDir, "i18n"), walker)
@ -667,32 +670,31 @@ func (c commandeer) getDirList() []string {
return a return a
} }
func (c commandeer) recreateAndBuildSites(watching bool) (err error) { func (c *commandeer) recreateAndBuildSites(watching bool) (err error) {
if err := c.initSites(); err != nil { if err := c.initSites(); err != nil {
return err return err
} }
if !quiet { if !quiet {
jww.FEEDBACK.Println("Started building sites ...") c.Logger.FEEDBACK.Println("Started building sites ...")
} }
return Hugo.Build(hugolib.BuildCfg{CreateSitesFromConfig: true, Watching: watching, PrintStats: !quiet}) return Hugo.Build(hugolib.BuildCfg{CreateSitesFromConfig: true, Watching: watching, PrintStats: !quiet})
} }
func (c commandeer) resetAndBuildSites(watching bool) (err error) { func (c *commandeer) resetAndBuildSites(watching bool) (err error) {
if err = c.initSites(); err != nil { if err = c.initSites(); err != nil {
return return
} }
if !quiet { if !quiet {
jww.FEEDBACK.Println("Started building sites ...") c.Logger.FEEDBACK.Println("Started building sites ...")
} }
return Hugo.Build(hugolib.BuildCfg{ResetState: true, Watching: watching, PrintStats: !quiet}) return Hugo.Build(hugolib.BuildCfg{ResetState: true, Watching: watching, PrintStats: !quiet})
} }
func (c commandeer) initSites() error { func (c *commandeer) initSites() error {
if Hugo != nil { if Hugo != nil {
return nil return nil
} }
h, err := hugolib.NewHugoSites(*c.DepsCfg)
h, err := hugolib.NewHugoSitesFromConfiguration(c.DepsCfg)
if err != nil { if err != nil {
return err return err
@ -702,17 +704,17 @@ func (c commandeer) initSites() error {
return nil return nil
} }
func (c commandeer) buildSites(watching bool) (err error) { func (c *commandeer) buildSites(watching bool) (err error) {
if err := c.initSites(); err != nil { if err := c.initSites(); err != nil {
return err return err
} }
if !quiet { if !quiet {
jww.FEEDBACK.Println("Started building sites ...") c.Logger.FEEDBACK.Println("Started building sites ...")
} }
return Hugo.Build(hugolib.BuildCfg{Watching: watching, PrintStats: !quiet}) return Hugo.Build(hugolib.BuildCfg{Watching: watching, PrintStats: !quiet})
} }
func (c commandeer) rebuildSites(events []fsnotify.Event) error { func (c *commandeer) rebuildSites(events []fsnotify.Event) error {
if err := c.initSites(); err != nil { if err := c.initSites(); err != nil {
return err return err
} }
@ -720,13 +722,11 @@ func (c commandeer) rebuildSites(events []fsnotify.Event) error {
} }
// newWatcher creates a new watcher to watch filesystem events. // newWatcher creates a new watcher to watch filesystem events.
func (c commandeer) newWatcher(port int) error { func (c *commandeer) newWatcher(port int) error {
if runtime.GOOS == "darwin" { if runtime.GOOS == "darwin" {
tweakLimit() tweakLimit()
} }
pathSpec := helpers.NewPathSpec(c.Fs, viper.GetViper())
watcher, err := watcher.New(1 * time.Second) watcher, err := watcher.New(1 * time.Second)
var wg sync.WaitGroup var wg sync.WaitGroup
@ -748,7 +748,7 @@ func (c commandeer) newWatcher(port int) error {
for { for {
select { select {
case evs := <-watcher.Events: case evs := <-watcher.Events:
jww.INFO.Println("Received System Events:", evs) c.Logger.INFO.Println("Received System Events:", evs)
staticEvents := []fsnotify.Event{} staticEvents := []fsnotify.Event{}
dynamicEvents := []fsnotify.Event{} dynamicEvents := []fsnotify.Event{}
@ -794,7 +794,7 @@ func (c commandeer) newWatcher(port int) error {
walkAdder := func(path string, f os.FileInfo, err error) error { walkAdder := func(path string, f os.FileInfo, err error) error {
if f.IsDir() { if f.IsDir() {
jww.FEEDBACK.Println("adding created directory to watchlist", path) c.Logger.FEEDBACK.Println("adding created directory to watchlist", path)
watcher.Add(path) watcher.Add(path)
} }
return nil return nil
@ -808,7 +808,7 @@ func (c commandeer) newWatcher(port int) error {
} }
} }
isstatic := strings.HasPrefix(ev.Name, helpers.GetStaticDirPath()) || (len(pathSpec.GetThemesDirPath()) > 0 && strings.HasPrefix(ev.Name, pathSpec.GetThemesDirPath())) isstatic := strings.HasPrefix(ev.Name, c.PathSpec().GetStaticDirPath()) || (len(c.PathSpec().GetThemesDirPath()) > 0 && strings.HasPrefix(ev.Name, c.PathSpec().GetThemesDirPath()))
if isstatic { if isstatic {
staticEvents = append(staticEvents, ev) staticEvents = append(staticEvents, ev)
@ -818,19 +818,19 @@ func (c commandeer) newWatcher(port int) error {
} }
if len(staticEvents) > 0 { if len(staticEvents) > 0 {
publishDir := helpers.AbsPathify(viper.GetString("publishDir")) + helpers.FilePathSeparator publishDir := c.PathSpec().AbsPathify(c.Cfg.GetString("publishDir")) + helpers.FilePathSeparator
// If root, remove the second '/' // If root, remove the second '/'
if publishDir == "//" { if publishDir == "//" {
publishDir = helpers.FilePathSeparator publishDir = helpers.FilePathSeparator
} }
jww.FEEDBACK.Println("\nStatic file changes detected") c.Logger.FEEDBACK.Println("\nStatic file changes detected")
const layout = "2006-01-02 15:04 -0700" const layout = "2006-01-02 15:04 -0700"
jww.FEEDBACK.Println(time.Now().Format(layout)) c.Logger.FEEDBACK.Println(time.Now().Format(layout))
if viper.GetBool("forceSyncStatic") { if c.Cfg.GetBool("forceSyncStatic") {
jww.FEEDBACK.Printf("Syncing all static files\n") c.Logger.FEEDBACK.Printf("Syncing all static files\n")
err := c.copyStatic() err := c.copyStatic()
if err != nil { if err != nil {
utils.StopOnErr(err, fmt.Sprintf("Error copying static files to %s", publishDir)) utils.StopOnErr(err, fmt.Sprintf("Error copying static files to %s", publishDir))
@ -839,13 +839,13 @@ func (c commandeer) newWatcher(port int) error {
staticSourceFs := c.getStaticSourceFs() staticSourceFs := c.getStaticSourceFs()
if staticSourceFs == nil { if staticSourceFs == nil {
jww.WARN.Println("No static directories found to sync") c.Logger.WARN.Println("No static directories found to sync")
return return
} }
syncer := fsync.NewSyncer() syncer := fsync.NewSyncer()
syncer.NoTimes = viper.GetBool("noTimes") syncer.NoTimes = c.Cfg.GetBool("noTimes")
syncer.NoChmod = viper.GetBool("noChmod") syncer.NoChmod = c.Cfg.GetBool("noChmod")
syncer.SrcFs = staticSourceFs syncer.SrcFs = staticSourceFs
syncer.DestFs = c.Fs.Destination syncer.DestFs = c.Fs.Destination
@ -872,9 +872,9 @@ func (c commandeer) newWatcher(port int) error {
fromPath := ev.Name fromPath := ev.Name
// If we are here we already know the event took place in a static dir // If we are here we already know the event took place in a static dir
relPath, err := pathSpec.MakeStaticPathRelative(fromPath) relPath, err := c.PathSpec().MakeStaticPathRelative(fromPath)
if err != nil { if err != nil {
jww.ERROR.Println(err) c.Logger.ERROR.Println(err)
continue continue
} }
@ -897,10 +897,10 @@ func (c commandeer) newWatcher(port int) error {
// If file still exists, sync it // If file still exists, sync it
logger.Println("Syncing", relPath, "to", publishDir) logger.Println("Syncing", relPath, "to", publishDir)
if err := syncer.Sync(filepath.Join(publishDir, relPath), relPath); err != nil { if err := syncer.Sync(filepath.Join(publishDir, relPath), relPath); err != nil {
jww.ERROR.Println(err) c.Logger.ERROR.Println(err)
} }
} else { } else {
jww.ERROR.Println(err) c.Logger.ERROR.Println(err)
} }
continue continue
@ -909,18 +909,18 @@ func (c commandeer) newWatcher(port int) error {
// For all other event operations Hugo will sync static. // For all other event operations Hugo will sync static.
logger.Println("Syncing", relPath, "to", publishDir) logger.Println("Syncing", relPath, "to", publishDir)
if err := syncer.Sync(filepath.Join(publishDir, relPath), relPath); err != nil { if err := syncer.Sync(filepath.Join(publishDir, relPath), relPath); err != nil {
jww.ERROR.Println(err) c.Logger.ERROR.Println(err)
} }
} }
} }
if !buildWatch && !viper.GetBool("disableLiveReload") { if !buildWatch && !c.Cfg.GetBool("disableLiveReload") {
// Will block forever trying to write to a channel that nobody is reading if livereload isn't initialized // Will block forever trying to write to a channel that nobody is reading if livereload isn't initialized
// force refresh when more than one file // force refresh when more than one file
if len(staticEvents) > 0 { if len(staticEvents) > 0 {
for _, ev := range staticEvents { for _, ev := range staticEvents {
path, _ := pathSpec.MakeStaticPathRelative(ev.Name) path, _ := c.PathSpec().MakeStaticPathRelative(ev.Name)
livereload.RefreshPath(path) livereload.RefreshPath(path)
} }
@ -931,27 +931,27 @@ func (c commandeer) newWatcher(port int) error {
} }
if len(dynamicEvents) > 0 { if len(dynamicEvents) > 0 {
jww.FEEDBACK.Println("\nChange detected, rebuilding site") c.Logger.FEEDBACK.Println("\nChange detected, rebuilding site")
const layout = "2006-01-02 15:04 -0700" const layout = "2006-01-02 15:04 -0700"
jww.FEEDBACK.Println(time.Now().Format(layout)) c.Logger.FEEDBACK.Println(time.Now().Format(layout))
c.rebuildSites(dynamicEvents) c.rebuildSites(dynamicEvents)
if !buildWatch && !viper.GetBool("disableLiveReload") { if !buildWatch && !c.Cfg.GetBool("disableLiveReload") {
// Will block forever trying to write to a channel that nobody is reading if livereload isn't initialized // Will block forever trying to write to a channel that nobody is reading if livereload isn't initialized
livereload.ForceRefresh() livereload.ForceRefresh()
} }
} }
case err := <-watcher.Errors: case err := <-watcher.Errors:
if err != nil { if err != nil {
jww.ERROR.Println(err) c.Logger.ERROR.Println(err)
} }
} }
} }
}() }()
if port > 0 { if port > 0 {
if !viper.GetBool("disableLiveReload") { if !c.Cfg.GetBool("disableLiveReload") {
livereload.Initialize() livereload.Initialize()
http.HandleFunc("/livereload.js", livereload.ServeJS) http.HandleFunc("/livereload.js", livereload.ServeJS)
http.HandleFunc("/livereload", livereload.Handler) http.HandleFunc("/livereload", livereload.Handler)
@ -966,30 +966,30 @@ func (c commandeer) newWatcher(port int) error {
// isThemeVsHugoVersionMismatch returns whether the current Hugo version is // isThemeVsHugoVersionMismatch returns whether the current Hugo version is
// less than the theme's min_version. // less than the theme's min_version.
func isThemeVsHugoVersionMismatch(fs afero.Fs) (mismatch bool, requiredMinVersion string) { func (c *commandeer) isThemeVsHugoVersionMismatch() (mismatch bool, requiredMinVersion string) {
if !helpers.ThemeSet() { if !c.PathSpec().ThemeSet() {
return return
} }
themeDir := helpers.GetThemeDir() themeDir := c.PathSpec().GetThemeDir()
path := filepath.Join(themeDir, "theme.toml") path := filepath.Join(themeDir, "theme.toml")
exists, err := helpers.Exists(path, fs) exists, err := helpers.Exists(path, c.Fs.Source)
if err != nil || !exists { if err != nil || !exists {
return return
} }
b, err := afero.ReadFile(fs, path) b, err := afero.ReadFile(c.Fs.Source, path)
c, err := parser.HandleTOMLMetaData(b) tomlMeta, err := parser.HandleTOMLMetaData(b)
if err != nil { if err != nil {
return return
} }
config := c.(map[string]interface{}) config := tomlMeta.(map[string]interface{})
if minVersion, ok := config["min_version"]; ok { if minVersion, ok := config["min_version"]; ok {
switch minVersion.(type) { switch minVersion.(type) {


@ -19,7 +19,6 @@ import (
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/hugo/hugolib" "github.com/spf13/hugo/hugolib"
jww "github.com/spf13/jwalterweatherman" jww "github.com/spf13/jwalterweatherman"
"github.com/spf13/viper"
) )
func init() { func init() {
@ -50,9 +49,11 @@ var listDraftsCmd = &cobra.Command{
return err return err
} }
viper.Set("buildDrafts", true) c := newCommandeer(cfg)
sites, err := hugolib.NewHugoSitesFromConfiguration(cfg) c.Set("buildDrafts", true)
sites, err := hugolib.NewHugoSites(*cfg)
if err != nil { if err != nil {
return newSystemError("Error creating sites", err) return newSystemError("Error creating sites", err)
@ -86,9 +87,11 @@ posted in the future.`,
return err return err
} }
viper.Set("buildFuture", true) c := newCommandeer(cfg)
sites, err := hugolib.NewHugoSitesFromConfiguration(cfg) c.Set("buildFuture", true)
sites, err := hugolib.NewHugoSites(*cfg)
if err != nil { if err != nil {
return newSystemError("Error creating sites", err) return newSystemError("Error creating sites", err)
@ -122,9 +125,11 @@ expired.`,
return err return err
} }
viper.Set("buildExpired", true) c := newCommandeer(cfg)
sites, err := hugolib.NewHugoSitesFromConfiguration(cfg) c.Set("buildExpired", true)
sites, err := hugolib.NewHugoSites(*cfg)
if err != nil { if err != nil {
return newSystemError("Error creating sites", err) return newSystemError("Error creating sites", err)


@ -29,15 +29,17 @@ var configCmd = &cobra.Command{
} }
func init() { func init() {
configCmd.RunE = config configCmd.RunE = printConfig
} }
func config(cmd *cobra.Command, args []string) error { func printConfig(cmd *cobra.Command, args []string) error {
if _, err := InitializeConfig(configCmd); err != nil { cfg, err := InitializeConfig(configCmd)
if err != nil {
return err return err
} }
allSettings := viper.AllSettings() allSettings := cfg.Cfg.(*viper.Viper).AllSettings()
var separator string var separator string
if allSettings["metadataformat"] == "toml" { if allSettings["metadataformat"] == "toml" {


@ -93,12 +93,14 @@ func NewContent(cmd *cobra.Command, args []string) error {
return err return err
} }
if flagChanged(cmd.Flags(), "format") { c := newCommandeer(cfg)
viper.Set("metaDataFormat", configFormat)
if c.flagChanged(cmd.Flags(), "format") {
c.Set("metaDataFormat", configFormat)
} }
if flagChanged(cmd.Flags(), "editor") { if c.flagChanged(cmd.Flags(), "editor") {
viper.Set("newContentEditor", contentEditor) c.Set("newContentEditor", contentEditor)
} }
if len(args) < 1 { if len(args) < 1 {
@ -115,7 +117,7 @@ func NewContent(cmd *cobra.Command, args []string) error {
kind = contentType kind = contentType
} }
s, err := hugolib.NewSite(cfg) s, err := hugolib.NewSite(*cfg)
if err != nil { if err != nil {
return newSystemError(err) return newSystemError(err)
@ -203,7 +205,7 @@ func NewSite(cmd *cobra.Command, args []string) error {
forceNew, _ := cmd.Flags().GetBool("force") forceNew, _ := cmd.Flags().GetBool("force")
return doNewSite(hugofs.NewDefault(), createpath, forceNew) return doNewSite(hugofs.NewDefault(viper.New()), createpath, forceNew)
} }
// NewTheme creates a new Hugo theme. // NewTheme creates a new Hugo theme.
@ -215,11 +217,12 @@ func NewTheme(cmd *cobra.Command, args []string) error {
} }
if len(args) < 1 { if len(args) < 1 {
return newUserError("theme name needs to be provided") return newUserError("theme name needs to be provided")
} }
createpath := helpers.AbsPathify(filepath.Join(viper.GetString("themesDir"), args[0])) c := newCommandeer(cfg)
createpath := c.PathSpec().AbsPathify(filepath.Join(c.Cfg.GetString("themesDir"), args[0]))
jww.INFO.Println("creating theme at", createpath) jww.INFO.Println("creating theme at", createpath)
if x, _ := helpers.Exists(createpath, cfg.Fs.Source); x { if x, _ := helpers.Exists(createpath, cfg.Fs.Source); x {


@ -18,6 +18,7 @@ import (
"testing" "testing"
"github.com/spf13/hugo/hugofs" "github.com/spf13/hugo/hugofs"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -48,7 +49,7 @@ func checkNewSiteInited(fs *hugofs.Fs, basepath string, t *testing.T) {
func TestDoNewSite(t *testing.T) { func TestDoNewSite(t *testing.T) {
basepath := filepath.Join("base", "blog") basepath := filepath.Join("base", "blog")
fs := hugofs.NewMem() _, fs := newTestCfg()
require.NoError(t, doNewSite(fs, basepath, false)) require.NoError(t, doNewSite(fs, basepath, false))
@ -57,7 +58,7 @@ func TestDoNewSite(t *testing.T) {
func TestDoNewSite_noerror_base_exists_but_empty(t *testing.T) { func TestDoNewSite_noerror_base_exists_but_empty(t *testing.T) {
basepath := filepath.Join("base", "blog") basepath := filepath.Join("base", "blog")
fs := hugofs.NewMem() _, fs := newTestCfg()
require.NoError(t, fs.Source.MkdirAll(basepath, 777)) require.NoError(t, fs.Source.MkdirAll(basepath, 777))
@ -66,7 +67,7 @@ func TestDoNewSite_noerror_base_exists_but_empty(t *testing.T) {
func TestDoNewSite_error_base_exists(t *testing.T) { func TestDoNewSite_error_base_exists(t *testing.T) {
basepath := filepath.Join("base", "blog") basepath := filepath.Join("base", "blog")
fs := hugofs.NewMem() _, fs := newTestCfg()
require.NoError(t, fs.Source.MkdirAll(basepath, 777)) require.NoError(t, fs.Source.MkdirAll(basepath, 777))
_, err := fs.Source.Create(filepath.Join(basepath, "foo")) _, err := fs.Source.Create(filepath.Join(basepath, "foo"))
@ -78,7 +79,7 @@ func TestDoNewSite_error_base_exists(t *testing.T) {
func TestDoNewSite_force_empty_dir(t *testing.T) { func TestDoNewSite_force_empty_dir(t *testing.T) {
basepath := filepath.Join("base", "blog") basepath := filepath.Join("base", "blog")
fs := hugofs.NewMem() _, fs := newTestCfg()
require.NoError(t, fs.Source.MkdirAll(basepath, 777)) require.NoError(t, fs.Source.MkdirAll(basepath, 777))
@ -89,7 +90,7 @@ func TestDoNewSite_force_empty_dir(t *testing.T) {
func TestDoNewSite_error_force_dir_inside_exists(t *testing.T) { func TestDoNewSite_error_force_dir_inside_exists(t *testing.T) {
basepath := filepath.Join("base", "blog") basepath := filepath.Join("base", "blog")
fs := hugofs.NewMem() _, fs := newTestCfg()
contentPath := filepath.Join(basepath, "content") contentPath := filepath.Join(basepath, "content")
@ -99,7 +100,7 @@ func TestDoNewSite_error_force_dir_inside_exists(t *testing.T) {
func TestDoNewSite_error_force_config_inside_exists(t *testing.T) { func TestDoNewSite_error_force_config_inside_exists(t *testing.T) {
basepath := filepath.Join("base", "blog") basepath := filepath.Join("base", "blog")
fs := hugofs.NewMem() _, fs := newTestCfg()
configPath := filepath.Join(basepath, "config.toml") configPath := filepath.Join(basepath, "config.toml")
require.NoError(t, fs.Source.MkdirAll(basepath, 777)) require.NoError(t, fs.Source.MkdirAll(basepath, 777))
@ -108,3 +109,14 @@ func TestDoNewSite_error_force_config_inside_exists(t *testing.T) {
require.Error(t, doNewSite(fs, basepath, true)) require.Error(t, doNewSite(fs, basepath, true))
} }
func newTestCfg() (*viper.Viper, *hugofs.Fs) {
v := viper.New()
fs := hugofs.NewMem(v)
v.SetFs(fs.Source)
return v, fs
}
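With a per-test Viper instance and in-memory filesystem, the commands tests no longer share mutable global state, which is what makes t.Parallel viable. A hypothetical example built on the helper above (test name and flow are illustrative):

func TestDoNewSiteParallel(t *testing.T) {
    t.Parallel() // safe: each test owns its viper instance and its in-memory fs

    _, fs := newTestCfg()
    basepath := filepath.Join("base", "blog")
    require.NoError(t, doNewSite(fs, basepath, false))
    checkNewSiteInited(fs, basepath, t)
}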


@ -28,9 +28,9 @@ import (
"github.com/spf13/afero" "github.com/spf13/afero"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/hugo/config"
"github.com/spf13/hugo/helpers" "github.com/spf13/hugo/helpers"
jww "github.com/spf13/jwalterweatherman" jww "github.com/spf13/jwalterweatherman"
"github.com/spf13/viper"
) )
var ( var (
@ -42,8 +42,6 @@ var (
serverWatch bool serverWatch bool
) )
//var serverCmdV *cobra.Command
var serverCmd = &cobra.Command{ var serverCmd = &cobra.Command{
Use: "server", Use: "server",
Aliases: []string{"serve"}, Aliases: []string{"serve"},
@ -108,17 +106,17 @@ func server(cmd *cobra.Command, args []string) error {
return err return err
} }
c := commandeer{cfg} c := newCommandeer(cfg)
if flagChanged(cmd.Flags(), "disableLiveReload") { if c.flagChanged(cmd.Flags(), "disableLiveReload") {
viper.Set("disableLiveReload", disableLiveReload) c.Set("disableLiveReload", disableLiveReload)
} }
if serverWatch { if serverWatch {
viper.Set("watch", true) c.Set("watch", true)
} }
if viper.GetBool("watch") { if c.Cfg.GetBool("watch") {
serverWatch = true serverWatch = true
c.watchConfig() c.watchConfig()
} }
@ -127,7 +125,7 @@ func server(cmd *cobra.Command, args []string) error {
if err == nil { if err == nil {
l.Close() l.Close()
} else { } else {
if flagChanged(serverCmd.Flags(), "port") { if c.flagChanged(serverCmd.Flags(), "port") {
// port set explicitly by user -- he/she probably meant it! // port set explicitly by user -- he/she probably meant it!
return newSystemErrorF("Server startup failed: %s", err) return newSystemErrorF("Server startup failed: %s", err)
} }
@ -139,13 +137,13 @@ func server(cmd *cobra.Command, args []string) error {
serverPort = sp.Port serverPort = sp.Port
} }
viper.Set("port", serverPort) c.Set("port", serverPort)
baseURL, err = fixURL(baseURL) baseURL, err = fixURL(c.Cfg, baseURL)
if err != nil { if err != nil {
return err return err
} }
viper.Set("baseURL", baseURL) c.Set("baseURL", baseURL)
if err := memStats(); err != nil { if err := memStats(); err != nil {
jww.ERROR.Println("memstats error:", err) jww.ERROR.Println("memstats error:", err)
@ -160,7 +158,7 @@ func server(cmd *cobra.Command, args []string) error {
if !renderToDisk { if !renderToDisk {
cfg.Fs.Destination = new(afero.MemMapFs) cfg.Fs.Destination = new(afero.MemMapFs)
// Rendering to memoryFS, publish to Root regardless of publishDir. // Rendering to memoryFS, publish to Root regardless of publishDir.
viper.Set("publishDir", "/") c.Set("publishDir", "/")
} }
if err := c.build(serverWatch); err != nil { if err := c.build(serverWatch); err != nil {
@ -170,7 +168,7 @@ func server(cmd *cobra.Command, args []string) error {
// Watch runs its own server as part of the routine // Watch runs its own server as part of the routine
if serverWatch { if serverWatch {
watchDirs := c.getDirList() watchDirs := c.getDirList()
baseWatchDir := viper.GetString("workingDir") baseWatchDir := c.Cfg.GetString("workingDir")
for i, dir := range watchDirs { for i, dir := range watchDirs {
watchDirs[i], _ = helpers.GetRelativePath(dir, baseWatchDir) watchDirs[i], _ = helpers.GetRelativePath(dir, baseWatchDir)
} }
@ -190,19 +188,19 @@ func server(cmd *cobra.Command, args []string) error {
return nil return nil
} }
func (c commandeer) serve(port int) { func (c *commandeer) serve(port int) {
if renderToDisk { if renderToDisk {
jww.FEEDBACK.Println("Serving pages from " + helpers.AbsPathify(viper.GetString("publishDir"))) jww.FEEDBACK.Println("Serving pages from " + c.PathSpec().AbsPathify(c.Cfg.GetString("publishDir")))
} else { } else {
jww.FEEDBACK.Println("Serving pages from memory") jww.FEEDBACK.Println("Serving pages from memory")
} }
httpFs := afero.NewHttpFs(c.Fs.Destination) httpFs := afero.NewHttpFs(c.Fs.Destination)
fs := filesOnlyFs{httpFs.Dir(helpers.AbsPathify(viper.GetString("publishDir")))} fs := filesOnlyFs{httpFs.Dir(c.PathSpec().AbsPathify(c.Cfg.GetString("publishDir")))}
fileserver := http.FileServer(fs) fileserver := http.FileServer(fs)
// We're only interested in the path // We're only interested in the path
u, err := url.Parse(viper.GetString("baseURL")) u, err := url.Parse(c.Cfg.GetString("baseURL"))
if err != nil { if err != nil {
jww.ERROR.Fatalf("Invalid baseURL: %s", err) jww.ERROR.Fatalf("Invalid baseURL: %s", err)
} }
@ -225,10 +223,10 @@ func (c commandeer) serve(port int) {
// fixURL massages the baseURL into a form needed for serving // fixURL massages the baseURL into a form needed for serving
// all pages correctly. // all pages correctly.
func fixURL(s string) (string, error) { func fixURL(cfg config.Provider, s string) (string, error) {
useLocalhost := false useLocalhost := false
if s == "" { if s == "" {
s = viper.GetString("baseURL") s = cfg.GetString("baseURL")
useLocalhost = true useLocalhost = true
} }


@ -20,8 +20,6 @@ import (
) )
func TestFixURL(t *testing.T) { func TestFixURL(t *testing.T) {
defer viper.Reset()
type data struct { type data struct {
TestName string TestName string
CLIBaseURL string CLIBaseURL string
@ -44,12 +42,12 @@ func TestFixURL(t *testing.T) {
} }
for i, test := range tests { for i, test := range tests {
viper.Reset() v := viper.New()
baseURL = test.CLIBaseURL baseURL = test.CLIBaseURL
viper.Set("baseURL", test.CfgBaseURL) v.Set("baseURL", test.CfgBaseURL)
serverAppend = test.AppendPort serverAppend = test.AppendPort
serverPort = test.Port serverPort = test.Port
result, err := fixURL(baseURL) result, err := fixURL(v, baseURL)
if err != nil { if err != nil {
t.Errorf("Test #%d %s: unexpected error %s", i, test.TestName, err) t.Errorf("Test #%d %s: unexpected error %s", i, test.TestName, err)
} }


@ -1,4 +1,4 @@
// Copyright 2016-present The Hugo Authors. All rights reserved. // Copyright 2017-present The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -15,11 +15,7 @@
// and on content. The helper functions defined here lay down the // and on content. The helper functions defined here lay down the
// foundation of how Hugo works with files and filepaths, and perform // foundation of how Hugo works with files and filepaths, and perform
// string operations on content. // string operations on content.
package helpers package config
import (
"github.com/spf13/viper"
)
// A cached version of the current ConfigProvider (language) and relatives. These globals // A cached version of the current ConfigProvider (language) and relatives. These globals
// are unfortunate, but we still have some places that needs this that does // are unfortunate, but we still have some places that needs this that does
@ -28,36 +24,17 @@ import (
// //
// TODO(bep) Get rid of these. // TODO(bep) Get rid of these.
var ( var (
currentConfigProvider ConfigProvider currentConfigProvider Provider
) )
// ConfigProvider provides the configuration settings for Hugo. // Provider provides the configuration settings for Hugo.
type ConfigProvider interface { type Provider interface {
GetString(key string) string GetString(key string) string
GetInt(key string) int GetInt(key string) int
GetBool(key string) bool GetBool(key string) bool
GetStringMap(key string) map[string]interface{} GetStringMap(key string) map[string]interface{}
GetStringMapString(key string) map[string]string GetStringMapString(key string) map[string]string
Get(key string) interface{} Get(key string) interface{}
} Set(key string, value interface{})
IsSet(key string) bool
// Config returns the currently active Hugo config. This will be set
// per site (language) rendered.
func Config() ConfigProvider {
if currentConfigProvider != nil {
return currentConfigProvider
}
// Some tests rely on this. We will fix that, eventually.
return viper.Get("currentContentLanguage").(ConfigProvider)
}
// InitConfigProviderForCurrentContentLanguage does what it says.
func InitConfigProviderForCurrentContentLanguage() {
currentConfigProvider = viper.Get("CurrentContentLanguage").(ConfigProvider)
}
// ResetConfigProvider is used in tests.
func ResetConfigProvider() {
currentConfigProvider = nil
} }
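Anything implementing these getters and setters is a valid Provider; *viper.Viper satisfies the interface directly, which is how the commands use it (see the c.Cfg.(*viper.Viper) assertions above). For isolated tests, a small map-backed stand-in would also work; the sketch below is hypothetical and not part of Hugo:

type mapProvider map[string]interface{}

func (m mapProvider) Get(k string) interface{}  { return m[k] }
func (m mapProvider) GetString(k string) string { s, _ := m[k].(string); return s }
func (m mapProvider) GetInt(k string) int       { i, _ := m[k].(int); return i }
func (m mapProvider) GetBool(k string) bool     { b, _ := m[k].(bool); return b }
func (m mapProvider) GetStringMap(k string) map[string]interface{} {
    v, _ := m[k].(map[string]interface{})
    return v
}
func (m mapProvider) GetStringMapString(k string) map[string]string {
    v, _ := m[k].(map[string]string)
    return v
}
func (m mapProvider) Set(k string, v interface{}) { m[k] = v }
func (m mapProvider) IsSet(k string) bool         { _, ok := m[k]; return ok }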

View file

@ -29,7 +29,6 @@ import (
"github.com/spf13/hugo/hugolib" "github.com/spf13/hugo/hugolib"
"github.com/spf13/hugo/parser" "github.com/spf13/hugo/parser"
jww "github.com/spf13/jwalterweatherman" jww "github.com/spf13/jwalterweatherman"
"github.com/spf13/viper"
) )
// NewContent creates a new content file in the content directory based upon the // NewContent creates a new content file in the content directory based upon the
@ -37,7 +36,7 @@ import (
func NewContent(s *hugolib.Site, kind, name string) (err error) { func NewContent(s *hugolib.Site, kind, name string) (err error) {
jww.INFO.Println("attempting to create ", name, "of", kind) jww.INFO.Println("attempting to create ", name, "of", kind)
location := FindArchetype(s.Fs.Source, kind) location := FindArchetype(s, kind)
var by []byte var by []byte
@ -67,23 +66,23 @@ func NewContent(s *hugolib.Site, kind, name string) (err error) {
return err return err
} }
if err = page.SetSourceMetaData(metadata, parser.FormatToLeadRune(viper.GetString("metaDataFormat"))); err != nil { if err = page.SetSourceMetaData(metadata, parser.FormatToLeadRune(s.Cfg.GetString("metaDataFormat"))); err != nil {
return return
} }
page.SetSourceContent(psr.Content()) page.SetSourceContent(psr.Content())
if err = page.SafeSaveSourceAs(filepath.Join(viper.GetString("contentDir"), name)); err != nil { if err = page.SafeSaveSourceAs(filepath.Join(s.Cfg.GetString("contentDir"), name)); err != nil {
return return
} }
jww.FEEDBACK.Println(helpers.AbsPathify(filepath.Join(viper.GetString("contentDir"), name)), "created") jww.FEEDBACK.Println(s.PathSpec.AbsPathify(filepath.Join(s.Cfg.GetString("contentDir"), name)), "created")
editor := viper.GetString("newContentEditor") editor := s.Cfg.GetString("newContentEditor")
if editor != "" { if editor != "" {
jww.FEEDBACK.Printf("Editing %s with %q ...\n", name, editor) jww.FEEDBACK.Printf("Editing %s with %q ...\n", name, editor)
cmd := exec.Command(editor, helpers.AbsPathify(path.Join(viper.GetString("contentDir"), name))) cmd := exec.Command(editor, s.PathSpec.AbsPathify(path.Join(s.Cfg.GetString("contentDir"), name)))
cmd.Stdin = os.Stdin cmd.Stdin = os.Stdin
cmd.Stdout = os.Stdout cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr cmd.Stderr = os.Stderr
@ -138,12 +137,7 @@ func createMetadata(archetype parser.Page, name string) (map[string]interface{},
metadata["title"] = helpers.MakeTitle(helpers.Filename(name)) metadata["title"] = helpers.MakeTitle(helpers.Filename(name))
} }
// TOD(bep) what is this? metadata["date"] = date.Format(time.RFC3339)
if x := parser.FormatSanitize(viper.GetString("metaDataFormat")); x == "json" || x == "yaml" || x == "toml" {
metadata["date"] = date.Format(time.RFC3339)
} else {
metadata["date"] = date
}
return metadata, nil return metadata, nil
} }
@ -151,13 +145,13 @@ func createMetadata(archetype parser.Page, name string) (map[string]interface{},
// FindArchetype takes a given kind/archetype of content and returns an output // FindArchetype takes a given kind/archetype of content and returns an output
// path for that archetype. If no archetype is found, an empty string is // path for that archetype. If no archetype is found, an empty string is
// returned. // returned.
func FindArchetype(fs afero.Fs, kind string) (outpath string) { func FindArchetype(s *hugolib.Site, kind string) (outpath string) {
search := []string{helpers.AbsPathify(viper.GetString("archetypeDir"))} search := []string{s.PathSpec.AbsPathify(s.Cfg.GetString("archetypeDir"))}
if viper.GetString("theme") != "" { if s.Cfg.GetString("theme") != "" {
themeDir := filepath.Join(helpers.AbsPathify(viper.GetString("themesDir")+"/"+viper.GetString("theme")), "/archetypes/") themeDir := filepath.Join(s.PathSpec.AbsPathify(s.Cfg.GetString("themesDir")+"/"+s.Cfg.GetString("theme")), "/archetypes/")
if _, err := fs.Stat(themeDir); os.IsNotExist(err) { if _, err := s.Fs.Source.Stat(themeDir); os.IsNotExist(err) {
jww.ERROR.Printf("Unable to find archetypes directory for theme %q at %q", viper.GetString("theme"), themeDir) jww.ERROR.Printf("Unable to find archetypes directory for theme %q at %q", s.Cfg.GetString("theme"), themeDir)
} else { } else {
search = append(search, themeDir) search = append(search, themeDir)
} }
@ -177,7 +171,7 @@ func FindArchetype(fs afero.Fs, kind string) (outpath string) {
for _, p := range pathsToCheck { for _, p := range pathsToCheck {
curpath := filepath.Join(x, p) curpath := filepath.Join(x, p)
jww.DEBUG.Println("checking", curpath, "for archetypes") jww.DEBUG.Println("checking", curpath, "for archetypes")
if exists, _ := helpers.Exists(curpath, fs); exists { if exists, _ := helpers.Exists(curpath, s.Fs.Source); exists {
jww.INFO.Println("curpath: " + curpath) jww.INFO.Println("curpath: " + curpath)
return curpath return curpath
} }
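FindArchetype now receives the whole site so it can reach both the source filesystem (s.Fs.Source) and the per-site config (s.Cfg) without touching package globals. A caller-side sketch, assuming an already-initialised *hugolib.Site and the usual imports of the create and hugolib packages; the wrapper function itself is hypothetical:

    // archetypePath is a hypothetical helper; only the create.FindArchetype
    // call shape is taken from this commit.
    func archetypePath(s *hugolib.Site, kind string) (string, bool) {
        // Was FindArchetype(s.Fs.Source, kind) before this change.
        path := create.FindArchetype(s, kind)
        return path, path != ""
    }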

View file

@ -19,6 +19,8 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/spf13/hugo/deps"
"github.com/spf13/hugo/hugolib" "github.com/spf13/hugo/hugolib"
"fmt" "fmt"
@ -33,7 +35,8 @@ import (
) )
func TestNewContent(t *testing.T) { func TestNewContent(t *testing.T) {
initViper() v := viper.New()
initViper(v)
cases := []struct { cases := []struct {
kind string kind string
@ -48,14 +51,17 @@ func TestNewContent(t *testing.T) {
} }
for _, c := range cases { for _, c := range cases {
s, err := hugolib.NewEnglishSiteMem() cfg, fs := newTestCfg()
h, err := hugolib.NewHugoSites(deps.DepsCfg{Cfg: cfg, Fs: fs})
require.NoError(t, err) require.NoError(t, err)
require.NoError(t, initFs(s.Fs)) require.NoError(t, initFs(fs))
s := h.Sites[0]
require.NoError(t, create.NewContent(s, c.kind, c.path)) require.NoError(t, create.NewContent(s, c.kind, c.path))
fname := filepath.Join("content", filepath.FromSlash(c.path)) fname := filepath.Join("content", filepath.FromSlash(c.path))
content := readFileFromFs(t, s.Fs.Source, fname) content := readFileFromFs(t, fs.Source, fname)
for i, v := range c.expected { for i, v := range c.expected {
found := strings.Contains(content, v) found := strings.Contains(content, v)
if !found { if !found {
@ -65,14 +71,14 @@ func TestNewContent(t *testing.T) {
} }
} }
func initViper() { func initViper(v *viper.Viper) {
viper.Reset() v.Set("metaDataFormat", "toml")
viper.Set("metaDataFormat", "toml") v.Set("archetypeDir", "archetypes")
viper.Set("archetypeDir", "archetypes") v.Set("contentDir", "content")
viper.Set("contentDir", "content") v.Set("themesDir", "themes")
viper.Set("themesDir", "themes") v.Set("layoutDir", "layouts")
viper.Set("layoutDir", "layouts") v.Set("i18nDir", "i18n")
viper.Set("theme", "sample") v.Set("theme", "sample")
} }
func initFs(fs *hugofs.Fs) error { func initFs(fs *hugofs.Fs) error {
@ -143,3 +149,16 @@ func readFileFromFs(t *testing.T, fs afero.Fs, filename string) string {
} }
return string(b) return string(b)
} }
func newTestCfg() (*viper.Viper, *hugofs.Fs) {
v := viper.New()
fs := hugofs.NewMem(v)
v.SetFs(fs.Source)
initViper(v)
return v, fs
}

67
deps/deps.go vendored
View file

@ -5,6 +5,7 @@ import (
"log" "log"
"os" "os"
"github.com/spf13/hugo/config"
"github.com/spf13/hugo/helpers" "github.com/spf13/hugo/helpers"
"github.com/spf13/hugo/hugofs" "github.com/spf13/hugo/hugofs"
"github.com/spf13/hugo/tplapi" "github.com/spf13/hugo/tplapi"
@ -27,24 +28,40 @@ type Deps struct {
// The PathSpec to use // The PathSpec to use
*helpers.PathSpec `json:"-"` *helpers.PathSpec `json:"-"`
templateProvider TemplateProvider // The ContentSpec to use
WithTemplate func(templ tplapi.Template) error *helpers.ContentSpec `json:"-"`
// TODO(bep) globals next in line: Viper // The configuration to use
Cfg config.Provider `json:"-"`
// The translation func to use
Translate func(translationID string, args ...interface{}) string `json:"-"`
Language *helpers.Language
templateProvider ResourceProvider
WithTemplate func(templ tplapi.Template) error `json:"-"`
translationProvider ResourceProvider
} }
// Used to create and refresh, and clone the template. // Used to create and refresh, and clone resources needed.
type TemplateProvider interface { type ResourceProvider interface {
Update(deps *Deps) error Update(deps *Deps) error
Clone(deps *Deps) error Clone(deps *Deps) error
} }
func (d *Deps) LoadTemplates() error { func (d *Deps) LoadResources() error {
// Note that the translations need to be loaded before the templates.
if err := d.translationProvider.Update(d); err != nil {
return err
}
if err := d.templateProvider.Update(d); err != nil { if err := d.templateProvider.Update(d); err != nil {
return err return err
} }
d.Tmpl.PrintErrors() d.Tmpl.PrintErrors()
return nil return nil
} }
@ -58,6 +75,10 @@ func New(cfg DepsCfg) *Deps {
panic("Must have a TemplateProvider") panic("Must have a TemplateProvider")
} }
if cfg.TranslationProvider == nil {
panic("Must have a TranslationProvider")
}
if cfg.Language == nil { if cfg.Language == nil {
panic("Must have a Language") panic("Must have a Language")
} }
@ -67,16 +88,20 @@ func New(cfg DepsCfg) *Deps {
} }
if fs == nil { if fs == nil {
// Default to the production file systems. // Default to the production file system.
fs = hugofs.NewDefault() fs = hugofs.NewDefault(cfg.Language)
} }
d := &Deps{ d := &Deps{
Fs: fs, Fs: fs,
Log: logger, Log: logger,
templateProvider: cfg.TemplateProvider, templateProvider: cfg.TemplateProvider,
WithTemplate: cfg.WithTemplate, translationProvider: cfg.TranslationProvider,
PathSpec: helpers.NewPathSpec(fs, cfg.Language), WithTemplate: cfg.WithTemplate,
PathSpec: helpers.NewPathSpec(fs, cfg.Language),
ContentSpec: helpers.NewContentSpec(cfg.Language),
Cfg: cfg.Language,
Language: cfg.Language,
} }
return d return d
@ -87,6 +112,14 @@ func New(cfg DepsCfg) *Deps {
func (d Deps) ForLanguage(l *helpers.Language) (*Deps, error) { func (d Deps) ForLanguage(l *helpers.Language) (*Deps, error) {
d.PathSpec = helpers.NewPathSpec(d.Fs, l) d.PathSpec = helpers.NewPathSpec(d.Fs, l)
d.ContentSpec = helpers.NewContentSpec(l)
d.Cfg = l
d.Language = l
if err := d.translationProvider.Clone(&d); err != nil {
return nil, err
}
if err := d.templateProvider.Clone(&d); err != nil { if err := d.templateProvider.Clone(&d); err != nil {
return nil, err return nil, err
} }
@ -109,7 +142,13 @@ type DepsCfg struct {
// The language to use. // The language to use.
Language *helpers.Language Language *helpers.Language
// The configuration to use.
Cfg config.Provider
// Template handling. // Template handling.
TemplateProvider TemplateProvider TemplateProvider ResourceProvider
WithTemplate func(templ tplapi.Template) error WithTemplate func(templ tplapi.Template) error
// i18n handling.
TranslationProvider ResourceProvider
} }
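Deps now owns the config, the content spec and a translation provider next to the template provider, and both providers implement the same ResourceProvider interface. A minimal wiring sketch, with a no-op provider standing in for the real tpl/i18n implementations (illustrative only, assuming imports of deps, helpers, hugofs and viper):

    // noopProvider satisfies deps.ResourceProvider for illustration only.
    type noopProvider struct{}

    func (noopProvider) Update(d *deps.Deps) error { return nil }
    func (noopProvider) Clone(d *deps.Deps) error  { return nil }

    func newDeps() *deps.Deps {
        v := viper.New()
        fs := hugofs.NewMem(v)
        lang := helpers.NewDefaultLanguage(v)

        d := deps.New(deps.DepsCfg{
            Fs:                  fs,
            Language:            lang, // New also uses the language as Deps.Cfg
            TemplateProvider:    noopProvider{},
            TranslationProvider: noopProvider{},
        })
        // d.LoadResources() would then run the translation provider before the
        // template provider, matching the ordering comment above.
        return d
    }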

View file

@ -24,12 +24,13 @@ import (
"unicode" "unicode"
"unicode/utf8" "unicode/utf8"
"github.com/spf13/hugo/config"
"github.com/miekg/mmark" "github.com/miekg/mmark"
"github.com/mitchellh/mapstructure" "github.com/mitchellh/mapstructure"
"github.com/russross/blackfriday" "github.com/russross/blackfriday"
bp "github.com/spf13/hugo/bufferpool" bp "github.com/spf13/hugo/bufferpool"
jww "github.com/spf13/jwalterweatherman" jww "github.com/spf13/jwalterweatherman"
"github.com/spf13/viper"
"strings" "strings"
"sync" "sync"
@ -41,6 +42,14 @@ var SummaryLength = 70
// SummaryDivider denotes where content summarization should end. The default is "<!--more-->". // SummaryDivider denotes where content summarization should end. The default is "<!--more-->".
var SummaryDivider = []byte("<!--more-->") var SummaryDivider = []byte("<!--more-->")
type ContentSpec struct {
cfg config.Provider
}
func NewContentSpec(cfg config.Provider) *ContentSpec {
return &ContentSpec{cfg}
}
// Blackfriday holds configuration values for Blackfriday rendering. // Blackfriday holds configuration values for Blackfriday rendering.
type Blackfriday struct { type Blackfriday struct {
Smartypants bool Smartypants bool
@ -58,7 +67,7 @@ type Blackfriday struct {
} }
// NewBlackfriday creates a new Blackfriday filled with site config or some sane defaults. // NewBlackfriday creates a new Blackfriday filled with site config or some sane defaults.
func NewBlackfriday(c ConfigProvider) *Blackfriday { func (c ContentSpec) NewBlackfriday() *Blackfriday {
defaultParam := map[string]interface{}{ defaultParam := map[string]interface{}{
"smartypants": true, "smartypants": true,
@ -75,7 +84,7 @@ func NewBlackfriday(c ConfigProvider) *Blackfriday {
ToLowerMap(defaultParam) ToLowerMap(defaultParam)
siteParam := c.GetStringMap("blackfriday") siteParam := c.cfg.GetStringMap("blackfriday")
siteConfig := make(map[string]interface{}) siteConfig := make(map[string]interface{})
@ -187,10 +196,10 @@ func BytesToHTML(b []byte) template.HTML {
} }
// getHTMLRenderer creates a new Blackfriday HTML Renderer with the given configuration. // getHTMLRenderer creates a new Blackfriday HTML Renderer with the given configuration.
func getHTMLRenderer(defaultFlags int, ctx *RenderingContext) blackfriday.Renderer { func (c ContentSpec) getHTMLRenderer(defaultFlags int, ctx *RenderingContext) blackfriday.Renderer {
renderParameters := blackfriday.HtmlRendererParameters{ renderParameters := blackfriday.HtmlRendererParameters{
FootnoteAnchorPrefix: viper.GetString("footnoteAnchorPrefix"), FootnoteAnchorPrefix: c.cfg.GetString("footnoteAnchorPrefix"),
FootnoteReturnLinkContents: viper.GetString("footnoteReturnLinkContents"), FootnoteReturnLinkContents: c.cfg.GetString("footnoteReturnLinkContents"),
} }
b := len(ctx.DocumentID) != 0 b := len(ctx.DocumentID) != 0
@ -265,21 +274,21 @@ func getMarkdownExtensions(ctx *RenderingContext) int {
return flags return flags
} }
func markdownRender(ctx *RenderingContext) []byte { func (c ContentSpec) markdownRender(ctx *RenderingContext) []byte {
if ctx.RenderTOC { if ctx.RenderTOC {
return blackfriday.Markdown(ctx.Content, return blackfriday.Markdown(ctx.Content,
getHTMLRenderer(blackfriday.HTML_TOC, ctx), c.getHTMLRenderer(blackfriday.HTML_TOC, ctx),
getMarkdownExtensions(ctx)) getMarkdownExtensions(ctx))
} }
return blackfriday.Markdown(ctx.Content, getHTMLRenderer(0, ctx), return blackfriday.Markdown(ctx.Content, c.getHTMLRenderer(0, ctx),
getMarkdownExtensions(ctx)) getMarkdownExtensions(ctx))
} }
// getMmarkHTMLRenderer creates a new mmark HTML Renderer with the given configuration. // getMmarkHTMLRenderer creates a new mmark HTML Renderer with the given configuration.
func getMmarkHTMLRenderer(defaultFlags int, ctx *RenderingContext) mmark.Renderer { func (c ContentSpec) getMmarkHTMLRenderer(defaultFlags int, ctx *RenderingContext) mmark.Renderer {
renderParameters := mmark.HtmlRendererParameters{ renderParameters := mmark.HtmlRendererParameters{
FootnoteAnchorPrefix: viper.GetString("footnoteAnchorPrefix"), FootnoteAnchorPrefix: c.cfg.GetString("footnoteAnchorPrefix"),
FootnoteReturnLinkContents: viper.GetString("footnoteReturnLinkContents"), FootnoteReturnLinkContents: c.cfg.GetString("footnoteReturnLinkContents"),
} }
b := len(ctx.DocumentID) != 0 b := len(ctx.DocumentID) != 0
@ -294,6 +303,7 @@ func getMmarkHTMLRenderer(defaultFlags int, ctx *RenderingContext) mmark.Rendere
return &HugoMmarkHTMLRenderer{ return &HugoMmarkHTMLRenderer{
mmark.HtmlRendererWithParameters(htmlFlags, "", "", renderParameters), mmark.HtmlRendererWithParameters(htmlFlags, "", "", renderParameters),
c.cfg,
} }
} }
@ -321,8 +331,8 @@ func getMmarkExtensions(ctx *RenderingContext) int {
return flags return flags
} }
func mmarkRender(ctx *RenderingContext) []byte { func (c ContentSpec) mmarkRender(ctx *RenderingContext) []byte {
return mmark.Parse(ctx.Content, getMmarkHTMLRenderer(0, ctx), return mmark.Parse(ctx.Content, c.getMmarkHTMLRenderer(0, ctx),
getMmarkExtensions(ctx)).Bytes() getMmarkExtensions(ctx)).Bytes()
} }
@ -365,42 +375,44 @@ func ExtractTOC(content []byte) (newcontent []byte, toc []byte) {
// RenderingContext holds contextual information, like content and configuration, // RenderingContext holds contextual information, like content and configuration,
// for a given content rendering. // for a given content rendering.
type RenderingContext struct { type RenderingContext struct {
Content []byte Content []byte
PageFmt string PageFmt string
DocumentID string DocumentID string
DocumentName string DocumentName string
Config *Blackfriday Config *Blackfriday
RenderTOC bool RenderTOC bool
FileResolver FileResolverFunc FileResolver FileResolverFunc
LinkResolver LinkResolverFunc LinkResolver LinkResolverFunc
ConfigProvider ConfigProvider Cfg config.Provider
configInit sync.Once configInit sync.Once
} }
func newViperProvidedRenderingContext() *RenderingContext { func newRenderingContext(cfg config.Provider) *RenderingContext {
return &RenderingContext{ConfigProvider: viper.GetViper()} return &RenderingContext{Cfg: cfg}
} }
func (c *RenderingContext) getConfig() *Blackfriday { func (c *RenderingContext) getConfig() *Blackfriday {
// TODO(bep) get rid of this
c.configInit.Do(func() { c.configInit.Do(func() {
if c.Config == nil { if c.Config == nil {
c.Config = NewBlackfriday(c.ConfigProvider) cs := NewContentSpec(c.Cfg)
c.Config = cs.NewBlackfriday()
} }
}) })
return c.Config return c.Config
} }
// RenderBytes renders a []byte. // RenderBytes renders a []byte.
func RenderBytes(ctx *RenderingContext) []byte { func (c ContentSpec) RenderBytes(ctx *RenderingContext) []byte {
switch ctx.PageFmt { switch ctx.PageFmt {
default: default:
return markdownRender(ctx) return c.markdownRender(ctx)
case "markdown": case "markdown":
return markdownRender(ctx) return c.markdownRender(ctx)
case "asciidoc": case "asciidoc":
return getAsciidocContent(ctx) return getAsciidocContent(ctx)
case "mmark": case "mmark":
return mmarkRender(ctx) return c.mmarkRender(ctx)
case "rst": case "rst":
return getRstContent(ctx) return getRstContent(ctx)
} }
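Markdown and mmark rendering now hang off a ContentSpec, so the config is injected once instead of being read from the global Viper inside every render call. A small sketch of the new entry point, assuming helpers and viper are imported (output shown is approximate):

    v := viper.New()
    spec := helpers.NewContentSpec(v)

    ctx := &helpers.RenderingContext{
        Content: []byte("**hello**"),
        PageFmt: "markdown",
        Cfg:     v,
    }
    out := spec.RenderBytes(ctx) // roughly "<p><strong>hello</strong></p>\n"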

View file

@ -19,8 +19,8 @@ import (
"github.com/miekg/mmark" "github.com/miekg/mmark"
"github.com/russross/blackfriday" "github.com/russross/blackfriday"
"github.com/spf13/hugo/config"
jww "github.com/spf13/jwalterweatherman" jww "github.com/spf13/jwalterweatherman"
"github.com/spf13/viper"
) )
type LinkResolverFunc func(ref string) (string, error) type LinkResolverFunc func(ref string) (string, error)
@ -33,49 +33,49 @@ type HugoHTMLRenderer struct {
blackfriday.Renderer blackfriday.Renderer
} }
func (renderer *HugoHTMLRenderer) BlockCode(out *bytes.Buffer, text []byte, lang string) { func (r *HugoHTMLRenderer) BlockCode(out *bytes.Buffer, text []byte, lang string) {
if viper.GetBool("pygmentsCodeFences") && (lang != "" || viper.GetBool("pygmentsCodeFencesGuessSyntax")) { if r.Cfg.GetBool("pygmentsCodeFences") && (lang != "" || r.Cfg.GetBool("pygmentsCodeFencesGuessSyntax")) {
opts := viper.GetString("pygmentsOptions") opts := r.Cfg.GetString("pygmentsOptions")
str := html.UnescapeString(string(text)) str := html.UnescapeString(string(text))
out.WriteString(Highlight(str, lang, opts)) out.WriteString(Highlight(r.RenderingContext.Cfg, str, lang, opts))
} else { } else {
renderer.Renderer.BlockCode(out, text, lang) r.Renderer.BlockCode(out, text, lang)
} }
} }
func (renderer *HugoHTMLRenderer) Link(out *bytes.Buffer, link []byte, title []byte, content []byte) { func (r *HugoHTMLRenderer) Link(out *bytes.Buffer, link []byte, title []byte, content []byte) {
if renderer.LinkResolver == nil || bytes.HasPrefix(link, []byte("HAHAHUGOSHORTCODE")) { if r.LinkResolver == nil || bytes.HasPrefix(link, []byte("HAHAHUGOSHORTCODE")) {
// Use the blackfriday built in Link handler // Use the blackfriday built in Link handler
renderer.Renderer.Link(out, link, title, content) r.Renderer.Link(out, link, title, content)
} else { } else {
// set by SourceRelativeLinksEval // set by SourceRelativeLinksEval
newLink, err := renderer.LinkResolver(string(link)) newLink, err := r.LinkResolver(string(link))
if err != nil { if err != nil {
newLink = string(link) newLink = string(link)
jww.ERROR.Printf("LinkResolver: %s", err) jww.ERROR.Printf("LinkResolver: %s", err)
} }
renderer.Renderer.Link(out, []byte(newLink), title, content) r.Renderer.Link(out, []byte(newLink), title, content)
} }
} }
func (renderer *HugoHTMLRenderer) Image(out *bytes.Buffer, link []byte, title []byte, alt []byte) { func (r *HugoHTMLRenderer) Image(out *bytes.Buffer, link []byte, title []byte, alt []byte) {
if renderer.FileResolver == nil || bytes.HasPrefix(link, []byte("HAHAHUGOSHORTCODE")) { if r.FileResolver == nil || bytes.HasPrefix(link, []byte("HAHAHUGOSHORTCODE")) {
// Use the blackfriday built in Image handler // Use the blackfriday built in Image handler
renderer.Renderer.Image(out, link, title, alt) r.Renderer.Image(out, link, title, alt)
} else { } else {
// set by SourceRelativeLinksEval // set by SourceRelativeLinksEval
newLink, err := renderer.FileResolver(string(link)) newLink, err := r.FileResolver(string(link))
if err != nil { if err != nil {
newLink = string(link) newLink = string(link)
jww.ERROR.Printf("FileResolver: %s", err) jww.ERROR.Printf("FileResolver: %s", err)
} }
renderer.Renderer.Image(out, []byte(newLink), title, alt) r.Renderer.Image(out, []byte(newLink), title, alt)
} }
} }
// ListItem adds task list support to the Blackfriday renderer. // ListItem adds task list support to the Blackfriday renderer.
func (renderer *HugoHTMLRenderer) ListItem(out *bytes.Buffer, text []byte, flags int) { func (r *HugoHTMLRenderer) ListItem(out *bytes.Buffer, text []byte, flags int) {
if !renderer.Config.TaskLists { if !r.Config.TaskLists {
renderer.Renderer.ListItem(out, text, flags) r.Renderer.ListItem(out, text, flags)
return return
} }
@ -87,17 +87,17 @@ func (renderer *HugoHTMLRenderer) ListItem(out *bytes.Buffer, text []byte, flags
text = append([]byte(`<input type="checkbox" checked disabled class="task-list-item">`), text[3:]...) text = append([]byte(`<input type="checkbox" checked disabled class="task-list-item">`), text[3:]...)
} }
renderer.Renderer.ListItem(out, text, flags) r.Renderer.ListItem(out, text, flags)
} }
// List adds task list support to the Blackfriday renderer. // List adds task list support to the Blackfriday renderer.
func (renderer *HugoHTMLRenderer) List(out *bytes.Buffer, text func() bool, flags int) { func (r *HugoHTMLRenderer) List(out *bytes.Buffer, text func() bool, flags int) {
if !renderer.Config.TaskLists { if !r.Config.TaskLists {
renderer.Renderer.List(out, text, flags) r.Renderer.List(out, text, flags)
return return
} }
marker := out.Len() marker := out.Len()
renderer.Renderer.List(out, text, flags) r.Renderer.List(out, text, flags)
if out.Len() > marker { if out.Len() > marker {
list := out.Bytes()[marker:] list := out.Bytes()[marker:]
if bytes.Contains(list, []byte("task-list-item")) { if bytes.Contains(list, []byte("task-list-item")) {
@ -114,13 +114,14 @@ func (renderer *HugoHTMLRenderer) List(out *bytes.Buffer, text func() bool, flag
// Enabling Hugo to customise the rendering experience // Enabling Hugo to customise the rendering experience
type HugoMmarkHTMLRenderer struct { type HugoMmarkHTMLRenderer struct {
mmark.Renderer mmark.Renderer
Cfg config.Provider
} }
func (renderer *HugoMmarkHTMLRenderer) BlockCode(out *bytes.Buffer, text []byte, lang string, caption []byte, subfigure bool, callouts bool) { func (r *HugoMmarkHTMLRenderer) BlockCode(out *bytes.Buffer, text []byte, lang string, caption []byte, subfigure bool, callouts bool) {
if viper.GetBool("pygmentsCodeFences") && (lang != "" || viper.GetBool("pygmentsCodeFencesGuessSyntax")) { if r.Cfg.GetBool("pygmentsCodeFences") && (lang != "" || r.Cfg.GetBool("pygmentsCodeFencesGuessSyntax")) {
str := html.UnescapeString(string(text)) str := html.UnescapeString(string(text))
out.WriteString(Highlight(str, lang, "")) out.WriteString(Highlight(r.Cfg, str, lang, ""))
} else { } else {
renderer.Renderer.BlockCode(out, text, lang, caption, subfigure, callouts) r.Renderer.BlockCode(out, text, lang, caption, subfigure, callouts)
} }
} }

View file

@ -22,9 +22,9 @@ import (
) )
// Renders a codeblock using Blackfriday // Renders a codeblock using Blackfriday
func render(input string) string { func (c ContentSpec) render(input string) string {
ctx := newViperProvidedRenderingContext() ctx := newRenderingContext(c.cfg)
render := getHTMLRenderer(0, ctx) render := c.getHTMLRenderer(0, ctx)
buf := &bytes.Buffer{} buf := &bytes.Buffer{}
render.BlockCode(buf, []byte(input), "html") render.BlockCode(buf, []byte(input), "html")
@ -32,9 +32,9 @@ func render(input string) string {
} }
// Renders a codeblock using Mmark // Renders a codeblock using Mmark
func renderWithMmark(input string) string { func (c ContentSpec) renderWithMmark(input string) string {
ctx := newViperProvidedRenderingContext() ctx := newRenderingContext(c.cfg)
render := getMmarkHTMLRenderer(0, ctx) render := c.getMmarkHTMLRenderer(0, ctx)
buf := &bytes.Buffer{} buf := &bytes.Buffer{}
render.BlockCode(buf, []byte(input), "html", []byte(""), false, false) render.BlockCode(buf, []byte(input), "html", []byte(""), false, false)
@ -59,16 +59,16 @@ func TestCodeFence(t *testing.T) {
{false, "<html></html>", `(?s)^<pre><code class="language-html">.*?</code></pre>\n$`}, {false, "<html></html>", `(?s)^<pre><code class="language-html">.*?</code></pre>\n$`},
} }
viper.Reset()
defer viper.Reset()
viper.Set("pygmentsStyle", "monokai")
viper.Set("pygmentsUseClasses", true)
for i, d := range data { for i, d := range data {
viper.Set("pygmentsCodeFences", d.enabled) v := viper.New()
result := render(d.input) v.Set("pygmentsStyle", "monokai")
v.Set("pygmentsUseClasses", true)
v.Set("pygmentsCodeFences", d.enabled)
c := NewContentSpec(v)
result := c.render(d.input)
expectedRe, err := regexp.Compile(d.expected) expectedRe, err := regexp.Compile(d.expected)
@ -81,7 +81,7 @@ func TestCodeFence(t *testing.T) {
t.Errorf("Test %d failed. BlackFriday enabled:%t, Expected:\n%q got:\n%q", i, d.enabled, d.expected, result) t.Errorf("Test %d failed. BlackFriday enabled:%t, Expected:\n%q got:\n%q", i, d.enabled, d.expected, result)
} }
result = renderWithMmark(d.input) result = c.renderWithMmark(d.input)
matched = expectedRe.MatchString(result) matched = expectedRe.MatchString(result)
if !matched { if !matched {
t.Errorf("Test %d failed. Mmark enabled:%t, Expected:\n%q got:\n%q", i, d.enabled, d.expected, result) t.Errorf("Test %d failed. Mmark enabled:%t, Expected:\n%q got:\n%q", i, d.enabled, d.expected, result)
@ -90,6 +90,8 @@ func TestCodeFence(t *testing.T) {
} }
func TestBlackfridayTaskList(t *testing.T) { func TestBlackfridayTaskList(t *testing.T) {
c := newTestContentSpec()
for i, this := range []struct { for i, this := range []struct {
markdown string markdown string
taskListEnabled bool taskListEnabled bool
@ -118,11 +120,11 @@ END
</ul> </ul>
`}, `},
} { } {
blackFridayConfig := NewBlackfriday(viper.GetViper()) blackFridayConfig := c.NewBlackfriday()
blackFridayConfig.TaskLists = this.taskListEnabled blackFridayConfig.TaskLists = this.taskListEnabled
ctx := &RenderingContext{Content: []byte(this.markdown), PageFmt: "markdown", Config: blackFridayConfig} ctx := &RenderingContext{Content: []byte(this.markdown), PageFmt: "markdown", Config: blackFridayConfig}
result := string(RenderBytes(ctx)) result := string(c.RenderBytes(ctx))
if result != this.expect { if result != this.expect {
t.Errorf("[%d] got \n%v but expected \n%v", i, result, this.expect) t.Errorf("[%d] got \n%v but expected \n%v", i, result, this.expect)

View file

@ -21,7 +21,6 @@ import (
"github.com/miekg/mmark" "github.com/miekg/mmark"
"github.com/russross/blackfriday" "github.com/russross/blackfriday"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
@ -152,8 +151,9 @@ func TestTruncateWordsByRune(t *testing.T) {
} }
func TestGetHTMLRendererFlags(t *testing.T) { func TestGetHTMLRendererFlags(t *testing.T) {
ctx := newViperProvidedRenderingContext() c := newTestContentSpec()
renderer := getHTMLRenderer(blackfriday.HTML_USE_XHTML, ctx) ctx := newRenderingContext(c.cfg)
renderer := c.getHTMLRenderer(blackfriday.HTML_USE_XHTML, ctx)
flags := renderer.GetFlags() flags := renderer.GetFlags()
if flags&blackfriday.HTML_USE_XHTML != blackfriday.HTML_USE_XHTML { if flags&blackfriday.HTML_USE_XHTML != blackfriday.HTML_USE_XHTML {
t.Errorf("Test flag: %d was not found amongs set flags:%d; Result: %d", blackfriday.HTML_USE_XHTML, flags, flags&blackfriday.HTML_USE_XHTML) t.Errorf("Test flag: %d was not found amongs set flags:%d; Result: %d", blackfriday.HTML_USE_XHTML, flags, flags&blackfriday.HTML_USE_XHTML)
@ -161,6 +161,8 @@ func TestGetHTMLRendererFlags(t *testing.T) {
} }
func TestGetHTMLRendererAllFlags(t *testing.T) { func TestGetHTMLRendererAllFlags(t *testing.T) {
c := newTestContentSpec()
type data struct { type data struct {
testFlag int testFlag int
} }
@ -176,7 +178,7 @@ func TestGetHTMLRendererAllFlags(t *testing.T) {
{blackfriday.HTML_SMARTYPANTS_LATEX_DASHES}, {blackfriday.HTML_SMARTYPANTS_LATEX_DASHES},
} }
defaultFlags := blackfriday.HTML_USE_XHTML defaultFlags := blackfriday.HTML_USE_XHTML
ctx := newViperProvidedRenderingContext() ctx := newRenderingContext(c.cfg)
ctx.Config = ctx.getConfig() ctx.Config = ctx.getConfig()
ctx.Config.AngledQuotes = true ctx.Config.AngledQuotes = true
ctx.Config.Fractions = true ctx.Config.Fractions = true
@ -186,7 +188,7 @@ func TestGetHTMLRendererAllFlags(t *testing.T) {
ctx.Config.SmartDashes = true ctx.Config.SmartDashes = true
ctx.Config.Smartypants = true ctx.Config.Smartypants = true
ctx.Config.SourceRelativeLinksEval = true ctx.Config.SourceRelativeLinksEval = true
renderer := getHTMLRenderer(defaultFlags, ctx) renderer := c.getHTMLRenderer(defaultFlags, ctx)
actualFlags := renderer.GetFlags() actualFlags := renderer.GetFlags()
var expectedFlags int var expectedFlags int
//OR-ing flags together... //OR-ing flags together...
@ -199,12 +201,13 @@ func TestGetHTMLRendererAllFlags(t *testing.T) {
} }
func TestGetHTMLRendererAnchors(t *testing.T) { func TestGetHTMLRendererAnchors(t *testing.T) {
ctx := newViperProvidedRenderingContext() c := newTestContentSpec()
ctx := newRenderingContext(c.cfg)
ctx.DocumentID = "testid" ctx.DocumentID = "testid"
ctx.Config = ctx.getConfig() ctx.Config = ctx.getConfig()
ctx.Config.PlainIDAnchors = false ctx.Config.PlainIDAnchors = false
actualRenderer := getHTMLRenderer(0, ctx) actualRenderer := c.getHTMLRenderer(0, ctx)
headerBuffer := &bytes.Buffer{} headerBuffer := &bytes.Buffer{}
footnoteBuffer := &bytes.Buffer{} footnoteBuffer := &bytes.Buffer{}
expectedFootnoteHref := []byte("href=\"#fn:testid:href\"") expectedFootnoteHref := []byte("href=\"#fn:testid:href\"")
@ -223,11 +226,12 @@ func TestGetHTMLRendererAnchors(t *testing.T) {
} }
func TestGetMmarkHTMLRenderer(t *testing.T) { func TestGetMmarkHTMLRenderer(t *testing.T) {
ctx := newViperProvidedRenderingContext() c := newTestContentSpec()
ctx := newRenderingContext(c.cfg)
ctx.DocumentID = "testid" ctx.DocumentID = "testid"
ctx.Config = ctx.getConfig() ctx.Config = ctx.getConfig()
ctx.Config.PlainIDAnchors = false ctx.Config.PlainIDAnchors = false
actualRenderer := getMmarkHTMLRenderer(0, ctx) actualRenderer := c.getMmarkHTMLRenderer(0, ctx)
headerBuffer := &bytes.Buffer{} headerBuffer := &bytes.Buffer{}
footnoteBuffer := &bytes.Buffer{} footnoteBuffer := &bytes.Buffer{}
@ -247,7 +251,8 @@ func TestGetMmarkHTMLRenderer(t *testing.T) {
} }
func TestGetMarkdownExtensionsMasksAreRemovedFromExtensions(t *testing.T) { func TestGetMarkdownExtensionsMasksAreRemovedFromExtensions(t *testing.T) {
ctx := newViperProvidedRenderingContext() c := newTestContentSpec()
ctx := newRenderingContext(c.cfg)
ctx.Config = ctx.getConfig() ctx.Config = ctx.getConfig()
ctx.Config.Extensions = []string{"headerId"} ctx.Config.Extensions = []string{"headerId"}
ctx.Config.ExtensionsMask = []string{"noIntraEmphasis"} ctx.Config.ExtensionsMask = []string{"noIntraEmphasis"}
@ -262,7 +267,8 @@ func TestGetMarkdownExtensionsByDefaultAllExtensionsAreEnabled(t *testing.T) {
type data struct { type data struct {
testFlag int testFlag int
} }
ctx := newViperProvidedRenderingContext() c := newTestContentSpec()
ctx := newRenderingContext(c.cfg)
ctx.Config = ctx.getConfig() ctx.Config = ctx.getConfig()
ctx.Config.Extensions = []string{""} ctx.Config.Extensions = []string{""}
ctx.Config.ExtensionsMask = []string{""} ctx.Config.ExtensionsMask = []string{""}
@ -294,7 +300,8 @@ func TestGetMarkdownExtensionsByDefaultAllExtensionsAreEnabled(t *testing.T) {
} }
func TestGetMarkdownExtensionsAddingFlagsThroughRenderingContext(t *testing.T) { func TestGetMarkdownExtensionsAddingFlagsThroughRenderingContext(t *testing.T) {
ctx := newViperProvidedRenderingContext() c := newTestContentSpec()
ctx := newRenderingContext(c.cfg)
ctx.Config = ctx.getConfig() ctx.Config = ctx.getConfig()
ctx.Config.Extensions = []string{"definitionLists"} ctx.Config.Extensions = []string{"definitionLists"}
ctx.Config.ExtensionsMask = []string{""} ctx.Config.ExtensionsMask = []string{""}
@ -306,10 +313,11 @@ func TestGetMarkdownExtensionsAddingFlagsThroughRenderingContext(t *testing.T) {
} }
func TestGetMarkdownRenderer(t *testing.T) { func TestGetMarkdownRenderer(t *testing.T) {
ctx := newViperProvidedRenderingContext() c := newTestContentSpec()
ctx := newRenderingContext(c.cfg)
ctx.Content = []byte("testContent") ctx.Content = []byte("testContent")
ctx.Config = ctx.getConfig() ctx.Config = ctx.getConfig()
actualRenderedMarkdown := markdownRender(ctx) actualRenderedMarkdown := c.markdownRender(ctx)
expectedRenderedMarkdown := []byte("<p>testContent</p>\n") expectedRenderedMarkdown := []byte("<p>testContent</p>\n")
if !bytes.Equal(actualRenderedMarkdown, expectedRenderedMarkdown) { if !bytes.Equal(actualRenderedMarkdown, expectedRenderedMarkdown) {
t.Errorf("Actual rendered Markdown (%s) did not match expected markdown (%s)", actualRenderedMarkdown, expectedRenderedMarkdown) t.Errorf("Actual rendered Markdown (%s) did not match expected markdown (%s)", actualRenderedMarkdown, expectedRenderedMarkdown)
@ -317,10 +325,11 @@ func TestGetMarkdownRenderer(t *testing.T) {
} }
func TestGetMarkdownRendererWithTOC(t *testing.T) { func TestGetMarkdownRendererWithTOC(t *testing.T) {
ctx := &RenderingContext{RenderTOC: true, ConfigProvider: viper.GetViper()} c := newTestContentSpec()
ctx := &RenderingContext{RenderTOC: true, Cfg: c.cfg}
ctx.Content = []byte("testContent") ctx.Content = []byte("testContent")
ctx.Config = ctx.getConfig() ctx.Config = ctx.getConfig()
actualRenderedMarkdown := markdownRender(ctx) actualRenderedMarkdown := c.markdownRender(ctx)
expectedRenderedMarkdown := []byte("<nav>\n</nav>\n\n<p>testContent</p>\n") expectedRenderedMarkdown := []byte("<nav>\n</nav>\n\n<p>testContent</p>\n")
if !bytes.Equal(actualRenderedMarkdown, expectedRenderedMarkdown) { if !bytes.Equal(actualRenderedMarkdown, expectedRenderedMarkdown) {
t.Errorf("Actual rendered Markdown (%s) did not match expected markdown (%s)", actualRenderedMarkdown, expectedRenderedMarkdown) t.Errorf("Actual rendered Markdown (%s) did not match expected markdown (%s)", actualRenderedMarkdown, expectedRenderedMarkdown)
@ -332,7 +341,8 @@ func TestGetMmarkExtensions(t *testing.T) {
type data struct { type data struct {
testFlag int testFlag int
} }
ctx := newViperProvidedRenderingContext() c := newTestContentSpec()
ctx := newRenderingContext(c.cfg)
ctx.Config = ctx.getConfig() ctx.Config = ctx.getConfig()
ctx.Config.Extensions = []string{"tables"} ctx.Config.Extensions = []string{"tables"}
ctx.Config.ExtensionsMask = []string{""} ctx.Config.ExtensionsMask = []string{""}
@ -361,10 +371,11 @@ func TestGetMmarkExtensions(t *testing.T) {
} }
func TestMmarkRender(t *testing.T) { func TestMmarkRender(t *testing.T) {
ctx := newViperProvidedRenderingContext() c := newTestContentSpec()
ctx := newRenderingContext(c.cfg)
ctx.Content = []byte("testContent") ctx.Content = []byte("testContent")
ctx.Config = ctx.getConfig() ctx.Config = ctx.getConfig()
actualRenderedMarkdown := mmarkRender(ctx) actualRenderedMarkdown := c.mmarkRender(ctx)
expectedRenderedMarkdown := []byte("<p>testContent</p>\n") expectedRenderedMarkdown := []byte("<p>testContent</p>\n")
if !bytes.Equal(actualRenderedMarkdown, expectedRenderedMarkdown) { if !bytes.Equal(actualRenderedMarkdown, expectedRenderedMarkdown) {
t.Errorf("Actual rendered Markdown (%s) did not match expected markdown (%s)", actualRenderedMarkdown, expectedRenderedMarkdown) t.Errorf("Actual rendered Markdown (%s) did not match expected markdown (%s)", actualRenderedMarkdown, expectedRenderedMarkdown)

View file

@ -32,7 +32,6 @@ import (
bp "github.com/spf13/hugo/bufferpool" bp "github.com/spf13/hugo/bufferpool"
jww "github.com/spf13/jwalterweatherman" jww "github.com/spf13/jwalterweatherman"
"github.com/spf13/pflag" "github.com/spf13/pflag"
"github.com/spf13/viper"
) )
// FilePathSeparator as defined by os.Separator. // FilePathSeparator as defined by os.Separator.
@ -196,8 +195,8 @@ func ReaderContains(r io.Reader, subslice []byte) bool {
} }
// ThemeSet checks whether a theme is in use or not. // ThemeSet checks whether a theme is in use or not.
func ThemeSet() bool { func (p *PathSpec) ThemeSet() bool {
return viper.GetString("theme") != "" return p.theme != ""
} }
type logPrinter interface { type logPrinter interface {

View file

@ -19,8 +19,7 @@ import (
"sync" "sync"
"github.com/spf13/cast" "github.com/spf13/cast"
"github.com/spf13/hugo/config"
"github.com/spf13/viper"
) )
// These are the settings that should only be looked up in the global Viper // These are the settings that should only be looked up in the global Viper
@ -41,26 +40,28 @@ type Language struct {
LanguageName string LanguageName string
Title string Title string
Weight int Weight int
params map[string]interface{}
paramsInit sync.Once Cfg config.Provider
params map[string]interface{}
paramsInit sync.Once
} }
func (l *Language) String() string { func (l *Language) String() string {
return l.Lang return l.Lang
} }
func NewLanguage(lang string) *Language { func NewLanguage(lang string, cfg config.Provider) *Language {
return &Language{Lang: lang, params: make(map[string]interface{})} return &Language{Lang: lang, Cfg: cfg, params: make(map[string]interface{})}
} }
func NewDefaultLanguage() *Language { func NewDefaultLanguage(cfg config.Provider) *Language {
defaultLang := viper.GetString("defaultContentLanguage") defaultLang := cfg.GetString("defaultContentLanguage")
if defaultLang == "" { if defaultLang == "" {
defaultLang = "en" defaultLang = "en"
} }
return NewLanguage(defaultLang) return NewLanguage(defaultLang, cfg)
} }
type Languages []*Language type Languages []*Language
@ -83,7 +84,7 @@ func (l *Language) Params() map[string]interface{} {
// Merge with global config. // Merge with global config.
// TODO(bep) consider making this part of a constructor func. // TODO(bep) consider making this part of a constructor func.
globalParams := viper.GetStringMap("params") globalParams := l.Cfg.GetStringMap("params")
for k, v := range globalParams { for k, v := range globalParams {
if _, ok := l.params[k]; !ok { if _, ok := l.params[k]; !ok {
l.params[k] = v l.params[k] = v
@ -132,5 +133,28 @@ func (l *Language) Get(key string) interface{} {
return v return v
} }
} }
return viper.Get(key) return l.Cfg.Get(key)
}
// Set sets the value for the key in the language's params.
func (l *Language) Set(key string, value interface{}) {
if l == nil {
panic("language not set")
}
key = strings.ToLower(key)
l.params[key] = value
}
// IsSet checks whether the key is set in the language or the related config store.
func (l *Language) IsSet(key string) bool {
key = strings.ToLower(key)
if !globalOnlySettings[key] {
if _, ok := l.params[key]; ok {
return true
}
}
return l.Cfg.IsSet(key)
} }
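Language now wraps a config.Provider and can itself be used as one: Set writes into the per-language params, Get and IsSet fall back to the wrapped store, and keys in globalOnlySettings are always answered by the global side. A short sketch of that lookup order (values illustrative):

    v := viper.New()
    v.Set("paginatePath", "page")

    l := helpers.NewLanguage("nn", v)
    l.Set("paginatePath", "side") // per-language override

    l.GetString("paginatePath") // "side": the language params win over the wrapped config
    l.IsSet("title")            // false: in neither the params nor the config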

View file

@ -21,11 +21,12 @@ import (
) )
func TestGetGlobalOnlySetting(t *testing.T) { func TestGetGlobalOnlySetting(t *testing.T) {
lang := NewDefaultLanguage() v := viper.New()
lang := NewDefaultLanguage(v)
lang.SetParam("defaultContentLanguageInSubdir", false) lang.SetParam("defaultContentLanguageInSubdir", false)
lang.SetParam("paginatePath", "side") lang.SetParam("paginatePath", "side")
viper.Set("defaultContentLanguageInSubdir", true) v.Set("defaultContentLanguageInSubdir", true)
viper.Set("paginatePath", "page") v.Set("paginatePath", "page")
require.True(t, lang.GetBool("defaultContentLanguageInSubdir")) require.True(t, lang.GetBool("defaultContentLanguageInSubdir"))
require.Equal(t, "side", lang.GetString("paginatePath")) require.Equal(t, "side", lang.GetString("paginatePath"))

View file

@ -24,7 +24,6 @@ import (
"unicode" "unicode"
"github.com/spf13/afero" "github.com/spf13/afero"
"github.com/spf13/viper"
"golang.org/x/text/transform" "golang.org/x/text/transform"
"golang.org/x/text/unicode/norm" "golang.org/x/text/unicode/norm"
) )
@ -153,41 +152,41 @@ func ReplaceExtension(path string, newExt string) string {
// AbsPathify creates an absolute path if given a relative path. If already // AbsPathify creates an absolute path if given a relative path. If already
// absolute, the path is just cleaned. // absolute, the path is just cleaned.
func AbsPathify(inPath string) string { func (p *PathSpec) AbsPathify(inPath string) string {
if filepath.IsAbs(inPath) { if filepath.IsAbs(inPath) {
return filepath.Clean(inPath) return filepath.Clean(inPath)
} }
// TODO(bep): Consider moving workingDir to argument list // TODO(bep): Consider moving workingDir to argument list
return filepath.Clean(filepath.Join(viper.GetString("workingDir"), inPath)) return filepath.Clean(filepath.Join(p.workingDir, inPath))
} }
// GetLayoutDirPath returns the absolute path to the layout file dir // GetLayoutDirPath returns the absolute path to the layout file dir
// for the current Hugo project. // for the current Hugo project.
func GetLayoutDirPath() string { func (p *PathSpec) GetLayoutDirPath() string {
return AbsPathify(viper.GetString("layoutDir")) return p.AbsPathify(p.layoutDir)
} }
// GetStaticDirPath returns the absolute path to the static file dir // GetStaticDirPath returns the absolute path to the static file dir
// for the current Hugo project. // for the current Hugo project.
func GetStaticDirPath() string { func (p *PathSpec) GetStaticDirPath() string {
return AbsPathify(viper.GetString("staticDir")) return p.AbsPathify(p.staticDir)
} }
// GetThemeDir gets the root directory of the current theme, if there is one. // GetThemeDir gets the root directory of the current theme, if there is one.
// If there is no theme, returns the empty string. // If there is no theme, returns the empty string.
func GetThemeDir() string { func (p *PathSpec) GetThemeDir() string {
if ThemeSet() { if p.ThemeSet() {
return AbsPathify(filepath.Join(viper.GetString("themesDir"), viper.GetString("theme"))) return p.AbsPathify(filepath.Join(p.themesDir, p.theme))
} }
return "" return ""
} }
// GetRelativeThemeDir gets the relative root directory of the current theme, if there is one. // GetRelativeThemeDir gets the relative root directory of the current theme, if there is one.
// If there is no theme, returns the empty string. // If there is no theme, returns the empty string.
func GetRelativeThemeDir() string { func (p *PathSpec) GetRelativeThemeDir() string {
if ThemeSet() { if p.ThemeSet() {
return strings.TrimPrefix(filepath.Join(viper.GetString("themesDir"), viper.GetString("theme")), FilePathSeparator) return strings.TrimPrefix(filepath.Join(p.themesDir, p.theme), FilePathSeparator)
} }
return "" return ""
} }
@ -211,13 +210,13 @@ func (p *PathSpec) GetThemeI18nDirPath() (string, error) {
} }
func (p *PathSpec) getThemeDirPath(path string) (string, error) { func (p *PathSpec) getThemeDirPath(path string) (string, error) {
if !ThemeSet() { if !p.ThemeSet() {
return "", ErrThemeUndefined return "", ErrThemeUndefined
} }
themeDir := filepath.Join(GetThemeDir(), path) themeDir := filepath.Join(p.GetThemeDir(), path)
if _, err := p.fs.Source.Stat(themeDir); os.IsNotExist(err) { if _, err := p.fs.Source.Stat(themeDir); os.IsNotExist(err) {
return "", fmt.Errorf("Unable to find %s directory for theme %s in %s", path, viper.GetString("theme"), themeDir) return "", fmt.Errorf("Unable to find %s directory for theme %s in %s", path, p.theme, themeDir)
} }
return themeDir, nil return themeDir, nil
@ -235,7 +234,7 @@ func (p *PathSpec) GetThemesDirPath() string {
// It does so by taking either the project's static path or the theme's static // It does so by taking either the project's static path or the theme's static
// path into consideration. // path into consideration.
func (p *PathSpec) MakeStaticPathRelative(inPath string) (string, error) { func (p *PathSpec) MakeStaticPathRelative(inPath string) (string, error) {
staticDir := GetStaticDirPath() staticDir := p.GetStaticDirPath()
themeStaticDir := p.GetThemesDirPath() themeStaticDir := p.GetThemesDirPath()
return makePathRelative(inPath, staticDir, themeStaticDir) return makePathRelative(inPath, staticDir, themeStaticDir)
@ -360,20 +359,20 @@ func GetRelativePath(path, base string) (final string, err error) {
} }
// PaginateAliasPath creates a path used to access the aliases in the paginator. // PaginateAliasPath creates a path used to access the aliases in the paginator.
func PaginateAliasPath(base string, page int) string { func (p *PathSpec) PaginateAliasPath(base string, page int) string {
paginatePath := Config().GetString("paginatePath") paginatePath := p.paginatePath
uglify := viper.GetBool("uglyURLs") uglify := p.uglyURLs
var p string var pth string
if base != "" { if base != "" {
p = filepath.FromSlash(fmt.Sprintf("/%s/%s/%d", base, paginatePath, page)) pth = filepath.FromSlash(fmt.Sprintf("/%s/%s/%d", base, paginatePath, page))
} else { } else {
p = filepath.FromSlash(fmt.Sprintf("/%s/%d", paginatePath, page)) pth = filepath.FromSlash(fmt.Sprintf("/%s/%d", paginatePath, page))
} }
if uglify { if uglify {
p += ".html" pth += ".html"
} }
return p return pth
} }
// GuessSection returns the section given a source path. // GuessSection returns the section given a source path.
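PaginateAliasPath now reads paginatePath and uglyURLs from the PathSpec instead of the global Viper (and the local variable had to be renamed to pth so it would not shadow the receiver). A quick sketch of the expected output on a Unix path separator, assuming a spec built from a config with paginatePath set to "page":

    p.PaginateAliasPath("posts", 3) // "/posts/page/3", or "/posts/page/3.html" when uglyURLs is true
    p.PaginateAliasPath("", 2)      // "/page/2"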

View file

@ -34,15 +34,7 @@ import (
"github.com/spf13/viper" "github.com/spf13/viper"
) )
func initCommonTestConfig() {
viper.Set("currentContentLanguage", NewLanguage("en"))
}
func TestMakePath(t *testing.T) { func TestMakePath(t *testing.T) {
viper.Reset()
defer viper.Reset()
initCommonTestConfig()
tests := []struct { tests := []struct {
input string input string
expected string expected string
@ -64,8 +56,10 @@ func TestMakePath(t *testing.T) {
} }
for _, test := range tests { for _, test := range tests {
viper.Set("removePathAccents", test.removeAccents) v := viper.New()
p := NewPathSpec(hugofs.NewMem(), viper.GetViper()) l := NewDefaultLanguage(v)
v.Set("removePathAccents", test.removeAccents)
p := NewPathSpec(hugofs.NewMem(v), l)
output := p.MakePath(test.input) output := p.MakePath(test.input)
if output != test.expected { if output != test.expected {
@ -75,11 +69,9 @@ func TestMakePath(t *testing.T) {
} }
func TestMakePathSanitized(t *testing.T) { func TestMakePathSanitized(t *testing.T) {
viper.Reset() v := viper.New()
defer viper.Reset() l := NewDefaultLanguage(v)
initCommonTestConfig() p := NewPathSpec(hugofs.NewMem(v), l)
p := NewPathSpec(hugofs.NewMem(), viper.GetViper())
tests := []struct { tests := []struct {
input string input string
@ -102,12 +94,12 @@ func TestMakePathSanitized(t *testing.T) {
} }
func TestMakePathSanitizedDisablePathToLower(t *testing.T) { func TestMakePathSanitizedDisablePathToLower(t *testing.T) {
viper.Reset() v := viper.New()
defer viper.Reset()
initCommonTestConfig() v.Set("disablePathToLower", true)
viper.Set("disablePathToLower", true)
p := NewPathSpec(hugofs.NewMem(), viper.GetViper()) l := NewDefaultLanguage(v)
p := NewPathSpec(hugofs.NewMem(v), l)
tests := []struct { tests := []struct {
input string input string
@ -553,9 +545,9 @@ func TestAbsPathify(t *testing.T) {
for i, d := range data { for i, d := range data {
viper.Reset() viper.Reset()
// todo see comment in AbsPathify // todo see comment in AbsPathify
viper.Set("workingDir", d.workingDir) ps := newTestDefaultPathSpec("workingDir", d.workingDir)
expected := AbsPathify(d.inPath) expected := ps.AbsPathify(d.inPath)
if d.expected != expected { if d.expected != expected {
t.Errorf("Test %d failed. Expected %q but got %q", i, d.expected, expected) t.Errorf("Test %d failed. Expected %q but got %q", i, d.expected, expected)
} }
@ -563,18 +555,18 @@ func TestAbsPathify(t *testing.T) {
t.Logf("Running platform specific path tests for %s", runtime.GOOS) t.Logf("Running platform specific path tests for %s", runtime.GOOS)
if runtime.GOOS == "windows" { if runtime.GOOS == "windows" {
for i, d := range windowsData { for i, d := range windowsData {
viper.Set("workingDir", d.workingDir) ps := newTestDefaultPathSpec("workingDir", d.workingDir)
expected := AbsPathify(d.inPath) expected := ps.AbsPathify(d.inPath)
if d.expected != expected { if d.expected != expected {
t.Errorf("Test %d failed. Expected %q but got %q", i, d.expected, expected) t.Errorf("Test %d failed. Expected %q but got %q", i, d.expected, expected)
} }
} }
} else { } else {
for i, d := range unixData { for i, d := range unixData {
viper.Set("workingDir", d.workingDir) ps := newTestDefaultPathSpec("workingDir", d.workingDir)
expected := AbsPathify(d.inPath) expected := ps.AbsPathify(d.inPath)
if d.expected != expected { if d.expected != expected {
t.Errorf("Test %d failed. Expected %q but got %q", i, d.expected, expected) t.Errorf("Test %d failed. Expected %q but got %q", i, d.expected, expected)
} }

View file

@ -16,6 +16,7 @@ package helpers
import ( import (
"fmt" "fmt"
"github.com/spf13/hugo/config"
"github.com/spf13/hugo/hugofs" "github.com/spf13/hugo/hugofs"
) )
@ -26,11 +27,20 @@ type PathSpec struct {
uglyURLs bool uglyURLs bool
canonifyURLs bool canonifyURLs bool
currentContentLanguage *Language language *Language
// pagination path handling // pagination path handling
paginatePath string paginatePath string
baseURL string
theme string
// Directories
themesDir string
layoutDir string
workingDir string
staticDir string
// The PathSpec looks up its config settings in both the current language // The PathSpec looks up its config settings in both the current language
// and then in the global Viper config. // and then in the global Viper config.
// Some settings, the settings listed below, does not make sense to be set // Some settings, the settings listed below, does not make sense to be set
@ -45,31 +55,35 @@ type PathSpec struct {
} }
func (p PathSpec) String() string { func (p PathSpec) String() string {
return fmt.Sprintf("PathSpec, language %q, prefix %q, multilingual: %T", p.currentContentLanguage.Lang, p.getLanguagePrefix(), p.multilingual) return fmt.Sprintf("PathSpec, language %q, prefix %q, multilingual: %T", p.language.Lang, p.getLanguagePrefix(), p.multilingual)
} }
// NewPathSpec creates a new PathSpec from the given filesystems and ConfigProvider. // NewPathSpec creates a new PathSpec from the given filesystems and Language.
func NewPathSpec(fs *hugofs.Fs, config ConfigProvider) *PathSpec { func NewPathSpec(fs *hugofs.Fs, cfg config.Provider) *PathSpec {
currCl, ok := config.Get("currentContentLanguage").(*Language) ps := &PathSpec{
if !ok {
// TODO(bep) globals
currCl = NewLanguage("en")
}
return &PathSpec{
fs: fs, fs: fs,
disablePathToLower: config.GetBool("disablePathToLower"), disablePathToLower: cfg.GetBool("disablePathToLower"),
removePathAccents: config.GetBool("removePathAccents"), removePathAccents: cfg.GetBool("removePathAccents"),
uglyURLs: config.GetBool("uglyURLs"), uglyURLs: cfg.GetBool("uglyURLs"),
canonifyURLs: config.GetBool("canonifyURLs"), canonifyURLs: cfg.GetBool("canonifyURLs"),
multilingual: config.GetBool("multilingual"), multilingual: cfg.GetBool("multilingual"),
defaultContentLanguageInSubdir: config.GetBool("defaultContentLanguageInSubdir"), defaultContentLanguageInSubdir: cfg.GetBool("defaultContentLanguageInSubdir"),
defaultContentLanguage: config.GetString("defaultContentLanguage"), defaultContentLanguage: cfg.GetString("defaultContentLanguage"),
currentContentLanguage: currCl, paginatePath: cfg.GetString("paginatePath"),
paginatePath: config.GetString("paginatePath"), baseURL: cfg.GetString("baseURL"),
themesDir: cfg.GetString("themesDir"),
layoutDir: cfg.GetString("layoutDir"),
workingDir: cfg.GetString("workingDir"),
staticDir: cfg.GetString("staticDir"),
theme: cfg.GetString("theme"),
} }
if language, ok := cfg.(*Language); ok {
ps.language = language
}
return ps
} }
// PaginatePath returns the configured root path used for paginator pages. // PaginatePath returns the configured root path used for paginator pages.
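A PathSpec is now a plain snapshot of the config taken at construction time; passing a *Language as the provider also wires up the language-aware URL handling. A minimal construction sketch (values illustrative, Unix-style paths assumed):

    v := viper.New()
    v.Set("workingDir", "/work")
    v.Set("theme", "mytheme")

    l := helpers.NewLanguage("en", v)
    p := helpers.NewPathSpec(hugofs.NewMem(v), l)

    p.AbsPathify("content") // "/work/content"
    p.ThemeSet()            // true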

View file

@ -23,17 +23,24 @@ import (
) )
func TestNewPathSpecFromConfig(t *testing.T) { func TestNewPathSpecFromConfig(t *testing.T) {
viper.Set("disablePathToLower", true) v := viper.New()
viper.Set("removePathAccents", true) l := NewLanguage("no", v)
viper.Set("uglyURLs", true) v.Set("disablePathToLower", true)
viper.Set("multilingual", true) v.Set("removePathAccents", true)
viper.Set("defaultContentLanguageInSubdir", true) v.Set("uglyURLs", true)
viper.Set("defaultContentLanguage", "no") v.Set("multilingual", true)
viper.Set("currentContentLanguage", NewLanguage("no")) v.Set("defaultContentLanguageInSubdir", true)
viper.Set("canonifyURLs", true) v.Set("defaultContentLanguage", "no")
viper.Set("paginatePath", "side") v.Set("canonifyURLs", true)
v.Set("paginatePath", "side")
v.Set("baseURL", "http://base.com")
v.Set("themesDir", "thethemes")
v.Set("layoutDir", "thelayouts")
v.Set("workingDir", "thework")
v.Set("staticDir", "thestatic")
v.Set("theme", "thetheme")
p := NewPathSpec(hugofs.NewMem(), viper.GetViper()) p := NewPathSpec(hugofs.NewMem(v), l)
require.True(t, p.canonifyURLs) require.True(t, p.canonifyURLs)
require.True(t, p.defaultContentLanguageInSubdir) require.True(t, p.defaultContentLanguageInSubdir)
@ -42,6 +49,13 @@ func TestNewPathSpecFromConfig(t *testing.T) {
require.True(t, p.removePathAccents) require.True(t, p.removePathAccents)
require.True(t, p.uglyURLs) require.True(t, p.uglyURLs)
require.Equal(t, "no", p.defaultContentLanguage) require.Equal(t, "no", p.defaultContentLanguage)
require.Equal(t, "no", p.currentContentLanguage.Lang) require.Equal(t, "no", p.language.Lang)
require.Equal(t, "side", p.paginatePath) require.Equal(t, "side", p.paginatePath)
require.Equal(t, "http://base.com", p.baseURL)
require.Equal(t, "thethemes", p.themesDir)
require.Equal(t, "thelayouts", p.layoutDir)
require.Equal(t, "thework", p.workingDir)
require.Equal(t, "thestatic", p.staticDir)
require.Equal(t, "thetheme", p.theme)
} }

View file

@ -24,9 +24,9 @@ import (
"sort" "sort"
"strings" "strings"
"github.com/spf13/hugo/config"
"github.com/spf13/hugo/hugofs" "github.com/spf13/hugo/hugofs"
jww "github.com/spf13/jwalterweatherman" jww "github.com/spf13/jwalterweatherman"
"github.com/spf13/viper"
) )
const pygmentsBin = "pygmentize" const pygmentsBin = "pygmentize"
@ -41,13 +41,13 @@ func HasPygments() bool {
} }
// Highlight takes some code and returns highlighted code. // Highlight takes some code and returns highlighted code.
func Highlight(code, lang, optsStr string) string { func Highlight(cfg config.Provider, code, lang, optsStr string) string {
if !HasPygments() { if !HasPygments() {
jww.WARN.Println("Highlighting requires Pygments to be installed and in the path") jww.WARN.Println("Highlighting requires Pygments to be installed and in the path")
return code return code
} }
options, err := parsePygmentsOpts(optsStr) options, err := parsePygmentsOpts(cfg, optsStr)
if err != nil { if err != nil {
jww.ERROR.Print(err.Error()) jww.ERROR.Print(err.Error())
@ -62,8 +62,8 @@ func Highlight(code, lang, optsStr string) string {
fs := hugofs.Os fs := hugofs.Os
ignoreCache := viper.GetBool("ignoreCache") ignoreCache := cfg.GetBool("ignoreCache")
cacheDir := viper.GetString("cacheDir") cacheDir := cfg.GetString("cacheDir")
var cachefile string var cachefile string
if !ignoreCache && cacheDir != "" { if !ignoreCache && cacheDir != "" {
@ -195,19 +195,19 @@ func createOptionsString(options map[string]string) string {
return optionsStr return optionsStr
} }
func parseDefaultPygmentsOpts() (map[string]string, error) { func parseDefaultPygmentsOpts(cfg config.Provider) (map[string]string, error) {
options := make(map[string]string) options := make(map[string]string)
err := parseOptions(options, viper.GetString("pygmentsOptions")) err := parseOptions(options, cfg.GetString("pygmentsOptions"))
if err != nil { if err != nil {
return nil, err return nil, err
} }
if viper.IsSet("pygmentsStyle") { if cfg.IsSet("pygmentsStyle") {
options["style"] = viper.GetString("pygmentsStyle") options["style"] = cfg.GetString("pygmentsStyle")
} }
if viper.IsSet("pygmentsUseClasses") { if cfg.IsSet("pygmentsUseClasses") {
if viper.GetBool("pygmentsUseClasses") { if cfg.GetBool("pygmentsUseClasses") {
options["noclasses"] = "false" options["noclasses"] = "false"
} else { } else {
options["noclasses"] = "true" options["noclasses"] = "true"
@ -222,8 +222,8 @@ func parseDefaultPygmentsOpts() (map[string]string, error) {
return options, nil return options, nil
} }
func parsePygmentsOpts(in string) (string, error) { func parsePygmentsOpts(cfg config.Provider, in string) (string, error) {
options, err := parseDefaultPygmentsOpts() options, err := parseDefaultPygmentsOpts(cfg)
if err != nil { if err != nil {
return "", err return "", err
} }
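Highlight and the Pygments option parsing now take the config as their first argument. A minimal call sketch (Pygments still has to be on the PATH for actual highlighting; otherwise the code is returned unchanged):

    v := viper.New()
    v.Set("pygmentsUseClasses", true)
    v.Set("pygmentsStyle", "monokai")

    html := helpers.Highlight(v, `fmt.Println("hi")`, "go", "")
    _ = html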

View file

@ -34,11 +34,12 @@ func TestParsePygmentsArgs(t *testing.T) {
{"boo=invalid", "foo", false, false}, {"boo=invalid", "foo", false, false},
{"style", "foo", false, false}, {"style", "foo", false, false},
} { } {
viper.Reset()
viper.Set("pygmentsStyle", this.pygmentsStyle)
viper.Set("pygmentsUseClasses", this.pygmentsUseClasses)
result1, err := parsePygmentsOpts(this.in) v := viper.New()
v.Set("pygmentsStyle", this.pygmentsStyle)
v.Set("pygmentsUseClasses", this.pygmentsUseClasses)
result1, err := parsePygmentsOpts(v, this.in)
if b, ok := this.expect1.(bool); ok && !b { if b, ok := this.expect1.(bool); ok && !b {
if err == nil { if err == nil {
t.Errorf("[%d] parsePygmentArgs didn't return an expected error", i) t.Errorf("[%d] parsePygmentArgs didn't return an expected error", i)
@ -70,19 +71,19 @@ func TestParseDefaultPygmentsArgs(t *testing.T) {
{"style=foo,noclasses=false", nil, nil, "style=override,noclasses=override"}, {"style=foo,noclasses=false", nil, nil, "style=override,noclasses=override"},
{"style=foo,noclasses=false", "override", false, "style=override,noclasses=override"}, {"style=foo,noclasses=false", "override", false, "style=override,noclasses=override"},
} { } {
viper.Reset() v := viper.New()
viper.Set("pygmentsOptions", this.pygmentsOptions) v.Set("pygmentsOptions", this.pygmentsOptions)
if s, ok := this.pygmentsStyle.(string); ok { if s, ok := this.pygmentsStyle.(string); ok {
viper.Set("pygmentsStyle", s) v.Set("pygmentsStyle", s)
} }
if b, ok := this.pygmentsUseClasses.(bool); ok { if b, ok := this.pygmentsUseClasses.(bool); ok {
viper.Set("pygmentsUseClasses", b) v.Set("pygmentsUseClasses", b)
} }
result, err := parsePygmentsOpts(this.in) result, err := parsePygmentsOpts(v, this.in)
if err != nil { if err != nil {
t.Errorf("[%d] parsePygmentArgs failed: %s", i, err) t.Errorf("[%d] parsePygmentArgs failed: %s", i, err)
continue continue

View file

@ -0,0 +1,37 @@
package helpers
import (
"github.com/spf13/viper"
"github.com/spf13/hugo/hugofs"
)
func newTestPathSpec(fs *hugofs.Fs, v *viper.Viper) *PathSpec {
l := NewDefaultLanguage(v)
return NewPathSpec(fs, l)
}
func newTestDefaultPathSpec(configKeyValues ...interface{}) *PathSpec {
v := viper.New()
fs := hugofs.NewMem(v)
cfg := newTestCfg(fs)
for i := 0; i < len(configKeyValues); i += 2 {
cfg.Set(configKeyValues[i].(string), configKeyValues[i+1])
}
return newTestPathSpec(fs, cfg)
}
func newTestCfg(fs *hugofs.Fs) *viper.Viper {
v := viper.New()
v.SetFs(fs.Source)
return v
}
func newTestContentSpec() *ContentSpec {
v := viper.New()
return NewContentSpec(v)
}
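A brief usage sketch for these helpers (the config keys and values are illustrative):

// Inside a helpers test:
p := newTestDefaultPathSpec("uglyURLs", true, "canonifyURLs", true)
// p wraps a MemMapFs-backed hugofs.Fs plus a config carrying the two keys above,
// ready for exercising PathSpec methods in the tests below.
_ = p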


@ -21,7 +21,6 @@ import (
"github.com/PuerkitoBio/purell" "github.com/PuerkitoBio/purell"
jww "github.com/spf13/jwalterweatherman" jww "github.com/spf13/jwalterweatherman"
"github.com/spf13/viper"
) )
type pathBridge struct { type pathBridge struct {
@ -158,7 +157,7 @@ func (p *PathSpec) AbsURL(in string, addLanguage bool) string {
return in return in
} }
baseURL := viper.GetString("baseURL") baseURL := p.baseURL
if strings.HasPrefix(in, "/") { if strings.HasPrefix(in, "/") {
p, err := url.Parse(baseURL) p, err := url.Parse(baseURL)
if err != nil { if err != nil {
@ -200,7 +199,7 @@ func (p *PathSpec) getLanguagePrefix() string {
defaultLang := p.defaultContentLanguage defaultLang := p.defaultContentLanguage
defaultInSubDir := p.defaultContentLanguageInSubdir defaultInSubDir := p.defaultContentLanguageInSubdir
currentLang := p.currentContentLanguage.Lang currentLang := p.language.Lang
if currentLang == "" || (currentLang == defaultLang && !defaultInSubDir) { if currentLang == "" || (currentLang == defaultLang && !defaultInSubDir) {
return "" return ""
} }
@ -220,7 +219,7 @@ func IsAbsURL(path string) bool {
// RelURL creates a URL relative to the BaseURL root. // RelURL creates a URL relative to the BaseURL root.
// Note: The result URL will not include the context root if canonifyURLs is enabled. // Note: The result URL will not include the context root if canonifyURLs is enabled.
func (p *PathSpec) RelURL(in string, addLanguage bool) string { func (p *PathSpec) RelURL(in string, addLanguage bool) string {
baseURL := viper.GetString("baseURL") baseURL := p.baseURL
canonifyURLs := p.canonifyURLs canonifyURLs := p.canonifyURLs
if (!strings.HasPrefix(in, baseURL) && strings.HasPrefix(in, "http")) || strings.HasPrefix(in, "//") { if (!strings.HasPrefix(in, baseURL) && strings.HasPrefix(in, "http")) || strings.HasPrefix(in, "//") {
return in return in

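As a non-test illustration of the new wiring, a minimal sketch (the baseURL and input values are illustrative, and the exact output still depends on the usual canonify and multilingual settings):

package main

import (
    "fmt"

    "github.com/spf13/hugo/helpers"
    "github.com/spf13/hugo/hugofs"
    "github.com/spf13/viper"
)

func main() {
    v := viper.New()
    v.Set("baseURL", "http://example.com/blog/")

    l := helpers.NewLanguage("en", v)
    p := helpers.NewPathSpec(hugofs.NewMem(v), l)

    // Both lookups now go through the PathSpec's own config, not a package-level Viper.
    fmt.Println(p.AbsURL("sect/doc1/", false)) // e.g. http://example.com/blog/sect/doc1/
    fmt.Println(p.RelURL("http://example.com/blog/sect/doc1/", false))
}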

@ -25,9 +25,10 @@ import (
) )
func TestURLize(t *testing.T) { func TestURLize(t *testing.T) {
initCommonTestConfig()
p := NewPathSpec(hugofs.NewMem(), viper.GetViper()) v := viper.New()
l := NewDefaultLanguage(v)
p := NewPathSpec(hugofs.NewMem(v), l)
tests := []struct { tests := []struct {
input string input string
@ -62,11 +63,10 @@ func TestAbsURL(t *testing.T) {
} }
func doTestAbsURL(t *testing.T, defaultInSubDir, addLanguage, multilingual bool, lang string) { func doTestAbsURL(t *testing.T, defaultInSubDir, addLanguage, multilingual bool, lang string) {
viper.Reset() v := viper.New()
viper.Set("multilingual", multilingual) v.Set("multilingual", multilingual)
viper.Set("currentContentLanguage", NewLanguage(lang)) v.Set("defaultContentLanguage", "en")
viper.Set("defaultContentLanguage", "en") v.Set("defaultContentLanguageInSubdir", defaultInSubDir)
viper.Set("defaultContentLanguageInSubdir", defaultInSubDir)
tests := []struct { tests := []struct {
input string input string
@ -86,10 +86,10 @@ func doTestAbsURL(t *testing.T, defaultInSubDir, addLanguage, multilingual bool,
{"http//foo", "http://base/path", "http://base/path/MULTIhttp/foo"}, {"http//foo", "http://base/path", "http://base/path/MULTIhttp/foo"},
} }
p := NewPathSpec(hugofs.NewMem(), viper.GetViper())
for _, test := range tests { for _, test := range tests {
viper.Set("baseURL", test.baseURL) v.Set("baseURL", test.baseURL)
l := NewLanguage(lang, v)
p := NewPathSpec(hugofs.NewMem(v), l)
output := p.AbsURL(test.input, addLanguage) output := p.AbsURL(test.input, addLanguage)
expected := test.expected expected := test.expected
@ -138,11 +138,10 @@ func TestRelURL(t *testing.T) {
} }
func doTestRelURL(t *testing.T, defaultInSubDir, addLanguage, multilingual bool, lang string) { func doTestRelURL(t *testing.T, defaultInSubDir, addLanguage, multilingual bool, lang string) {
viper.Reset() v := viper.New()
viper.Set("multilingual", multilingual) v.Set("multilingual", multilingual)
viper.Set("currentContentLanguage", NewLanguage(lang)) v.Set("defaultContentLanguage", "en")
viper.Set("defaultContentLanguage", "en") v.Set("defaultContentLanguageInSubdir", defaultInSubDir)
viper.Set("defaultContentLanguageInSubdir", defaultInSubDir)
tests := []struct { tests := []struct {
input string input string
@ -165,9 +164,10 @@ func doTestRelURL(t *testing.T, defaultInSubDir, addLanguage, multilingual bool,
} }
for i, test := range tests { for i, test := range tests {
viper.Set("baseURL", test.baseURL) v.Set("baseURL", test.baseURL)
viper.Set("canonifyURLs", test.canonify) v.Set("canonifyURLs", test.canonify)
p := NewPathSpec(hugofs.NewMem(), viper.GetViper()) l := NewLanguage(lang, v)
p := NewPathSpec(hugofs.NewMem(v), l)
output := p.RelURL(test.input, addLanguage) output := p.RelURL(test.input, addLanguage)
@ -252,8 +252,10 @@ func TestURLPrep(t *testing.T) {
} }
for i, d := range data { for i, d := range data {
viper.Set("uglyURLs", d.ugly) v := viper.New()
p := NewPathSpec(hugofs.NewMem(), viper.GetViper()) v.Set("uglyURLs", d.ugly)
l := NewDefaultLanguage(v)
p := NewPathSpec(hugofs.NewMem(v), l)
output := p.URLPrep(d.input) output := p.URLPrep(d.input)
if d.output != output { if d.output != output {


@ -16,7 +16,7 @@ package hugofs
import ( import (
"github.com/spf13/afero" "github.com/spf13/afero"
"github.com/spf13/viper" "github.com/spf13/hugo/config"
) )
// Os points to an Os Afero file system. // Os points to an Os Afero file system.
@ -39,30 +39,37 @@ type Fs struct {
// NewDefault creates a new Fs with the OS file system // NewDefault creates a new Fs with the OS file system
// as source and destination file systems. // as source and destination file systems.
func NewDefault() *Fs { func NewDefault(cfg config.Provider) *Fs {
fs := &afero.OsFs{} fs := &afero.OsFs{}
return newFs(fs) return newFs(fs, cfg)
} }
// NewMem creates a new Fs with the MemMapFs // NewMem creates a new Fs with the MemMapFs
// as source and destination file systems. // as source and destination file systems.
// Useful for testing. // Useful for testing.
func NewMem() *Fs { func NewMem(cfg config.Provider) *Fs {
fs := &afero.MemMapFs{} fs := &afero.MemMapFs{}
return newFs(fs) return newFs(fs, cfg)
} }
func newFs(base afero.Fs) *Fs { // NewFrom creates a new Fs based on the provided Afero Fs
// as source and destination file systems.
// Useful for testing.
func NewFrom(fs afero.Fs, cfg config.Provider) *Fs {
return newFs(fs, cfg)
}
func newFs(base afero.Fs, cfg config.Provider) *Fs {
return &Fs{ return &Fs{
Source: base, Source: base,
Destination: base, Destination: base,
Os: &afero.OsFs{}, Os: &afero.OsFs{},
WorkingDir: getWorkingDirFs(base), WorkingDir: getWorkingDirFs(base, cfg),
} }
} }
func getWorkingDirFs(base afero.Fs) *afero.BasePathFs { func getWorkingDirFs(base afero.Fs, cfg config.Provider) *afero.BasePathFs {
workingDir := viper.GetString("workingDir") workingDir := cfg.GetString("workingDir")
if workingDir != "" { if workingDir != "" {
return afero.NewBasePathFs(afero.NewReadOnlyFs(base), workingDir).(*afero.BasePathFs) return afero.NewBasePathFs(afero.NewReadOnlyFs(base), workingDir).(*afero.BasePathFs)

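A sketch of the changed constructors (the workingDir value is illustrative):

package main

import (
    "github.com/spf13/afero"
    "github.com/spf13/hugo/hugofs"
    "github.com/spf13/viper"
)

func main() {
    v := viper.New()
    v.Set("workingDir", "/work")

    memFs := hugofs.NewMem(v)                        // MemMapFs as source and destination
    osFs := hugofs.NewDefault(v)                     // OS file system as source and destination
    fromFs := hugofs.NewFrom(afero.NewMemMapFs(), v) // wrap an existing afero.Fs

    // Each Fs exposes a read-only WorkingDir rooted at the configured workingDir.
    _, _, _ = memFs, osFs, fromFs
}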

@ -22,10 +22,8 @@ import (
) )
func TestNewDefault(t *testing.T) { func TestNewDefault(t *testing.T) {
viper.Reset() v := viper.New()
defer viper.Reset() f := NewDefault(v)
f := NewDefault()
assert.NotNil(t, f.Source) assert.NotNil(t, f.Source)
assert.IsType(t, new(afero.OsFs), f.Source) assert.IsType(t, new(afero.OsFs), f.Source)
@ -39,10 +37,8 @@ func TestNewDefault(t *testing.T) {
} }
func TestNewMem(t *testing.T) { func TestNewMem(t *testing.T) {
viper.Reset() v := viper.New()
defer viper.Reset() f := NewMem(v)
f := NewMem()
assert.NotNil(t, f.Source) assert.NotNil(t, f.Source)
assert.IsType(t, new(afero.MemMapFs), f.Source) assert.IsType(t, new(afero.MemMapFs), f.Source)
@ -53,12 +49,11 @@ func TestNewMem(t *testing.T) {
} }
func TestWorkingDir(t *testing.T) { func TestWorkingDir(t *testing.T) {
viper.Reset() v := viper.New()
defer viper.Reset()
viper.Set("workingDir", "/a/b/") v.Set("workingDir", "/a/b/")
f := NewMem() f := NewMem(v)
assert.NotNil(t, f.WorkingDir) assert.NotNil(t, f.WorkingDir)
assert.IsType(t, new(afero.BasePathFs), f.WorkingDir) assert.IsType(t, new(afero.BasePathFs), f.WorkingDir)


@ -18,7 +18,6 @@ import (
"testing" "testing"
"github.com/spf13/hugo/deps" "github.com/spf13/hugo/deps"
"github.com/spf13/hugo/hugofs"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -33,38 +32,44 @@ const basicTemplate = "<html><body>{{.Content}}</body></html>"
const aliasTemplate = "<html><body>ALIASTEMPLATE</body></html>" const aliasTemplate = "<html><body>ALIASTEMPLATE</body></html>"
func TestAlias(t *testing.T) { func TestAlias(t *testing.T) {
testCommonResetState() t.Parallel()
fs := hugofs.NewMem() var (
cfg, fs = newTestCfg()
th = testHelper{cfg}
)
writeSource(t, fs, filepath.Join("content", "page.md"), pageWithAlias) writeSource(t, fs, filepath.Join("content", "page.md"), pageWithAlias)
writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), basicTemplate) writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), basicTemplate)
buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{}) buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
// the real page // the real page
assertFileContent(t, fs, filepath.Join("public", "page", "index.html"), false, "For some moments the old man") th.assertFileContent(t, fs, filepath.Join("public", "page", "index.html"), false, "For some moments the old man")
// the alias redirector // the alias redirector
assertFileContent(t, fs, filepath.Join("public", "foo", "bar", "index.html"), false, "<meta http-equiv=\"refresh\" content=\"0; ") th.assertFileContent(t, fs, filepath.Join("public", "foo", "bar", "index.html"), false, "<meta http-equiv=\"refresh\" content=\"0; ")
} }
func TestAliasTemplate(t *testing.T) { func TestAliasTemplate(t *testing.T) {
testCommonResetState() t.Parallel()
fs := hugofs.NewMem() var (
cfg, fs = newTestCfg()
th = testHelper{cfg}
)
writeSource(t, fs, filepath.Join("content", "page.md"), pageWithAlias) writeSource(t, fs, filepath.Join("content", "page.md"), pageWithAlias)
writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), basicTemplate) writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), basicTemplate)
writeSource(t, fs, filepath.Join("layouts", "alias.html"), aliasTemplate) writeSource(t, fs, filepath.Join("layouts", "alias.html"), aliasTemplate)
sites, err := NewHugoSitesFromConfiguration(deps.DepsCfg{Fs: fs}) sites, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg})
require.NoError(t, err) require.NoError(t, err)
require.NoError(t, sites.Build(BuildCfg{})) require.NoError(t, sites.Build(BuildCfg{}))
// the real page // the real page
assertFileContent(t, fs, filepath.Join("public", "page", "index.html"), false, "For some moments the old man") th.assertFileContent(t, fs, filepath.Join("public", "page", "index.html"), false, "For some moments the old man")
// the alias redirector // the alias redirector
assertFileContent(t, fs, filepath.Join("public", "foo", "bar", "index.html"), false, "ALIASTEMPLATE") th.assertFileContent(t, fs, filepath.Join("public", "foo", "bar", "index.html"), false, "ALIASTEMPLATE")
} }
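The same shape repeats across the converted tests; a condensed sketch using the helpers introduced in this commit (the file names and contents are illustrative):

func TestSomething(t *testing.T) {
    t.Parallel()

    cfg, fs := newTestCfg()
    th := testHelper{cfg}

    writeSource(t, fs, filepath.Join("content", "p.md"), "---\ntitle: P\n---\nHello")
    writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), "{{ .Title }}")

    buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})

    th.assertFileContent(t, fs, filepath.Join("public", "p", "index.html"), false, "P")
}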


@ -19,10 +19,10 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/spf13/viper" "github.com/spf13/afero"
"github.com/spf13/hugo/deps" "github.com/spf13/hugo/deps"
"github.com/spf13/hugo/hugofs" "github.com/spf13/hugo/hugofs"
"github.com/stretchr/testify/require"
) )
var ( var (
@ -111,26 +111,28 @@ ColorS:
` `
) )
func caseMixingTestsWriteCommonSources(t *testing.T, fs *hugofs.Fs) { func caseMixingTestsWriteCommonSources(t *testing.T, fs afero.Fs) {
writeSource(t, fs, filepath.Join("content", "sect1", "page1.md"), caseMixingPage1) writeToFs(t, fs, filepath.Join("content", "sect1", "page1.md"), caseMixingPage1)
writeSource(t, fs, filepath.Join("content", "sect2", "page2.md"), caseMixingPage2) writeToFs(t, fs, filepath.Join("content", "sect2", "page2.md"), caseMixingPage2)
writeSource(t, fs, filepath.Join("content", "sect1", "page1.en.md"), caseMixingPage1En) writeToFs(t, fs, filepath.Join("content", "sect1", "page1.en.md"), caseMixingPage1En)
writeSource(t, fs, "layouts/shortcodes/shortcode.html", ` writeToFs(t, fs, "layouts/shortcodes/shortcode.html", `
Shortcode Page: {{ .Page.Params.COLOR }}|{{ .Page.Params.Colors.Blue }} Shortcode Page: {{ .Page.Params.COLOR }}|{{ .Page.Params.Colors.Blue }}
Shortcode Site: {{ .Page.Site.Params.COLOR }}|{{ .Site.Params.COLORS.YELLOW }} Shortcode Site: {{ .Page.Site.Params.COLOR }}|{{ .Site.Params.COLORS.YELLOW }}
`) `)
writeSource(t, fs, "layouts/partials/partial.html", ` writeToFs(t, fs, "layouts/partials/partial.html", `
Partial Page: {{ .Params.COLOR }}|{{ .Params.Colors.Blue }} Partial Page: {{ .Params.COLOR }}|{{ .Params.Colors.Blue }}
Partial Site: {{ .Site.Params.COLOR }}|{{ .Site.Params.COLORS.YELLOW }} Partial Site: {{ .Site.Params.COLOR }}|{{ .Site.Params.COLORS.YELLOW }}
`) `)
writeSource(t, fs, "config.toml", caseMixingSiteConfigTOML) writeToFs(t, fs, "config.toml", caseMixingSiteConfigTOML)
} }
func TestCaseInsensitiveConfigurationVariations(t *testing.T) { func TestCaseInsensitiveConfigurationVariations(t *testing.T) {
t.Parallel()
// See issues 2615, 1129, 2590 and maybe some others // See issues 2615, 1129, 2590 and maybe some others
// Also see 2598 // Also see 2598
// //
@ -143,22 +145,22 @@ func TestCaseInsensitiveConfigurationVariations(t *testing.T) {
// language: new and overridden values, in regular fields and nested paramsmap // language: new and overridden values, in regular fields and nested paramsmap
// page frontmatter: regular fields, blackfriday config, param with nested map // page frontmatter: regular fields, blackfriday config, param with nested map
testCommonResetState() mm := afero.NewMemMapFs()
depsCfg := newTestDepsConfig() caseMixingTestsWriteCommonSources(t, mm)
viper.SetFs(depsCfg.Fs.Source)
caseMixingTestsWriteCommonSources(t, depsCfg.Fs) cfg, err := LoadConfig(mm, "", "config.toml")
require.NoError(t, err)
if err := LoadGlobalConfig("", "config.toml"); err != nil { fs := hugofs.NewFrom(mm, cfg)
t.Fatalf("Failed to load config: %s", err)
}
writeSource(t, depsCfg.Fs, filepath.Join("layouts", "_default", "baseof.html"), ` th := testHelper{cfg}
writeSource(t, fs, filepath.Join("layouts", "_default", "baseof.html"), `
Block Page Colors: {{ .Params.COLOR }}|{{ .Params.Colors.Blue }} Block Page Colors: {{ .Params.COLOR }}|{{ .Params.Colors.Blue }}
{{ block "main" . }}default{{end}}`) {{ block "main" . }}default{{end}}`)
writeSource(t, depsCfg.Fs, filepath.Join("layouts", "sect2", "single.html"), ` writeSource(t, fs, filepath.Join("layouts", "sect2", "single.html"), `
{{ define "main"}} {{ define "main"}}
Page Colors: {{ .Params.CoLOR }}|{{ .Params.Colors.Blue }} Page Colors: {{ .Params.CoLOR }}|{{ .Params.Colors.Blue }}
Site Colors: {{ .Site.Params.COlOR }}|{{ .Site.Params.COLORS.YELLOW }} Site Colors: {{ .Site.Params.COlOR }}|{{ .Site.Params.COLORS.YELLOW }}
@ -167,7 +169,7 @@ Site Colors: {{ .Site.Params.COlOR }}|{{ .Site.Params.COLORS.YELLOW }}
{{ end }} {{ end }}
`) `)
writeSource(t, depsCfg.Fs, filepath.Join("layouts", "_default", "single.html"), ` writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), `
Page Title: {{ .Title }} Page Title: {{ .Title }}
Site Title: {{ .Site.Title }} Site Title: {{ .Site.Title }}
Site Lang Mood: {{ .Site.Language.Params.MOoD }} Site Lang Mood: {{ .Site.Language.Params.MOoD }}
@ -177,7 +179,7 @@ Site Colors: {{ .Site.Params.COLOR }}|{{ .Site.Params.COLORS.YELLOW }}
{{ partial "partial.html" . }} {{ partial "partial.html" . }}
`) `)
sites, err := NewHugoSitesFromConfiguration(depsCfg) sites, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg})
if err != nil { if err != nil {
t.Fatalf("Failed to create sites: %s", err) t.Fatalf("Failed to create sites: %s", err)
@ -189,7 +191,7 @@ Site Colors: {{ .Site.Params.COLOR }}|{{ .Site.Params.COLORS.YELLOW }}
t.Fatalf("Failed to build sites: %s", err) t.Fatalf("Failed to build sites: %s", err)
} }
assertFileContent(t, sites.Fs, filepath.Join("public", "nn", "sect1", "page1", "index.html"), true, th.assertFileContent(t, sites.Fs, filepath.Join("public", "nn", "sect1", "page1", "index.html"), true,
"Page Colors: red|heavenly", "Page Colors: red|heavenly",
"Site Colors: green|yellow", "Site Colors: green|yellow",
"Site Lang Mood: Happy", "Site Lang Mood: Happy",
@ -202,7 +204,7 @@ Site Colors: {{ .Site.Params.COLOR }}|{{ .Site.Params.COLORS.YELLOW }}
"&laquo;Hi&raquo;", // angled quotes "&laquo;Hi&raquo;", // angled quotes
) )
assertFileContent(t, sites.Fs, filepath.Join("public", "en", "sect1", "page1", "index.html"), true, th.assertFileContent(t, sites.Fs, filepath.Join("public", "en", "sect1", "page1", "index.html"), true,
"Site Colors: Pink|golden", "Site Colors: Pink|golden",
"Page Colors: black|bluesy", "Page Colors: black|bluesy",
"Site Lang Mood: Thoughtful", "Site Lang Mood: Thoughtful",
@ -211,7 +213,7 @@ Site Colors: {{ .Site.Params.COLOR }}|{{ .Site.Params.COLORS.YELLOW }}
"&ldquo;Hi&rdquo;", "&ldquo;Hi&rdquo;",
) )
assertFileContent(t, sites.Fs, filepath.Join("public", "nn", "sect2", "page2", "index.html"), true, th.assertFileContent(t, sites.Fs, filepath.Join("public", "nn", "sect2", "page2", "index.html"), true,
"Page Colors: black|sky", "Page Colors: black|sky",
"Site Colors: green|yellow", "Site Colors: green|yellow",
"Shortcode Page: black|sky", "Shortcode Page: black|sky",
@ -222,6 +224,8 @@ Site Colors: {{ .Site.Params.COLOR }}|{{ .Site.Params.COLORS.YELLOW }}
} }
func TestCaseInsensitiveConfigurationForAllTemplateEngines(t *testing.T) { func TestCaseInsensitiveConfigurationForAllTemplateEngines(t *testing.T) {
t.Parallel()
noOp := func(s string) string { noOp := func(s string) string {
return s return s
} }
@ -252,16 +256,16 @@ func TestCaseInsensitiveConfigurationForAllTemplateEngines(t *testing.T) {
func doTestCaseInsensitiveConfigurationForTemplateEngine(t *testing.T, suffix string, templateFixer func(s string) string) { func doTestCaseInsensitiveConfigurationForTemplateEngine(t *testing.T, suffix string, templateFixer func(s string) string) {
testCommonResetState() mm := afero.NewMemMapFs()
fs := hugofs.NewMem() caseMixingTestsWriteCommonSources(t, mm)
viper.SetFs(fs.Source)
caseMixingTestsWriteCommonSources(t, fs) cfg, err := LoadConfig(mm, "", "config.toml")
require.NoError(t, err)
if err := LoadGlobalConfig("", "config.toml"); err != nil { fs := hugofs.NewFrom(mm, cfg)
t.Fatalf("Failed to load config: %s", err)
} th := testHelper{cfg}
t.Log("Testing", suffix) t.Log("Testing", suffix)
@ -280,7 +284,7 @@ p
writeSource(t, fs, filepath.Join("layouts", "_default", fmt.Sprintf("single.%s", suffix)), templ) writeSource(t, fs, filepath.Join("layouts", "_default", fmt.Sprintf("single.%s", suffix)), templ)
sites, err := NewHugoSitesFromConfiguration(deps.DepsCfg{Fs: fs}) sites, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg})
if err != nil { if err != nil {
t.Fatalf("Failed to create sites: %s", err) t.Fatalf("Failed to create sites: %s", err)
@ -292,7 +296,7 @@ p
t.Fatalf("Failed to build sites: %s", err) t.Fatalf("Failed to build sites: %s", err)
} }
assertFileContent(t, sites.Fs, filepath.Join("public", "nn", "sect1", "page1", "index.html"), true, th.assertFileContent(t, sites.Fs, filepath.Join("public", "nn", "sect1", "page1", "index.html"), true,
"Page Colors: red|heavenly", "Page Colors: red|heavenly",
"Site Colors: green|yellow", "Site Colors: green|yellow",
"Shortcode Page: red|heavenly", "Shortcode Page: red|heavenly",


@ -16,94 +16,100 @@ package hugolib
import ( import (
"fmt" "fmt"
"github.com/spf13/afero"
"github.com/spf13/hugo/helpers" "github.com/spf13/hugo/helpers"
"github.com/spf13/viper" "github.com/spf13/viper"
) )
// LoadGlobalConfig loads Hugo configuration into the global Viper. // LoadConfig loads Hugo configuration into a new Viper and then adds
func LoadGlobalConfig(relativeSourcePath, configFilename string) error { // a set of defaults.
func LoadConfig(fs afero.Fs, relativeSourcePath, configFilename string) (*viper.Viper, error) {
v := viper.New()
v.SetFs(fs)
if relativeSourcePath == "" { if relativeSourcePath == "" {
relativeSourcePath = "." relativeSourcePath = "."
} }
viper.AutomaticEnv() v.AutomaticEnv()
viper.SetEnvPrefix("hugo") v.SetEnvPrefix("hugo")
viper.SetConfigFile(configFilename) v.SetConfigFile(configFilename)
// See https://github.com/spf13/viper/issues/73#issuecomment-126970794 // See https://github.com/spf13/viper/issues/73#issuecomment-126970794
if relativeSourcePath == "" { if relativeSourcePath == "" {
viper.AddConfigPath(".") v.AddConfigPath(".")
} else { } else {
viper.AddConfigPath(relativeSourcePath) v.AddConfigPath(relativeSourcePath)
} }
err := viper.ReadInConfig() err := v.ReadInConfig()
if err != nil { if err != nil {
if _, ok := err.(viper.ConfigParseError); ok { if _, ok := err.(viper.ConfigParseError); ok {
return err return nil, err
} }
return fmt.Errorf("Unable to locate Config file. Perhaps you need to create a new site.\n Run `hugo help new` for details. (%s)\n", err) return nil, fmt.Errorf("Unable to locate Config file. Perhaps you need to create a new site.\n Run `hugo help new` for details. (%s)\n", err)
} }
viper.RegisterAlias("indexes", "taxonomies") v.RegisterAlias("indexes", "taxonomies")
loadDefaultSettings() loadDefaultSettingsFor(v)
return nil return v, nil
} }
func loadDefaultSettings() { func loadDefaultSettingsFor(v *viper.Viper) {
viper.SetDefault("cleanDestinationDir", false)
viper.SetDefault("watch", false) c := helpers.NewContentSpec(v)
viper.SetDefault("metaDataFormat", "toml")
viper.SetDefault("disable404", false) v.SetDefault("cleanDestinationDir", false)
viper.SetDefault("disableRSS", false) v.SetDefault("watch", false)
viper.SetDefault("disableSitemap", false) v.SetDefault("metaDataFormat", "toml")
viper.SetDefault("disableRobotsTXT", false) v.SetDefault("disable404", false)
viper.SetDefault("contentDir", "content") v.SetDefault("disableRSS", false)
viper.SetDefault("layoutDir", "layouts") v.SetDefault("disableSitemap", false)
viper.SetDefault("staticDir", "static") v.SetDefault("disableRobotsTXT", false)
viper.SetDefault("archetypeDir", "archetypes") v.SetDefault("contentDir", "content")
viper.SetDefault("publishDir", "public") v.SetDefault("layoutDir", "layouts")
viper.SetDefault("dataDir", "data") v.SetDefault("staticDir", "static")
viper.SetDefault("i18nDir", "i18n") v.SetDefault("archetypeDir", "archetypes")
viper.SetDefault("themesDir", "themes") v.SetDefault("publishDir", "public")
viper.SetDefault("defaultLayout", "post") v.SetDefault("dataDir", "data")
viper.SetDefault("buildDrafts", false) v.SetDefault("i18nDir", "i18n")
viper.SetDefault("buildFuture", false) v.SetDefault("themesDir", "themes")
viper.SetDefault("buildExpired", false) v.SetDefault("defaultLayout", "post")
viper.SetDefault("uglyURLs", false) v.SetDefault("buildDrafts", false)
viper.SetDefault("verbose", false) v.SetDefault("buildFuture", false)
viper.SetDefault("ignoreCache", false) v.SetDefault("buildExpired", false)
viper.SetDefault("canonifyURLs", false) v.SetDefault("uglyURLs", false)
viper.SetDefault("relativeURLs", false) v.SetDefault("verbose", false)
viper.SetDefault("removePathAccents", false) v.SetDefault("ignoreCache", false)
viper.SetDefault("taxonomies", map[string]string{"tag": "tags", "category": "categories"}) v.SetDefault("canonifyURLs", false)
viper.SetDefault("permalinks", make(PermalinkOverrides, 0)) v.SetDefault("relativeURLs", false)
viper.SetDefault("sitemap", Sitemap{Priority: -1, Filename: "sitemap.xml"}) v.SetDefault("removePathAccents", false)
viper.SetDefault("defaultExtension", "html") v.SetDefault("taxonomies", map[string]string{"tag": "tags", "category": "categories"})
viper.SetDefault("pygmentsStyle", "monokai") v.SetDefault("permalinks", make(PermalinkOverrides, 0))
viper.SetDefault("pygmentsUseClasses", false) v.SetDefault("sitemap", Sitemap{Priority: -1, Filename: "sitemap.xml"})
viper.SetDefault("pygmentsCodeFences", false) v.SetDefault("defaultExtension", "html")
viper.SetDefault("pygmentsOptions", "") v.SetDefault("pygmentsStyle", "monokai")
viper.SetDefault("disableLiveReload", false) v.SetDefault("pygmentsUseClasses", false)
viper.SetDefault("pluralizeListTitles", true) v.SetDefault("pygmentsCodeFences", false)
viper.SetDefault("preserveTaxonomyNames", false) v.SetDefault("pygmentsOptions", "")
viper.SetDefault("forceSyncStatic", false) v.SetDefault("disableLiveReload", false)
viper.SetDefault("footnoteAnchorPrefix", "") v.SetDefault("pluralizeListTitles", true)
viper.SetDefault("footnoteReturnLinkContents", "") v.SetDefault("preserveTaxonomyNames", false)
viper.SetDefault("newContentEditor", "") v.SetDefault("forceSyncStatic", false)
viper.SetDefault("paginate", 10) v.SetDefault("footnoteAnchorPrefix", "")
viper.SetDefault("paginatePath", "page") v.SetDefault("footnoteReturnLinkContents", "")
viper.SetDefault("blackfriday", helpers.NewBlackfriday(viper.GetViper())) v.SetDefault("newContentEditor", "")
viper.SetDefault("rSSUri", "index.xml") v.SetDefault("paginate", 10)
viper.SetDefault("sectionPagesMenu", "") v.SetDefault("paginatePath", "page")
viper.SetDefault("disablePathToLower", false) v.SetDefault("blackfriday", c.NewBlackfriday())
viper.SetDefault("hasCJKLanguage", false) v.SetDefault("rSSUri", "index.xml")
viper.SetDefault("enableEmoji", false) v.SetDefault("sectionPagesMenu", "")
viper.SetDefault("pygmentsCodeFencesGuessSyntax", false) v.SetDefault("disablePathToLower", false)
viper.SetDefault("useModTimeAsFallback", false) v.SetDefault("hasCJKLanguage", false)
viper.SetDefault("currentContentLanguage", helpers.NewDefaultLanguage()) v.SetDefault("enableEmoji", false)
viper.SetDefault("defaultContentLanguage", "en") v.SetDefault("pygmentsCodeFencesGuessSyntax", false)
viper.SetDefault("defaultContentLanguageInSubdir", false) v.SetDefault("useModTimeAsFallback", false)
viper.SetDefault("enableMissingTranslationPlaceholders", false) v.SetDefault("defaultContentLanguage", "en")
viper.SetDefault("enableGitInfo", false) v.SetDefault("defaultContentLanguageInSubdir", false)
v.SetDefault("enableMissingTranslationPlaceholders", false)
v.SetDefault("enableGitInfo", false)
} }
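A minimal sketch of the new entry point, assuming an in-memory source filesystem (the config content mirrors the test below):

package main

import (
    "fmt"
    "log"

    "github.com/spf13/afero"
    "github.com/spf13/hugo/hugolib"
)

func main() {
    mm := afero.NewMemMapFs()
    if err := afero.WriteFile(mm, "config.toml", []byte(`PaginatePath = "side"`), 0644); err != nil {
        log.Fatal(err)
    }

    cfg, err := hugolib.LoadConfig(mm, "", "config.toml")
    if err != nil {
        log.Fatal(err)
    }

    fmt.Println(cfg.GetString("paginatePath")) // "side", read from the file
    fmt.Println(cfg.GetString("layoutDir"))    // "layouts", set by loadDefaultSettingsFor
}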


@ -16,28 +16,28 @@ package hugolib
import ( import (
"testing" "testing"
"github.com/spf13/hugo/helpers" "github.com/spf13/afero"
"github.com/spf13/hugo/hugofs"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
func TestLoadGlobalConfig(t *testing.T) { func TestLoadConfig(t *testing.T) {
t.Parallel()
// Add a random config variable for testing. // Add a random config variable for testing.
// side = page in Norwegian. // side = page in Norwegian.
configContent := ` configContent := `
PaginatePath = "side" PaginatePath = "side"
` `
fs := hugofs.NewMem() mm := afero.NewMemMapFs()
viper.SetFs(fs.Source)
writeSource(t, fs, "hugo.toml", configContent) writeToFs(t, mm, "hugo.toml", configContent)
require.NoError(t, LoadGlobalConfig("", "hugo.toml")) cfg, err := LoadConfig(mm, "", "hugo.toml")
assert.Equal(t, "side", helpers.Config().GetString("paginatePath")) require.NoError(t, err)
assert.Equal(t, "side", cfg.GetString("paginatePath"))
// default // default
assert.Equal(t, "layouts", viper.GetString("layoutDir")) assert.Equal(t, "layouts", cfg.GetString("layoutDir"))
} }


@ -19,15 +19,24 @@ import (
"strings" "strings"
"testing" "testing"
"io/ioutil"
"log"
"os"
"github.com/spf13/hugo/deps"
jww "github.com/spf13/jwalterweatherman"
"github.com/spf13/hugo/parser" "github.com/spf13/hugo/parser"
"github.com/spf13/hugo/source" "github.com/spf13/hugo/source"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
func TestDataDirJSON(t *testing.T) { func TestDataDirJSON(t *testing.T) {
t.Parallel()
sources := []source.ByteSource{ sources := []source.ByteSource{
{Name: filepath.FromSlash("test/foo.json"), Content: []byte(`{ "bar": "foofoo" }`)}, {Name: filepath.FromSlash("data/test/foo.json"), Content: []byte(`{ "bar": "foofoo" }`)},
{Name: filepath.FromSlash("test.json"), Content: []byte(`{ "hello": [ { "world": "foo" } ] }`)}, {Name: filepath.FromSlash("data/test.json"), Content: []byte(`{ "hello": [ { "world": "foo" } ] }`)},
} }
expected, err := parser.HandleJSONMetaData([]byte(`{ "test": { "hello": [{ "world": "foo" }] , "foo": { "bar":"foofoo" } } }`)) expected, err := parser.HandleJSONMetaData([]byte(`{ "test": { "hello": [{ "world": "foo" }] , "foo": { "bar":"foofoo" } } }`))
@ -36,12 +45,14 @@ func TestDataDirJSON(t *testing.T) {
t.Fatalf("Error %s", err) t.Fatalf("Error %s", err)
} }
doTestDataDir(t, expected, []source.Input{&source.InMemorySource{ByteSource: sources}}) doTestDataDir(t, expected, sources)
} }
func TestDataDirToml(t *testing.T) { func TestDataDirToml(t *testing.T) {
t.Parallel()
sources := []source.ByteSource{ sources := []source.ByteSource{
{Name: filepath.FromSlash("test/kung.toml"), Content: []byte("[foo]\nbar = 1")}, {Name: filepath.FromSlash("data/test/kung.toml"), Content: []byte("[foo]\nbar = 1")},
} }
expected, err := parser.HandleTOMLMetaData([]byte("[test]\n[test.kung]\n[test.kung.foo]\nbar = 1")) expected, err := parser.HandleTOMLMetaData([]byte("[test]\n[test.kung]\n[test.kung.foo]\nbar = 1"))
@ -50,78 +61,102 @@ func TestDataDirToml(t *testing.T) {
t.Fatalf("Error %s", err) t.Fatalf("Error %s", err)
} }
doTestDataDir(t, expected, []source.Input{&source.InMemorySource{ByteSource: sources}}) doTestDataDir(t, expected, sources)
} }
func TestDataDirYAMLWithOverridenValue(t *testing.T) { func TestDataDirYAMLWithOverridenValue(t *testing.T) {
t.Parallel()
sources := []source.ByteSource{ sources := []source.ByteSource{
// filepath.Walk walks the files in lexical order, '/' comes before '.'. Simulate this: // filepath.Walk walks the files in lexical order, '/' comes before '.'. Simulate this:
{Name: filepath.FromSlash("a.yaml"), Content: []byte("a: 1")}, {Name: filepath.FromSlash("data/a.yaml"), Content: []byte("a: 1")},
{Name: filepath.FromSlash("test/v1.yaml"), Content: []byte("v1-2: 2")}, {Name: filepath.FromSlash("data/test/v1.yaml"), Content: []byte("v1-2: 2")},
{Name: filepath.FromSlash("test/v2.yaml"), Content: []byte("v2:\n- 2\n- 3")}, {Name: filepath.FromSlash("data/test/v2.yaml"), Content: []byte("v2:\n- 2\n- 3")},
{Name: filepath.FromSlash("test.yaml"), Content: []byte("v1: 1")}, {Name: filepath.FromSlash("data/test.yaml"), Content: []byte("v1: 1")},
} }
expected := map[string]interface{}{"a": map[string]interface{}{"a": 1}, expected := map[string]interface{}{"a": map[string]interface{}{"a": 1},
"test": map[string]interface{}{"v1": map[string]interface{}{"v1-2": 2}, "v2": map[string]interface{}{"v2": []interface{}{2, 3}}}} "test": map[string]interface{}{"v1": map[string]interface{}{"v1-2": 2}, "v2": map[string]interface{}{"v2": []interface{}{2, 3}}}}
doTestDataDir(t, expected, []source.Input{&source.InMemorySource{ByteSource: sources}}) doTestDataDir(t, expected, sources)
} }
// issue 892 // issue 892
func TestDataDirMultipleSources(t *testing.T) { func TestDataDirMultipleSources(t *testing.T) {
s1 := []source.ByteSource{ t.Parallel()
{Name: filepath.FromSlash("test/first.toml"), Content: []byte("bar = 1")},
}
s2 := []source.ByteSource{ sources := []source.ByteSource{
{Name: filepath.FromSlash("test/first.toml"), Content: []byte("bar = 2")}, {Name: filepath.FromSlash("data/test/first.toml"), Content: []byte("bar = 1")},
{Name: filepath.FromSlash("test/second.toml"), Content: []byte("tender = 2")}, {Name: filepath.FromSlash("themes/mytheme/data/test/first.toml"), Content: []byte("bar = 2")},
{Name: filepath.FromSlash("data/test/second.toml"), Content: []byte("tender = 2")},
} }
expected, _ := parser.HandleTOMLMetaData([]byte("[test.first]\nbar = 1\n[test.second]\ntender=2")) expected, _ := parser.HandleTOMLMetaData([]byte("[test.first]\nbar = 1\n[test.second]\ntender=2"))
doTestDataDir(t, expected, []source.Input{&source.InMemorySource{ByteSource: s1}, &source.InMemorySource{ByteSource: s2}}) doTestDataDir(t, expected, sources,
"theme", "mytheme")
} }
func TestDataDirUnknownFormat(t *testing.T) { func TestDataDirUnknownFormat(t *testing.T) {
t.Parallel()
sources := []source.ByteSource{ sources := []source.ByteSource{
{Name: filepath.FromSlash("test.roml"), Content: []byte("boo")}, {Name: filepath.FromSlash("data/test.roml"), Content: []byte("boo")},
} }
s, err := NewSiteDefaultLang() doTestDataDir(t, true, sources)
require.NoError(t, err)
require.NoError(t, s.loadData([]source.Input{&source.InMemorySource{ByteSource: sources}}))
} }
func doTestDataDir(t *testing.T, expected interface{}, sources []source.Input) { func doTestDataDir(t *testing.T, expected interface{}, sources []source.ByteSource, configKeyValues ...interface{}) {
s, err := NewSiteDefaultLang() var (
require.NoError(t, err) cfg, fs = newTestCfg()
require.NoError(t, s.loadData(sources)) )
if !reflect.DeepEqual(expected, s.Data) { for i := 0; i < len(configKeyValues); i += 2 {
cfg.Set(configKeyValues[i].(string), configKeyValues[i+1])
}
var (
logger = jww.NewNotepad(jww.LevelError, jww.LevelError, os.Stdout, ioutil.Discard, "", log.Ldate|log.Ltime)
depsCfg = deps.DepsCfg{Fs: fs, Cfg: cfg, Logger: logger}
)
writeSource(t, fs, filepath.Join("content", "dummy.md"), "content")
writeSourcesToSource(t, "", fs, sources...)
expectBuildError := false
if ok, shouldFail := expected.(bool); ok && shouldFail {
expectBuildError = true
}
s := buildSingleSiteExpected(t, expectBuildError, depsCfg, BuildCfg{SkipRender: true})
if !expectBuildError && !reflect.DeepEqual(expected, s.Data) {
t.Errorf("Expected structure\n%#v got\n%#v", expected, s.Data) t.Errorf("Expected structure\n%#v got\n%#v", expected, s.Data)
} }
} }
func TestDataFromShortcode(t *testing.T) { func TestDataFromShortcode(t *testing.T) {
testCommonResetState() t.Parallel()
cfg := newTestDepsConfig() var (
cfg, fs = newTestCfg()
)
writeSource(t, cfg.Fs, "data/hugo.toml", "slogan = \"Hugo Rocks!\"") writeSource(t, fs, "data/hugo.toml", "slogan = \"Hugo Rocks!\"")
writeSource(t, cfg.Fs, "layouts/_default/single.html", ` writeSource(t, fs, "layouts/_default/single.html", `
* Slogan from template: {{ .Site.Data.hugo.slogan }} * Slogan from template: {{ .Site.Data.hugo.slogan }}
* {{ .Content }}`) * {{ .Content }}`)
writeSource(t, cfg.Fs, "layouts/shortcodes/d.html", `{{ .Page.Site.Data.hugo.slogan }}`) writeSource(t, fs, "layouts/shortcodes/d.html", `{{ .Page.Site.Data.hugo.slogan }}`)
writeSource(t, cfg.Fs, "content/c.md", `--- writeSource(t, fs, "content/c.md", `---
--- ---
Slogan from shortcode: {{< d >}} Slogan from shortcode: {{< d >}}
`) `)
buildSingleSite(t, cfg, BuildCfg{}) buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
content := readSource(t, cfg.Fs, "public/c/index.html") content := readSource(t, fs, "public/c/index.html")
require.True(t, strings.Contains(content, "Slogan from template: Hugo Rocks!"), content) require.True(t, strings.Contains(content, "Slogan from template: Hugo Rocks!"), content)
require.True(t, strings.Contains(content, "Slogan from shortcode: Hugo Rocks!"), content) require.True(t, strings.Contains(content, "Slogan from shortcode: Hugo Rocks!"), content)

File diff suppressed because one or more lines are too long


@ -20,17 +20,16 @@ import (
"github.com/bep/gitmap" "github.com/bep/gitmap"
"github.com/spf13/hugo/helpers" "github.com/spf13/hugo/helpers"
"github.com/spf13/viper"
) )
func (h *HugoSites) assembleGitInfo() { func (h *HugoSites) assembleGitInfo() {
if !viper.GetBool("enableGitInfo") { if !h.Cfg.GetBool("enableGitInfo") {
return return
} }
var ( var (
workingDir = viper.GetString("workingDir") workingDir = h.Cfg.GetString("workingDir")
contentDir = viper.GetString("contentDir") contentDir = h.Cfg.GetString("contentDir")
) )
gitRepo, err := gitmap.Map(workingDir, "") gitRepo, err := gitmap.Map(workingDir, "")


@ -19,7 +19,6 @@ import (
"github.com/spf13/hugo/helpers" "github.com/spf13/hugo/helpers"
"github.com/spf13/hugo/source" "github.com/spf13/hugo/source"
"github.com/spf13/viper"
) )
func init() { func init() {
@ -43,6 +42,13 @@ func (b basicPageHandler) Read(f *source.File, s *Site) HandledResult {
return HandledResult{file: f, err: err} return HandledResult{file: f, err: err}
} }
// In a multilanguage setup, we use the first site to
// do the initial processing.
// That site may be different from the one where the page will end up,
// so we do the assignment here.
// We should clean this up, but that will have to wait.
s.assignSiteByLanguage(page)
return HandledResult{file: f, page: page, err: err} return HandledResult{file: f, page: page, err: err}
} }
@ -117,7 +123,7 @@ func commonConvert(p *Page) HandledResult {
// TODO(bep) these page handlers need to be re-evaluated, as it is hard to // TODO(bep) these page handlers need to be re-evaluated, as it is hard to
// process a page in isolation. See the new preRender func. // process a page in isolation. See the new preRender func.
if viper.GetBool("enableEmoji") { if p.s.Cfg.GetBool("enableEmoji") {
p.workContent = helpers.Emojify(p.workContent) p.workContent = helpers.Emojify(p.workContent)
} }


@ -19,18 +19,18 @@ import (
"github.com/spf13/hugo/deps" "github.com/spf13/hugo/deps"
"github.com/spf13/hugo/helpers" "github.com/spf13/hugo/helpers"
"github.com/spf13/hugo/hugofs"
"github.com/spf13/viper"
) )
func TestDefaultHandler(t *testing.T) { func TestDefaultHandler(t *testing.T) {
testCommonResetState() t.Parallel()
viper.Set("defaultExtension", "html") var (
viper.Set("verbose", true) cfg, fs = newTestCfg()
viper.Set("uglyURLs", true) )
fs := hugofs.NewMem() cfg.Set("defaultExtension", "html")
cfg.Set("verbose", true)
cfg.Set("uglyURLs", true)
writeSource(t, fs, filepath.FromSlash("content/sect/doc1.html"), "---\nmarkup: markdown\n---\n# title\nsome *content*") writeSource(t, fs, filepath.FromSlash("content/sect/doc1.html"), "---\nmarkup: markdown\n---\n# title\nsome *content*")
writeSource(t, fs, filepath.FromSlash("content/sect/doc2.html"), "<!doctype html><html><body>more content</body></html>") writeSource(t, fs, filepath.FromSlash("content/sect/doc2.html"), "<!doctype html><html><body>more content</body></html>")
@ -46,7 +46,7 @@ func TestDefaultHandler(t *testing.T) {
writeSource(t, fs, filepath.FromSlash("head"), "<head><script src=\"script.js\"></script></head>") writeSource(t, fs, filepath.FromSlash("head"), "<head><script src=\"script.js\"></script></head>")
writeSource(t, fs, filepath.FromSlash("head_abs"), "<head><script src=\"/script.js\"></script></head") writeSource(t, fs, filepath.FromSlash("head_abs"), "<head><script src=\"/script.js\"></script></head")
buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{}) buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
tests := []struct { tests := []struct {
doc string doc string


@ -22,9 +22,7 @@ import (
"github.com/spf13/hugo/deps" "github.com/spf13/hugo/deps"
"github.com/spf13/hugo/helpers" "github.com/spf13/hugo/helpers"
"github.com/spf13/viper" "github.com/spf13/hugo/i18n"
"github.com/spf13/hugo/source"
"github.com/spf13/hugo/tpl" "github.com/spf13/hugo/tpl"
"github.com/spf13/hugo/tplapi" "github.com/spf13/hugo/tplapi"
) )
@ -48,7 +46,7 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
return nil, errors.New("Cannot provide Language in Cfg when sites are provided") return nil, errors.New("Cannot provide Language in Cfg when sites are provided")
} }
langConfig, err := newMultiLingualFromSites(sites...) langConfig, err := newMultiLingualFromSites(cfg.Cfg, sites...)
if err != nil { if err != nil {
return nil, err return nil, err
@ -62,6 +60,9 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
s.owner = h s.owner = h
} }
// TODO(bep)
cfg.Cfg.Set("multilingual", sites[0].multilingualEnabled())
applyDepsIfNeeded(cfg, sites...) applyDepsIfNeeded(cfg, sites...)
h.Deps = sites[0].Deps h.Deps = sites[0].Deps
@ -70,11 +71,14 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
} }
func applyDepsIfNeeded(cfg deps.DepsCfg, sites ...*Site) error { func applyDepsIfNeeded(cfg deps.DepsCfg, sites ...*Site) error {
if cfg.TemplateProvider == nil { if cfg.TemplateProvider == nil {
cfg.TemplateProvider = tpl.DefaultTemplateProvider cfg.TemplateProvider = tpl.DefaultTemplateProvider
} }
if cfg.TranslationProvider == nil {
cfg.TranslationProvider = i18n.NewTranslationProvider()
}
var ( var (
d *deps.Deps d *deps.Deps
err error err error
@ -89,7 +93,9 @@ func applyDepsIfNeeded(cfg deps.DepsCfg, sites ...*Site) error {
cfg.Language = s.Language cfg.Language = s.Language
cfg.WithTemplate = s.withSiteTemplates(cfg.WithTemplate) cfg.WithTemplate = s.withSiteTemplates(cfg.WithTemplate)
d = deps.New(cfg) d = deps.New(cfg)
if err := d.LoadTemplates(); err != nil { s.Deps = d
if err := d.LoadResources(); err != nil {
return err return err
} }
@ -98,16 +104,16 @@ func applyDepsIfNeeded(cfg deps.DepsCfg, sites ...*Site) error {
if err != nil { if err != nil {
return err return err
} }
s.Deps = d
} }
s.Deps = d
} }
return nil return nil
} }
// NewHugoSitesFromConfiguration creates HugoSites from the global Viper config. // NewHugoSites creates HugoSites from the given config.
// TODO(bep) globals rename this when all the globals are gone. func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) {
func NewHugoSitesFromConfiguration(cfg deps.DepsCfg) (*HugoSites, error) {
sites, err := createSitesFromConfig(cfg) sites, err := createSitesFromConfig(cfg)
if err != nil { if err != nil {
return nil, err return nil, err
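In practice the rename shows up as the following chain, lifted almost verbatim from the converted tests elsewhere in this commit (test context assumed; the filesystem writes are elided):

mm := afero.NewMemMapFs()
// write config.toml, content/ and layouts/ into mm here ...

cfg, err := LoadConfig(mm, "", "config.toml")
if err != nil {
    t.Fatalf("Failed to load config: %s", err)
}
fs := hugofs.NewFrom(mm, cfg)

sites, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg})
if err != nil {
    t.Fatalf("Failed to create sites: %s", err)
}
if err := sites.Build(BuildCfg{}); err != nil {
    t.Fatalf("Failed to build sites: %s", err)
}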
@ -141,10 +147,10 @@ func createSitesFromConfig(cfg deps.DepsCfg) ([]*Site, error) {
sites []*Site sites []*Site
) )
multilingual := viper.GetStringMap("languages") multilingual := cfg.Cfg.GetStringMap("languages")
if len(multilingual) == 0 { if len(multilingual) == 0 {
l := helpers.NewDefaultLanguage() l := helpers.NewDefaultLanguage(cfg.Cfg)
cfg.Language = l cfg.Language = l
s, err := newSite(cfg) s, err := newSite(cfg)
if err != nil { if err != nil {
@ -156,7 +162,7 @@ func createSitesFromConfig(cfg deps.DepsCfg) ([]*Site, error) {
if len(multilingual) > 0 { if len(multilingual) > 0 {
var err error var err error
languages, err := toSortedLanguages(multilingual) languages, err := toSortedLanguages(cfg.Cfg, multilingual)
if err != nil { if err != nil {
return nil, fmt.Errorf("Failed to parse multilingual config: %s", err) return nil, fmt.Errorf("Failed to parse multilingual config: %s", err)
@ -190,14 +196,14 @@ func (h *HugoSites) reset() {
func (h *HugoSites) createSitesFromConfig() error { func (h *HugoSites) createSitesFromConfig() error {
depsCfg := deps.DepsCfg{Fs: h.Fs} depsCfg := deps.DepsCfg{Fs: h.Fs, Cfg: h.Cfg}
sites, err := createSitesFromConfig(depsCfg) sites, err := createSitesFromConfig(depsCfg)
if err != nil { if err != nil {
return err return err
} }
langConfig, err := newMultiLingualFromSites(sites...) langConfig, err := newMultiLingualFromSites(depsCfg.Cfg, sites...)
if err != nil { if err != nil {
return err return err
@ -251,12 +257,12 @@ func (h *HugoSites) renderCrossSitesArtifacts() error {
return nil return nil
} }
if viper.GetBool("disableSitemap") { if h.Cfg.GetBool("disableSitemap") {
return nil return nil
} }
// TODO(bep) DRY // TODO(bep) DRY
sitemapDefault := parseSitemap(viper.GetStringMap("sitemap")) sitemapDefault := parseSitemap(h.Cfg.GetStringMap("sitemap"))
s := h.Sites[0] s := h.Sites[0]
@ -398,6 +404,24 @@ func (h *HugoSites) createMissingPages() error {
return nil return nil
} }
func (s *Site) assignSiteByLanguage(p *Page) {
pageLang := p.Lang()
if pageLang == "" {
panic("Page language missing: " + p.Title)
}
for _, site := range s.owner.Sites {
if strings.HasPrefix(site.Language.Lang, pageLang) {
p.s = site
p.Site = &site.Info
return
}
}
}
func (h *HugoSites) setupTranslations() { func (h *HugoSites) setupTranslations() {
master := h.Sites[0] master := h.Sites[0]
@ -410,11 +434,11 @@ func (h *HugoSites) setupTranslations() {
shouldBuild := p.shouldBuild() shouldBuild := p.shouldBuild()
for i, site := range h.Sites { for i, site := range h.Sites {
if strings.HasPrefix(site.Language.Lang, p.Lang()) { // The site is assigned by language when read.
if site == p.s {
site.updateBuildStats(p) site.updateBuildStats(p)
if shouldBuild { if shouldBuild {
site.Pages = append(site.Pages, p) site.Pages = append(site.Pages, p)
p.Site = &site.Info
} }
} }
@ -577,32 +601,3 @@ func (h *HugoSites) findAllPagesByKind(kind string) Pages {
func (h *HugoSites) findAllPagesByKindNotIn(kind string) Pages { func (h *HugoSites) findAllPagesByKindNotIn(kind string) Pages {
return h.findPagesByKindNotIn(kind, h.Sites[0].AllPages) return h.findPagesByKindNotIn(kind, h.Sites[0].AllPages)
} }
// Convenience func used in tests.
func newHugoSitesFromSourceAndLanguages(input []source.ByteSource, languages helpers.Languages, cfg deps.DepsCfg) (*HugoSites, error) {
if len(languages) == 0 {
panic("Must provide at least one language")
}
first := &Site{
Language: languages[0],
Source: &source.InMemorySource{ByteSource: input},
}
if len(languages) == 1 {
return newHugoSites(cfg, first)
}
sites := make([]*Site, len(languages))
sites[0] = first
for i := 1; i < len(languages); i++ {
sites[i] = &Site{Language: languages[i]}
}
return newHugoSites(cfg, sites...)
}
// Convenience func used in tests.
func newHugoSitesDefaultLanguage(cfg deps.DepsCfg) (*HugoSites, error) {
return newHugoSitesFromSourceAndLanguages(nil, helpers.Languages{helpers.NewDefaultLanguage()}, cfg)
}


@ -121,6 +121,7 @@ func (h *HugoSites) process(config *BuildCfg, events ...fsnotify.Event) error {
// but that seems like a daunting task. // but that seems like a daunting task.
// So for now, if there are more than one site (language), // So for now, if there are more than one site (language),
// we pre-process the first one, then configure all the sites based on that. // we pre-process the first one, then configure all the sites based on that.
firstSite := h.Sites[0] firstSite := h.Sites[0]
if len(events) > 0 { if len(events) > 0 {
@ -169,9 +170,6 @@ func (h *HugoSites) assemble(config *BuildCfg) error {
} }
for _, s := range h.Sites { for _, s := range h.Sites {
if err := s.setCurrentLanguageConfig(); err != nil {
return err
}
s.preparePagesForRender(config) s.preparePagesForRender(config)
} }


@ -14,52 +14,35 @@ import (
"github.com/fortytw2/leaktest" "github.com/fortytw2/leaktest"
"github.com/fsnotify/fsnotify" "github.com/fsnotify/fsnotify"
"github.com/spf13/afero" "github.com/spf13/afero"
"github.com/spf13/hugo/config"
"github.com/spf13/hugo/deps" "github.com/spf13/hugo/deps"
"github.com/spf13/hugo/helpers" "github.com/spf13/hugo/helpers"
"github.com/spf13/hugo/hugofs" "github.com/spf13/hugo/hugofs"
"github.com/spf13/hugo/source" "github.com/spf13/hugo/source"
// jww "github.com/spf13/jwalterweatherman"
"github.com/spf13/viper" "github.com/spf13/viper"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
type testSiteConfig struct { type testSiteConfig struct {
DefaultContentLanguage string DefaultContentLanguage string
Fs *hugofs.Fs DefaultContentLanguageInSubdir bool
Fs afero.Fs
} }
func init() { func TestMultiSitesMainLangInRoot(t *testing.T) {
testCommonResetState() t.Parallel()
}
func testCommonResetState() {
viper.Reset()
// TODO(bep) globals viper viper.SetFs(hugofs.Source())
viper.Set("currentContentLanguage", helpers.NewLanguage("en"))
helpers.ResetConfigProvider()
loadDefaultSettings()
// Default is false, but true is easier to use as default in tests
viper.Set("defaultContentLanguageInSubdir", true)
}
// TODO(bep) globals this currently fails because of a configuration dependency that will be resolved when we get rid of the global Viper.
func _TestMultiSitesMainLangInRoot(t *testing.T) {
for _, b := range []bool{true, false} { for _, b := range []bool{true, false} {
doTestMultiSitesMainLangInRoot(t, b) doTestMultiSitesMainLangInRoot(t, b)
} }
} }
func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) { func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) {
testCommonResetState()
viper.Set("defaultContentLanguageInSubdir", defaultInSubDir) siteConfig := testSiteConfig{Fs: afero.NewMemMapFs(), DefaultContentLanguage: "fr", DefaultContentLanguageInSubdir: defaultInSubDir}
fs := hugofs.NewMem()
siteConfig := testSiteConfig{DefaultContentLanguage: "fr", Fs: fs}
sites := createMultiTestSites(t, siteConfig, multiSiteTOMLConfigTemplate) sites := createMultiTestSites(t, siteConfig, multiSiteTOMLConfigTemplate)
th := testHelper{sites.Cfg}
fs := sites.Fs
err := sites.Build(BuildCfg{}) err := sites.Build(BuildCfg{})
@ -80,7 +63,6 @@ func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) {
require.Equal(t, "", frSite.Info.LanguagePrefix) require.Equal(t, "", frSite.Info.LanguagePrefix)
} }
fmt.Println(">>>", enSite.PathSpec)
require.Equal(t, "/blog/en/foo", enSite.PathSpec.RelURL("foo", true)) require.Equal(t, "/blog/en/foo", enSite.PathSpec.RelURL("foo", true))
doc1en := enSite.RegularPages[0] doc1en := enSite.RegularPages[0]
@ -94,71 +76,75 @@ func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) {
frPerm := doc1fr.Permalink() frPerm := doc1fr.Permalink()
frRelPerm := doc1fr.RelPermalink() frRelPerm := doc1fr.RelPermalink()
// Main language in root // Main language in root
require.Equal(t, replaceDefaultContentLanguageValue("http://example.com/blog/fr/sect/doc1/", defaultInSubDir), frPerm) require.Equal(t, th.replaceDefaultContentLanguageValue("http://example.com/blog/fr/sect/doc1/", defaultInSubDir), frPerm)
require.Equal(t, replaceDefaultContentLanguageValue("/blog/fr/sect/doc1/", defaultInSubDir), frRelPerm) require.Equal(t, th.replaceDefaultContentLanguageValue("/blog/fr/sect/doc1/", defaultInSubDir), frRelPerm)
assertFileContent(t, fs, "public/fr/sect/doc1/index.html", defaultInSubDir, "Single", "Bonjour") th.assertFileContent(t, fs, "public/fr/sect/doc1/index.html", defaultInSubDir, "Single", "Bonjour")
assertFileContent(t, fs, "public/en/sect/doc1-slug/index.html", defaultInSubDir, "Single", "Hello") th.assertFileContent(t, fs, "public/en/sect/doc1-slug/index.html", defaultInSubDir, "Single", "Hello")
// Check home // Check home
if defaultInSubDir { if defaultInSubDir {
// should have a redirect on top level. // should have a redirect on top level.
assertFileContent(t, fs, "public/index.html", true, `<meta http-equiv="refresh" content="0; url=http://example.com/blog/fr" />`) th.assertFileContent(t, fs, "public/index.html", true, `<meta http-equiv="refresh" content="0; url=http://example.com/blog/fr" />`)
} else { } else {
// should have redirect back to root // should have redirect back to root
assertFileContent(t, fs, "public/fr/index.html", true, `<meta http-equiv="refresh" content="0; url=http://example.com/blog" />`) th.assertFileContent(t, fs, "public/fr/index.html", true, `<meta http-equiv="refresh" content="0; url=http://example.com/blog" />`)
} }
assertFileContent(t, fs, "public/fr/index.html", defaultInSubDir, "Home", "Bonjour") th.assertFileContent(t, fs, "public/fr/index.html", defaultInSubDir, "Home", "Bonjour")
assertFileContent(t, fs, "public/en/index.html", defaultInSubDir, "Home", "Hello") th.assertFileContent(t, fs, "public/en/index.html", defaultInSubDir, "Home", "Hello")
// Check list pages // Check list pages
assertFileContent(t, fs, "public/fr/sect/index.html", defaultInSubDir, "List", "Bonjour") th.assertFileContent(t, fs, "public/fr/sect/index.html", defaultInSubDir, "List", "Bonjour")
assertFileContent(t, fs, "public/en/sect/index.html", defaultInSubDir, "List", "Hello") th.assertFileContent(t, fs, "public/en/sect/index.html", defaultInSubDir, "List", "Hello")
assertFileContent(t, fs, "public/fr/plaques/frtag1/index.html", defaultInSubDir, "List", "Bonjour") th.assertFileContent(t, fs, "public/fr/plaques/frtag1/index.html", defaultInSubDir, "List", "Bonjour")
assertFileContent(t, fs, "public/en/tags/tag1/index.html", defaultInSubDir, "List", "Hello") th.assertFileContent(t, fs, "public/en/tags/tag1/index.html", defaultInSubDir, "List", "Hello")
// Check sitemaps // Check sitemaps
// Sitemaps behave differently: in a multilanguage setup there will always be an index file and // Sitemaps behave differently: in a multilanguage setup there will always be an index file and
// one sitemap in each lang folder. // one sitemap in each lang folder.
assertFileContent(t, fs, "public/sitemap.xml", true, th.assertFileContent(t, fs, "public/sitemap.xml", true,
"<loc>http://example.com/blog/en/sitemap.xml</loc>", "<loc>http://example.com/blog/en/sitemap.xml</loc>",
"<loc>http://example.com/blog/fr/sitemap.xml</loc>") "<loc>http://example.com/blog/fr/sitemap.xml</loc>")
if defaultInSubDir { if defaultInSubDir {
assertFileContent(t, fs, "public/fr/sitemap.xml", true, "<loc>http://example.com/blog/fr/</loc>") th.assertFileContent(t, fs, "public/fr/sitemap.xml", true, "<loc>http://example.com/blog/fr/</loc>")
} else { } else {
assertFileContent(t, fs, "public/fr/sitemap.xml", true, "<loc>http://example.com/blog/</loc>") th.assertFileContent(t, fs, "public/fr/sitemap.xml", true, "<loc>http://example.com/blog/</loc>")
} }
assertFileContent(t, fs, "public/en/sitemap.xml", true, "<loc>http://example.com/blog/en/</loc>") th.assertFileContent(t, fs, "public/en/sitemap.xml", true, "<loc>http://example.com/blog/en/</loc>")
// Check rss // Check rss
assertFileContent(t, fs, "public/fr/index.xml", defaultInSubDir, `<atom:link href="http://example.com/blog/fr/index.xml"`) th.assertFileContent(t, fs, "public/fr/index.xml", defaultInSubDir, `<atom:link href="http://example.com/blog/fr/index.xml"`)
assertFileContent(t, fs, "public/en/index.xml", defaultInSubDir, `<atom:link href="http://example.com/blog/en/index.xml"`) th.assertFileContent(t, fs, "public/en/index.xml", defaultInSubDir, `<atom:link href="http://example.com/blog/en/index.xml"`)
assertFileContent(t, fs, "public/fr/sect/index.xml", defaultInSubDir, `<atom:link href="http://example.com/blog/fr/sect/index.xml"`) th.assertFileContent(t, fs, "public/fr/sect/index.xml", defaultInSubDir, `<atom:link href="http://example.com/blog/fr/sect/index.xml"`)
assertFileContent(t, fs, "public/en/sect/index.xml", defaultInSubDir, `<atom:link href="http://example.com/blog/en/sect/index.xml"`) th.assertFileContent(t, fs, "public/en/sect/index.xml", defaultInSubDir, `<atom:link href="http://example.com/blog/en/sect/index.xml"`)
assertFileContent(t, fs, "public/fr/plaques/frtag1/index.xml", defaultInSubDir, `<atom:link href="http://example.com/blog/fr/plaques/frtag1/index.xml"`) th.assertFileContent(t, fs, "public/fr/plaques/frtag1/index.xml", defaultInSubDir, `<atom:link href="http://example.com/blog/fr/plaques/frtag1/index.xml"`)
assertFileContent(t, fs, "public/en/tags/tag1/index.xml", defaultInSubDir, `<atom:link href="http://example.com/blog/en/tags/tag1/index.xml"`) th.assertFileContent(t, fs, "public/en/tags/tag1/index.xml", defaultInSubDir, `<atom:link href="http://example.com/blog/en/tags/tag1/index.xml"`)
// Check paginators // Check paginators
assertFileContent(t, fs, "public/fr/page/1/index.html", defaultInSubDir, `refresh" content="0; url=http://example.com/blog/fr/"`) th.assertFileContent(t, fs, "public/fr/page/1/index.html", defaultInSubDir, `refresh" content="0; url=http://example.com/blog/fr/"`)
assertFileContent(t, fs, "public/en/page/1/index.html", defaultInSubDir, `refresh" content="0; url=http://example.com/blog/en/"`) th.assertFileContent(t, fs, "public/en/page/1/index.html", defaultInSubDir, `refresh" content="0; url=http://example.com/blog/en/"`)
assertFileContent(t, fs, "public/fr/page/2/index.html", defaultInSubDir, "Home Page 2", "Bonjour", "http://example.com/blog/fr/") th.assertFileContent(t, fs, "public/fr/page/2/index.html", defaultInSubDir, "Home Page 2", "Bonjour", "http://example.com/blog/fr/")
assertFileContent(t, fs, "public/en/page/2/index.html", defaultInSubDir, "Home Page 2", "Hello", "http://example.com/blog/en/") th.assertFileContent(t, fs, "public/en/page/2/index.html", defaultInSubDir, "Home Page 2", "Hello", "http://example.com/blog/en/")
assertFileContent(t, fs, "public/fr/sect/page/1/index.html", defaultInSubDir, `refresh" content="0; url=http://example.com/blog/fr/sect/"`) th.assertFileContent(t, fs, "public/fr/sect/page/1/index.html", defaultInSubDir, `refresh" content="0; url=http://example.com/blog/fr/sect/"`)
assertFileContent(t, fs, "public/en/sect/page/1/index.html", defaultInSubDir, `refresh" content="0; url=http://example.com/blog/en/sect/"`) th.assertFileContent(t, fs, "public/en/sect/page/1/index.html", defaultInSubDir, `refresh" content="0; url=http://example.com/blog/en/sect/"`)
assertFileContent(t, fs, "public/fr/sect/page/2/index.html", defaultInSubDir, "List Page 2", "Bonjour", "http://example.com/blog/fr/sect/") th.assertFileContent(t, fs, "public/fr/sect/page/2/index.html", defaultInSubDir, "List Page 2", "Bonjour", "http://example.com/blog/fr/sect/")
assertFileContent(t, fs, "public/en/sect/page/2/index.html", defaultInSubDir, "List Page 2", "Hello", "http://example.com/blog/en/sect/") th.assertFileContent(t, fs, "public/en/sect/page/2/index.html", defaultInSubDir, "List Page 2", "Hello", "http://example.com/blog/en/sect/")
assertFileContent(t, fs, "public/fr/plaques/frtag1/page/1/index.html", defaultInSubDir, `refresh" content="0; url=http://example.com/blog/fr/plaques/frtag1/"`) th.assertFileContent(t, fs, "public/fr/plaques/frtag1/page/1/index.html", defaultInSubDir, `refresh" content="0; url=http://example.com/blog/fr/plaques/frtag1/"`)
assertFileContent(t, fs, "public/en/tags/tag1/page/1/index.html", defaultInSubDir, `refresh" content="0; url=http://example.com/blog/en/tags/tag1/"`) th.assertFileContent(t, fs, "public/en/tags/tag1/page/1/index.html", defaultInSubDir, `refresh" content="0; url=http://example.com/blog/en/tags/tag1/"`)
assertFileContent(t, fs, "public/fr/plaques/frtag1/page/2/index.html", defaultInSubDir, "List Page 2", "Bonjour", "http://example.com/blog/fr/plaques/frtag1/") th.assertFileContent(t, fs, "public/fr/plaques/frtag1/page/2/index.html", defaultInSubDir, "List Page 2", "Bonjour", "http://example.com/blog/fr/plaques/frtag1/")
assertFileContent(t, fs, "public/en/tags/tag1/page/2/index.html", defaultInSubDir, "List Page 2", "Hello", "http://example.com/blog/en/tags/tag1/") th.assertFileContent(t, fs, "public/en/tags/tag1/page/2/index.html", defaultInSubDir, "List Page 2", "Hello", "http://example.com/blog/en/tags/tag1/")
// nn (Nynorsk) and nb (Bokmål) have custom pagePath: side ("page" in Norwegian) // nn (Nynorsk) and nb (Bokmål) have custom pagePath: side ("page" in Norwegian)
assertFileContent(t, fs, "public/nn/side/1/index.html", defaultInSubDir, `refresh" content="0; url=http://example.com/blog/nn/"`) th.assertFileContent(t, fs, "public/nn/side/1/index.html", defaultInSubDir, `refresh" content="0; url=http://example.com/blog/nn/"`)
assertFileContent(t, fs, "public/nb/side/1/index.html", defaultInSubDir, `refresh" content="0; url=http://example.com/blog/nb/"`) th.assertFileContent(t, fs, "public/nb/side/1/index.html", defaultInSubDir, `refresh" content="0; url=http://example.com/blog/nb/"`)
} }
func replaceDefaultContentLanguageValue(value string, defaultInSubDir bool) string { type testHelper struct {
replace := viper.GetString("defaultContentLanguage") + "/" Cfg config.Provider
}
func (th testHelper) replaceDefaultContentLanguageValue(value string, defaultInSubDir bool) string {
replace := th.Cfg.GetString("defaultContentLanguage") + "/"
if !defaultInSubDir { if !defaultInSubDir {
value = strings.Replace(value, replace, "", 1) value = strings.Replace(value, replace, "", 1)
@ -167,36 +153,33 @@ func replaceDefaultContentLanguageValue(value string, defaultInSubDir bool) stri
} }
func assertFileContent(t *testing.T, fs *hugofs.Fs, filename string, defaultInSubDir bool, matches ...string) { func (th testHelper) assertFileContent(t *testing.T, fs *hugofs.Fs, filename string, defaultInSubDir bool, matches ...string) {
filename = replaceDefaultContentLanguageValue(filename, defaultInSubDir) filename = th.replaceDefaultContentLanguageValue(filename, defaultInSubDir)
content := readDestination(t, fs, filename) content := readDestination(t, fs, filename)
for _, match := range matches { for _, match := range matches {
match = replaceDefaultContentLanguageValue(match, defaultInSubDir) match = th.replaceDefaultContentLanguageValue(match, defaultInSubDir)
require.True(t, strings.Contains(content, match), fmt.Sprintf("File no match for\n%q in\n%q:\n%s", strings.Replace(match, "%", "%%", -1), filename, strings.Replace(content, "%", "%%", -1))) require.True(t, strings.Contains(content, match), fmt.Sprintf("File no match for\n%q in\n%q:\n%s", strings.Replace(match, "%", "%%", -1), filename, strings.Replace(content, "%", "%%", -1)))
} }
} }
func assertFileContentRegexp(t *testing.T, fs *hugofs.Fs, filename string, defaultInSubDir bool, matches ...string) { func (th testHelper) assertFileContentRegexp(t *testing.T, fs *hugofs.Fs, filename string, defaultInSubDir bool, matches ...string) {
filename = replaceDefaultContentLanguageValue(filename, defaultInSubDir) filename = th.replaceDefaultContentLanguageValue(filename, defaultInSubDir)
content := readDestination(t, fs, filename) content := readDestination(t, fs, filename)
for _, match := range matches { for _, match := range matches {
match = replaceDefaultContentLanguageValue(match, defaultInSubDir) match = th.replaceDefaultContentLanguageValue(match, defaultInSubDir)
r := regexp.MustCompile(match) r := regexp.MustCompile(match)
require.True(t, r.MatchString(content), fmt.Sprintf("File no match for\n%q in\n%q:\n%s", match, filename, content)) require.True(t, r.MatchString(content), fmt.Sprintf("File no match for\n%q in\n%q:\n%s", strings.Replace(match, "%", "%%", -1), filename, strings.Replace(content, "%", "%%", -1)))
} }
} }
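The assertion helpers are now methods on a small testHelper value that carries the per-test config instead of reading a global Viper. A minimal usage sketch follows; newTestCfg() is assumed to return (config.Provider, *hugofs.Fs), as it does in the menu and node tests further down, and the test name is illustrative only.

// Sketch only: the helper carries the per-test config, no global state.
func testHelperSketch(t *testing.T) {
	cfg, _ := newTestCfg()
	cfg.Set("defaultContentLanguage", "fr")

	th := testHelper{cfg}

	// With defaultInSubDir=false the default-language prefix is stripped, so
	// the helper compares against public/index.html, not public/fr/index.html.
	filename := th.replaceDefaultContentLanguageValue("public/fr/index.html", false)
	require.Equal(t, "public/index.html", filename)
}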
func TestMultiSitesWithTwoLanguages(t *testing.T) { func TestMultiSitesWithTwoLanguages(t *testing.T) {
testCommonResetState() t.Parallel()
mm := afero.NewMemMapFs()
viper.Set("defaultContentLanguage", "nn") writeToFs(t, mm, "config.toml", `
fs := hugofs.NewMem() defaultContentLanguage = "nn"
depsCfg := deps.DepsCfg{Fs: fs}
viper.SetFs(depsCfg.Fs.Source)
writeSource(t, depsCfg.Fs, "config.toml", `
[languages] [languages]
[languages.nn] [languages.nn]
languageName = "Nynorsk" languageName = "Nynorsk"
@ -210,11 +193,12 @@ weight = 2
`, `,
) )
if err := LoadGlobalConfig("", "config.toml"); err != nil { cfg, err := LoadConfig(mm, "", "config.toml")
t.Fatalf("Failed to load config: %s", err) require.NoError(t, err)
}
sites, err := NewHugoSitesFromConfiguration(depsCfg) fs := hugofs.NewFrom(mm, cfg)
sites, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg})
if err != nil { if err != nil {
t.Fatalf("Failed to create sites: %s", err) t.Fatalf("Failed to create sites: %s", err)
@ -238,100 +222,108 @@ weight = 2
// //
func TestMultiSitesBuild(t *testing.T) { func TestMultiSitesBuild(t *testing.T) {
t.Parallel()
for _, config := range []struct { for _, config := range []struct {
content string content string
suffix string suffix string
}{ }{
{multiSiteTOMLConfigTemplate, "toml"}, {multiSiteTOMLConfigTemplate, "toml"},
{multiSiteYAMLConfig, "yml"}, {multiSiteYAMLConfigTemplate, "yml"},
{multiSiteJSONConfig, "json"}, {multiSiteJSONConfigTemplate, "json"},
} { } {
doTestMultiSitesBuild(t, config.content, config.suffix) doTestMultiSitesBuild(t, config.content, config.suffix)
} }
} }
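Each format variant in the loop above is rendered from the same template, with testSiteConfig supplying the values for the {{ .DefaultContentLanguage }} and {{ .DefaultContentLanguageInSubdir }} placeholders. A hedged sketch of that instantiation; the field names and helpers are taken from this file, the concrete values are examples only.

// Sketch only: rendering one config-format variant into ready-to-build sites.
func configTemplateSketch(t *testing.T) *HugoSites {
	siteConfig := testSiteConfig{
		Fs:                             afero.NewMemMapFs(),
		DefaultContentLanguage:         "fr",
		DefaultContentLanguageInSubdir: true,
	}
	// createMultiTestSitesForConfig renders the template, writes the fixture
	// files to the in-memory fs and returns the sites built from that config.
	return createMultiTestSitesForConfig(t, siteConfig, multiSiteYAMLConfigTemplate, "yml")
}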
func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) { func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
defer leaktest.Check(t)() siteConfig := testSiteConfig{Fs: afero.NewMemMapFs(), DefaultContentLanguage: "fr", DefaultContentLanguageInSubdir: true}
testCommonResetState()
fs := hugofs.NewMem()
siteConfig := testSiteConfig{DefaultContentLanguage: "fr", Fs: fs}
sites := createMultiTestSitesForConfig(t, siteConfig, configTemplate, configSuffix) sites := createMultiTestSitesForConfig(t, siteConfig, configTemplate, configSuffix)
require.Len(t, sites.Sites, 4)
fs := sites.Fs
th := testHelper{sites.Cfg}
err := sites.Build(BuildCfg{}) err := sites.Build(BuildCfg{})
if err != nil { if err != nil {
t.Fatalf("Failed to build sites: %s", err) t.Fatalf("Failed to build sites: %s", err)
} }
// Check site config
for _, s := range sites.Sites {
require.True(t, s.Info.defaultContentLanguageInSubdir, s.Info.Title)
}
enSite := sites.Sites[0] enSite := sites.Sites[0]
enSiteHome := enSite.getPage(KindHome) enSiteHome := enSite.getPage(KindHome)
require.True(t, enSiteHome.IsTranslated()) require.True(t, enSiteHome.IsTranslated())
assert.Equal(t, "en", enSite.Language.Lang) require.Equal(t, "en", enSite.Language.Lang)
if len(enSite.RegularPages) != 4 { if len(enSite.RegularPages) != 4 {

t.Fatal("Expected 4 english pages") t.Fatal("Expected 4 english pages")
} }
assert.Len(t, enSite.Source.Files(), 14, "should have 14 source files") require.Len(t, enSite.Source.Files(), 14, "should have 14 source files")
assert.Len(t, enSite.AllPages, 28, "should have 28 total pages (including translations and index types)") require.Len(t, enSite.AllPages, 28, "should have 28 total pages (including translations and index types)")
doc1en := enSite.RegularPages[0] doc1en := enSite.RegularPages[0]
permalink := doc1en.Permalink() permalink := doc1en.Permalink()
assert.NoError(t, err, "permalink call failed") require.NoError(t, err, "permalink call failed")
assert.Equal(t, "http://example.com/blog/en/sect/doc1-slug/", permalink, "invalid doc1.en permalink") require.Equal(t, "http://example.com/blog/en/sect/doc1-slug/", permalink, "invalid doc1.en permalink")
assert.Len(t, doc1en.Translations(), 1, "doc1-en should have one translation, excluding itself") require.Len(t, doc1en.Translations(), 1, "doc1-en should have one translation, excluding itself")
doc2 := enSite.RegularPages[1] doc2 := enSite.RegularPages[1]
permalink = doc2.Permalink() permalink = doc2.Permalink()
assert.NoError(t, err, "permalink call failed") require.NoError(t, err, "permalink call failed")
assert.Equal(t, "http://example.com/blog/en/sect/doc2/", permalink, "invalid doc2 permalink") require.Equal(t, "http://example.com/blog/en/sect/doc2/", permalink, "invalid doc2 permalink")
doc3 := enSite.RegularPages[2] doc3 := enSite.RegularPages[2]
permalink = doc3.Permalink() permalink = doc3.Permalink()
assert.NoError(t, err, "permalink call failed") require.NoError(t, err, "permalink call failed")
// Note that /superbob is a custom URL set in frontmatter. // Note that /superbob is a custom URL set in frontmatter.
// We respect that URL literally (it can be /search.json) // We respect that URL literally (it can be /search.json)
// and do not do any language code prefixing. // and do not do any language code prefixing.
assert.Equal(t, "http://example.com/blog/superbob", permalink, "invalid doc3 permalink") require.Equal(t, "http://example.com/blog/superbob", permalink, "invalid doc3 permalink")
assert.Equal(t, "/superbob", doc3.URL(), "invalid url, was specified on doc3") require.Equal(t, "/superbob", doc3.URL(), "invalid url, was specified on doc3")
assertFileContent(t, fs, "public/superbob/index.html", true, "doc3|Hello|en") th.assertFileContent(t, fs, "public/superbob/index.html", true, "doc3|Hello|en")
assert.Equal(t, doc2.Next, doc3, "doc3 should follow doc2, in .Next") require.Equal(t, doc2.Next, doc3, "doc3 should follow doc2, in .Next")
doc1fr := doc1en.Translations()[0] doc1fr := doc1en.Translations()[0]
permalink = doc1fr.Permalink() permalink = doc1fr.Permalink()
assert.NoError(t, err, "permalink call failed") require.NoError(t, err, "permalink call failed")
assert.Equal(t, "http://example.com/blog/fr/sect/doc1/", permalink, "invalid doc1fr permalink") require.Equal(t, "http://example.com/blog/fr/sect/doc1/", permalink, "invalid doc1fr permalink")
assert.Equal(t, doc1en.Translations()[0], doc1fr, "doc1-en should have doc1-fr as translation") require.Equal(t, doc1en.Translations()[0], doc1fr, "doc1-en should have doc1-fr as translation")
assert.Equal(t, doc1fr.Translations()[0], doc1en, "doc1-fr should have doc1-en as translation") require.Equal(t, doc1fr.Translations()[0], doc1en, "doc1-fr should have doc1-en as translation")
assert.Equal(t, "fr", doc1fr.Language().Lang) require.Equal(t, "fr", doc1fr.Language().Lang)
doc4 := enSite.AllPages[4] doc4 := enSite.AllPages[4]
permalink = doc4.Permalink() permalink = doc4.Permalink()
assert.Equal(t, "http://example.com/blog/fr/sect/doc4/", permalink, "invalid doc4 permalink") require.Equal(t, "http://example.com/blog/fr/sect/doc4/", permalink, "invalid doc4 permalink")
assert.Equal(t, "/blog/fr/sect/doc4/", doc4.URL()) require.Equal(t, "/blog/fr/sect/doc4/", doc4.URL())
assert.Len(t, doc4.Translations(), 0, "found translations for doc4") require.Len(t, doc4.Translations(), 0, "found translations for doc4")
doc5 := enSite.AllPages[5] doc5 := enSite.AllPages[5]
permalink = doc5.Permalink() permalink = doc5.Permalink()
assert.Equal(t, "http://example.com/blog/fr/somewhere/else/doc5", permalink, "invalid doc5 permalink") require.Equal(t, "http://example.com/blog/fr/somewhere/else/doc5", permalink, "invalid doc5 permalink")
// Taxonomies and their URLs // Taxonomies and their URLs
assert.Len(t, enSite.Taxonomies, 1, "should have 1 taxonomy") require.Len(t, enSite.Taxonomies, 1, "should have 1 taxonomy")
tags := enSite.Taxonomies["tags"] tags := enSite.Taxonomies["tags"]
assert.Len(t, tags, 2, "should have 2 different tags") require.Len(t, tags, 2, "should have 2 different tags")
assert.Equal(t, tags["tag1"][0].Page, doc1en, "first tag1 page should be doc1") require.Equal(t, tags["tag1"][0].Page, doc1en, "first tag1 page should be doc1")
frSite := sites.Sites[1] frSite := sites.Sites[1]
assert.Equal(t, "fr", frSite.Language.Lang) require.Equal(t, "fr", frSite.Language.Lang)
assert.Len(t, frSite.RegularPages, 3, "should have 3 pages") require.Len(t, frSite.RegularPages, 3, "should have 3 pages")
assert.Len(t, frSite.AllPages, 28, "should have 28 total pages (including translations and nodes)") require.Len(t, frSite.AllPages, 28, "should have 28 total pages (including translations and nodes)")
for _, frenchPage := range frSite.RegularPages { for _, frenchPage := range frSite.RegularPages {
assert.Equal(t, "fr", frenchPage.Lang()) require.Equal(t, "fr", frenchPage.Lang())
} }
// Check redirect to main language, French // Check redirect to main language, French
@ -339,12 +331,12 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
require.True(t, strings.Contains(languageRedirect, "0; url=http://example.com/blog/fr"), languageRedirect) require.True(t, strings.Contains(languageRedirect, "0; url=http://example.com/blog/fr"), languageRedirect)
// check home page content (including data files rendering) // check home page content (including data files rendering)
assertFileContent(t, fs, "public/en/index.html", true, "Home Page 1", "Hello", "Hugo Rocks!") th.assertFileContent(t, fs, "public/en/index.html", true, "Home Page 1", "Hello", "Hugo Rocks!")
assertFileContent(t, fs, "public/fr/index.html", true, "Home Page 1", "Bonjour", "Hugo Rocks!") th.assertFileContent(t, fs, "public/fr/index.html", true, "Home Page 1", "Bonjour", "Hugo Rocks!")
// check single page content // check single page content
assertFileContent(t, fs, "public/fr/sect/doc1/index.html", true, "Single", "Shortcode: Bonjour") th.assertFileContent(t, fs, "public/fr/sect/doc1/index.html", true, "Single", "Shortcode: Bonjour")
assertFileContent(t, fs, "public/en/sect/doc1-slug/index.html", true, "Single", "Shortcode: Hello") th.assertFileContent(t, fs, "public/en/sect/doc1-slug/index.html", true, "Single", "Shortcode: Hello")
// Check node translations // Check node translations
homeEn := enSite.getPage(KindHome) homeEn := enSite.getPage(KindHome)
@ -397,9 +389,9 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
readDestination(t, fs, "public/en/tags/tag1/index.html") readDestination(t, fs, "public/en/tags/tag1/index.html")
// Check Blackfriday config // Check Blackfriday config
assert.True(t, strings.Contains(string(doc1fr.Content), "&laquo;"), string(doc1fr.Content)) require.True(t, strings.Contains(string(doc1fr.Content), "&laquo;"), string(doc1fr.Content))
assert.False(t, strings.Contains(string(doc1en.Content), "&laquo;"), string(doc1en.Content)) require.False(t, strings.Contains(string(doc1en.Content), "&laquo;"), string(doc1en.Content))
assert.True(t, strings.Contains(string(doc1en.Content), "&ldquo;"), string(doc1en.Content)) require.True(t, strings.Contains(string(doc1en.Content), "&ldquo;"), string(doc1en.Content))
// Check that the drafts etc. are not built/processed/rendered. // Check that the drafts etc. are not built/processed/rendered.
assertShouldNotBuild(t, sites) assertShouldNotBuild(t, sites)
@ -415,13 +407,14 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
} }
func TestMultiSitesRebuild(t *testing.T) { func TestMultiSitesRebuild(t *testing.T) {
// t.Parallel() not supported, see https://github.com/fortytw2/leaktest/issues/4
defer leaktest.Check(t)() defer leaktest.Check(t)()
testCommonResetState()
fs := hugofs.NewMem() siteConfig := testSiteConfig{Fs: afero.NewMemMapFs(), DefaultContentLanguage: "fr", DefaultContentLanguageInSubdir: true}
siteConfig := testSiteConfig{DefaultContentLanguage: "fr", Fs: fs}
sites := createMultiTestSites(t, siteConfig, multiSiteTOMLConfigTemplate) sites := createMultiTestSites(t, siteConfig, multiSiteTOMLConfigTemplate)
fs := sites.Fs
cfg := BuildCfg{Watching: true} cfg := BuildCfg{Watching: true}
th := testHelper{sites.Cfg}
err := sites.Build(cfg) err := sites.Build(cfg)
@ -442,12 +435,12 @@ func TestMultiSitesRebuild(t *testing.T) {
require.Len(t, frSite.RegularPages, 3) require.Len(t, frSite.RegularPages, 3)
// Verify translations // Verify translations
assertFileContent(t, fs, "public/en/sect/doc1-slug/index.html", true, "Hello") th.assertFileContent(t, fs, "public/en/sect/doc1-slug/index.html", true, "Hello")
assertFileContent(t, fs, "public/fr/sect/doc1/index.html", true, "Bonjour") th.assertFileContent(t, fs, "public/fr/sect/doc1/index.html", true, "Bonjour")
// check single page content // check single page content
assertFileContent(t, fs, "public/fr/sect/doc1/index.html", true, "Single", "Shortcode: Bonjour") th.assertFileContent(t, fs, "public/fr/sect/doc1/index.html", true, "Single", "Shortcode: Bonjour")
assertFileContent(t, fs, "public/en/sect/doc1-slug/index.html", true, "Single", "Shortcode: Hello") th.assertFileContent(t, fs, "public/en/sect/doc1-slug/index.html", true, "Single", "Shortcode: Hello")
for i, this := range []struct { for i, this := range []struct {
preFunc func(t *testing.T) preFunc func(t *testing.T)
@ -585,8 +578,8 @@ func TestMultiSitesRebuild(t *testing.T) {
require.Len(t, enSite.RegularPages, 5) require.Len(t, enSite.RegularPages, 5)
require.Len(t, enSite.AllPages, 30) require.Len(t, enSite.AllPages, 30)
require.Len(t, frSite.RegularPages, 4) require.Len(t, frSite.RegularPages, 4)
assertFileContent(t, fs, "public/fr/sect/doc1/index.html", true, "Single", "Modified Shortcode: Salut") th.assertFileContent(t, fs, "public/fr/sect/doc1/index.html", true, "Single", "Modified Shortcode: Salut")
assertFileContent(t, fs, "public/en/sect/doc1-slug/index.html", true, "Single", "Modified Shortcode: Hello") th.assertFileContent(t, fs, "public/en/sect/doc1-slug/index.html", true, "Single", "Modified Shortcode: Hello")
}, },
}, },
} { } {
@ -630,9 +623,8 @@ func assertShouldNotBuild(t *testing.T, sites *HugoSites) {
} }
func TestAddNewLanguage(t *testing.T) { func TestAddNewLanguage(t *testing.T) {
testCommonResetState() t.Parallel()
fs := hugofs.NewMem() siteConfig := testSiteConfig{Fs: afero.NewMemMapFs(), DefaultContentLanguage: "fr", DefaultContentLanguageInSubdir: true}
siteConfig := testSiteConfig{DefaultContentLanguage: "fr", Fs: fs}
sites := createMultiTestSites(t, siteConfig, multiSiteTOMLConfigTemplate) sites := createMultiTestSites(t, siteConfig, multiSiteTOMLConfigTemplate)
cfg := BuildCfg{} cfg := BuildCfg{}
@ -643,6 +635,8 @@ func TestAddNewLanguage(t *testing.T) {
t.Fatalf("Failed to build sites: %s", err) t.Fatalf("Failed to build sites: %s", err)
} }
fs := sites.Fs
newConfig := multiSiteTOMLConfigTemplate + ` newConfig := multiSiteTOMLConfigTemplate + `
[Languages.sv] [Languages.sv]
@ -657,7 +651,7 @@ title = "Svenska"
writeSource(t, fs, "multilangconfig.toml", newConfig) writeSource(t, fs, "multilangconfig.toml", newConfig)
// Watching does not work with in-memory fs, so we trigger a reload manually // Watching does not work with in-memory fs, so we trigger a reload manually
require.NoError(t, viper.ReadInConfig()) require.NoError(t, sites.Cfg.(*helpers.Language).Cfg.(*viper.Viper).ReadInConfig())
err = sites.Build(BuildCfg{CreateSitesFromConfig: true}) err = sites.Build(BuildCfg{CreateSitesFromConfig: true})
if err != nil { if err != nil {
@ -694,11 +688,15 @@ title = "Svenska"
} }
func TestChangeDefaultLanguage(t *testing.T) { func TestChangeDefaultLanguage(t *testing.T) {
testCommonResetState() t.Parallel()
viper.Set("defaultContentLanguageInSubdir", false) mf := afero.NewMemMapFs()
fs := hugofs.NewMem()
sites := createMultiTestSites(t, testSiteConfig{DefaultContentLanguage: "fr", Fs: fs}, multiSiteTOMLConfigTemplate) sites := createMultiTestSites(t, testSiteConfig{Fs: mf, DefaultContentLanguage: "fr", DefaultContentLanguageInSubdir: false}, multiSiteTOMLConfigTemplate)
require.Equal(t, mf, sites.Fs.Source)
cfg := BuildCfg{} cfg := BuildCfg{}
th := testHelper{sites.Cfg}
err := sites.Build(cfg) err := sites.Build(cfg)
@ -706,16 +704,19 @@ func TestChangeDefaultLanguage(t *testing.T) {
t.Fatalf("Failed to build sites: %s", err) t.Fatalf("Failed to build sites: %s", err)
} }
assertFileContent(t, fs, "public/sect/doc1/index.html", true, "Single", "Bonjour") fs := sites.Fs
assertFileContent(t, fs, "public/en/sect/doc2/index.html", true, "Single", "Hello")
newConfig := createConfig(t, testSiteConfig{DefaultContentLanguage: "en"}, multiSiteTOMLConfigTemplate) th.assertFileContent(t, fs, "public/sect/doc1/index.html", false, "Single", "Bonjour")
th.assertFileContent(t, fs, "public/en/sect/doc2/index.html", false, "Single", "Hello")
newConfig := createConfig(t, testSiteConfig{Fs: mf, DefaultContentLanguage: "en", DefaultContentLanguageInSubdir: false}, multiSiteTOMLConfigTemplate)
// replace the config // replace the config
writeSource(t, fs, "multilangconfig.toml", newConfig) writeSource(t, fs, "multilangconfig.toml", newConfig)
// Watching does not work with in-memory fs, so we trigger a reload manually // Watching does not work with in-memory fs, so we trigger a reload manually
require.NoError(t, viper.ReadInConfig()) // This does not look pretty, so we should think of something else.
require.NoError(t, th.Cfg.(*helpers.Language).Cfg.(*viper.Viper).ReadInConfig())
err = sites.Build(BuildCfg{CreateSitesFromConfig: true}) err = sites.Build(BuildCfg{CreateSitesFromConfig: true})
if err != nil { if err != nil {
@ -723,19 +724,19 @@ func TestChangeDefaultLanguage(t *testing.T) {
} }
// Default language is now en, so that should now be the "root" language // Default language is now en, so that should now be the "root" language
assertFileContent(t, fs, "public/fr/sect/doc1/index.html", true, "Single", "Bonjour") th.assertFileContent(t, fs, "public/fr/sect/doc1/index.html", false, "Single", "Bonjour")
assertFileContent(t, fs, "public/sect/doc2/index.html", true, "Single", "Hello") th.assertFileContent(t, fs, "public/sect/doc2/index.html", false, "Single", "Hello")
} }
func TestTableOfContentsInShortcodes(t *testing.T) { func TestTableOfContentsInShortcodes(t *testing.T) {
testCommonResetState() t.Parallel()
fs := hugofs.NewMem() mf := afero.NewMemMapFs()
writeSource(t, fs, "layouts/shortcodes/toc.html", tocShortcode) writeToFs(t, mf, "layouts/shortcodes/toc.html", tocShortcode)
writeSource(t, fs, "content/post/simple.en.md", tocPageSimple) writeToFs(t, mf, "content/post/simple.en.md", tocPageSimple)
writeSource(t, fs, "content/post/withSCInHeading.en.md", tocPageWithShortcodesInHeadings) writeToFs(t, mf, "content/post/withSCInHeading.en.md", tocPageWithShortcodesInHeadings)
sites := createMultiTestSites(t, testSiteConfig{DefaultContentLanguage: "en", Fs: fs}, multiSiteTOMLConfigTemplate) sites := createMultiTestSites(t, testSiteConfig{Fs: mf, DefaultContentLanguage: "en", DefaultContentLanguageInSubdir: true}, multiSiteTOMLConfigTemplate)
cfg := BuildCfg{} cfg := BuildCfg{}
@ -745,8 +746,11 @@ func TestTableOfContentsInShortcodes(t *testing.T) {
t.Fatalf("Failed to build sites: %s", err) t.Fatalf("Failed to build sites: %s", err)
} }
assertFileContent(t, fs, "public/en/post/simple/index.html", true, tocPageSimpleExpected) th := testHelper{sites.Cfg}
assertFileContent(t, fs, "public/en/post/withSCInHeading/index.html", true, tocPageWithShortcodesInHeadingsExpected) fs := sites.Fs
th.assertFileContent(t, fs, "public/en/post/simple/index.html", true, tocPageSimpleExpected)
th.assertFileContent(t, fs, "public/en/post/withSCInHeading/index.html", true, tocPageWithShortcodesInHeadingsExpected)
} }
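The test above follows the same non-global bootstrap that recurs throughout this change: write a config to an in-memory afero filesystem, load it with LoadConfig, wrap it with hugofs.NewFrom, and hand both to NewHugoSites. A minimal sketch of that flow, using only calls that appear in this diff; the config body is a placeholder and the final Build is assumed to succeed on an otherwise empty site.

// Sketch only: the per-test, non-global bootstrap used throughout these tests.
func bootstrapSketch(t *testing.T) *HugoSites {
	mm := afero.NewMemMapFs()
	writeToFs(t, mm, "config.toml", `defaultContentLanguage = "en"`)

	cfg, err := LoadConfig(mm, "", "config.toml")
	require.NoError(t, err)

	// The filesystem wrapper and the config travel together; no global Viper is touched.
	fs := hugofs.NewFrom(mm, cfg)

	sites, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg})
	require.NoError(t, err)

	require.NoError(t, sites.Build(BuildCfg{}))
	return sites
}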
var tocShortcode = ` var tocShortcode = `
@ -836,6 +840,7 @@ rssURI = "index.xml"
paginate = 1 paginate = 1
defaultContentLanguage = "{{ .DefaultContentLanguage }}" defaultContentLanguage = "{{ .DefaultContentLanguage }}"
defaultContentLanguageInSubdir = {{ .DefaultContentLanguageInSubdir }}
[permalinks] [permalinks]
other = "/somewhere/else/:filename" other = "/somewhere/else/:filename"
@ -886,7 +891,7 @@ paginatePath = "side"
lag = "lag" lag = "lag"
` `
var multiSiteYAMLConfig = ` var multiSiteYAMLConfigTemplate = `
defaultExtension: "html" defaultExtension: "html"
baseURL: "http://example.com/blog" baseURL: "http://example.com/blog"
disableSitemap: false disableSitemap: false
@ -894,7 +899,8 @@ disableRSS: false
rssURI: "index.xml" rssURI: "index.xml"
paginate: 1 paginate: 1
defaultContentLanguage: "fr" defaultContentLanguage: "{{ .DefaultContentLanguage }}"
defaultContentLanguageInSubdir: {{ .DefaultContentLanguageInSubdir }}
permalinks: permalinks:
other: "/somewhere/else/:filename" other: "/somewhere/else/:filename"
@ -945,7 +951,7 @@ Languages:
` `
var multiSiteJSONConfig = ` var multiSiteJSONConfigTemplate = `
{ {
"defaultExtension": "html", "defaultExtension": "html",
"baseURL": "http://example.com/blog", "baseURL": "http://example.com/blog",
@ -953,7 +959,8 @@ var multiSiteJSONConfig = `
"disableRSS": false, "disableRSS": false,
"rssURI": "index.xml", "rssURI": "index.xml",
"paginate": 1, "paginate": 1,
"defaultContentLanguage": "fr", "defaultContentLanguage": "{{ .DefaultContentLanguage }}",
"defaultContentLanguageInSubdir": true,
"permalinks": { "permalinks": {
"other": "/somewhere/else/:filename" "other": "/somewhere/else/:filename"
}, },
@ -1026,25 +1033,26 @@ func createMultiTestSites(t *testing.T, siteConfig testSiteConfig, tomlConfigTem
func createMultiTestSitesForConfig(t *testing.T, siteConfig testSiteConfig, configTemplate, configSuffix string) *HugoSites { func createMultiTestSitesForConfig(t *testing.T, siteConfig testSiteConfig, configTemplate, configSuffix string) *HugoSites {
depsCfg := deps.DepsCfg{Fs: siteConfig.Fs}
configContent := createConfig(t, siteConfig, configTemplate) configContent := createConfig(t, siteConfig, configTemplate)
mf := siteConfig.Fs
// Add some layouts // Add some layouts
if err := afero.WriteFile(depsCfg.Fs.Source, if err := afero.WriteFile(mf,
filepath.Join("layouts", "_default/single.html"), filepath.Join("layouts", "_default/single.html"),
[]byte("Single: {{ .Title }}|{{ i18n \"hello\" }}|{{.Lang}}|{{ .Content }}"), []byte("Single: {{ .Title }}|{{ i18n \"hello\" }}|{{.Lang}}|{{ .Content }}"),
0755); err != nil { 0755); err != nil {
t.Fatalf("Failed to write layout file: %s", err) t.Fatalf("Failed to write layout file: %s", err)
} }
if err := afero.WriteFile(depsCfg.Fs.Source, if err := afero.WriteFile(mf,
filepath.Join("layouts", "_default/list.html"), filepath.Join("layouts", "_default/list.html"),
[]byte("{{ $p := .Paginator }}List Page {{ $p.PageNumber }}: {{ .Title }}|{{ i18n \"hello\" }}|{{ .Permalink }}"), []byte("{{ $p := .Paginator }}List Page {{ $p.PageNumber }}: {{ .Title }}|{{ i18n \"hello\" }}|{{ .Permalink }}"),
0755); err != nil { 0755); err != nil {
t.Fatalf("Failed to write layout file: %s", err) t.Fatalf("Failed to write layout file: %s", err)
} }
if err := afero.WriteFile(depsCfg.Fs.Source, if err := afero.WriteFile(mf,
filepath.Join("layouts", "index.html"), filepath.Join("layouts", "index.html"),
[]byte("{{ $p := .Paginator }}Home Page {{ $p.PageNumber }}: {{ .Title }}|{{ .IsHome }}|{{ i18n \"hello\" }}|{{ .Permalink }}|{{ .Site.Data.hugo.slogan }}"), []byte("{{ $p := .Paginator }}Home Page {{ $p.PageNumber }}: {{ .Title }}|{{ .IsHome }}|{{ i18n \"hello\" }}|{{ .Permalink }}|{{ .Site.Data.hugo.slogan }}"),
0755); err != nil { 0755); err != nil {
@ -1052,7 +1060,7 @@ func createMultiTestSitesForConfig(t *testing.T, siteConfig testSiteConfig, conf
} }
// Add a shortcode // Add a shortcode
if err := afero.WriteFile(depsCfg.Fs.Source, if err := afero.WriteFile(mf,
filepath.Join("layouts", "shortcodes", "shortcode.html"), filepath.Join("layouts", "shortcodes", "shortcode.html"),
[]byte("Shortcode: {{ i18n \"hello\" }}"), []byte("Shortcode: {{ i18n \"hello\" }}"),
0755); err != nil { 0755); err != nil {
@ -1060,7 +1068,7 @@ func createMultiTestSitesForConfig(t *testing.T, siteConfig testSiteConfig, conf
} }
// Add some language files // Add some language files
if err := afero.WriteFile(depsCfg.Fs.Source, if err := afero.WriteFile(mf,
filepath.Join("i18n", "en.yaml"), filepath.Join("i18n", "en.yaml"),
[]byte(` []byte(`
- id: hello - id: hello
@ -1069,7 +1077,7 @@ func createMultiTestSitesForConfig(t *testing.T, siteConfig testSiteConfig, conf
0755); err != nil { 0755); err != nil {
t.Fatalf("Failed to write language file: %s", err) t.Fatalf("Failed to write language file: %s", err)
} }
if err := afero.WriteFile(depsCfg.Fs.Source, if err := afero.WriteFile(mf,
filepath.Join("i18n", "fr.yaml"), filepath.Join("i18n", "fr.yaml"),
[]byte(` []byte(`
- id: hello - id: hello
@ -1223,26 +1231,25 @@ lag:
} }
configFile := "multilangconfig." + configSuffix configFile := "multilangconfig." + configSuffix
writeSource(t, depsCfg.Fs, configFile, configContent) writeToFs(t, mf, configFile, configContent)
viper.SetFs(depsCfg.Fs.Source) cfg, err := LoadConfig(mf, "", configFile)
require.NoError(t, err)
if err := LoadGlobalConfig("", configFile); err != nil { fs := hugofs.NewFrom(mf, cfg)
t.Fatalf("Failed to load config: %s", err)
}
// Hugo supports using ByteSources directly (for testing), // Hugo supports using ByteSources directly (for testing),
// but to make it more real, we write them to the mem file system. // but to make it more real, we write them to the mem file system.
for _, s := range sources { for _, s := range sources {
if err := afero.WriteFile(depsCfg.Fs.Source, filepath.Join("content", s.Name), s.Content, 0755); err != nil { if err := afero.WriteFile(mf, filepath.Join("content", s.Name), s.Content, 0755); err != nil {
t.Fatalf("Failed to write file: %s", err) t.Fatalf("Failed to write file: %s", err)
} }
} }
// Add some data // Add some data
writeSource(t, depsCfg.Fs, "data/hugo.toml", "slogan = \"Hugo Rocks!\"") writeSource(t, fs, "data/hugo.toml", "slogan = \"Hugo Rocks!\"")
sites, err := NewHugoSitesFromConfiguration(depsCfg) sites, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg}) //, Logger: newDebugLogger()})
if err != nil { if err != nil {
t.Fatalf("Failed to create sites: %s", err) t.Fatalf("Failed to create sites: %s", err)
@ -1252,11 +1259,19 @@ lag:
t.Fatalf("Got %d sites", len(sites.Sites)) t.Fatalf("Got %d sites", len(sites.Sites))
} }
if sites.Fs.Source != mf {
t.Fatal("FS mismatch")
}
return sites return sites
} }
func writeSource(t *testing.T, fs *hugofs.Fs, filename, content string) { func writeSource(t *testing.T, fs *hugofs.Fs, filename, content string) {
if err := afero.WriteFile(fs.Source, filepath.FromSlash(filename), []byte(content), 0755); err != nil { writeToFs(t, fs.Source, filename, content)
}
func writeToFs(t *testing.T, fs afero.Fs, filename, content string) {
if err := afero.WriteFile(fs, filepath.FromSlash(filename), []byte(content), 0755); err != nil {
t.Fatalf("Failed to write file: %s", err) t.Fatalf("Failed to write file: %s", err)
} }
} }

View file

@ -1,42 +0,0 @@
// Copyright 2016 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"fmt"
"github.com/nicksnyder/go-i18n/i18n/bundle"
"github.com/spf13/hugo/source"
"github.com/spf13/hugo/tpl"
)
func (s *Site) loadI18n(sources []source.Input) error {
s.Log.DEBUG.Printf("Load I18n from %q", sources)
i18nBundle := bundle.New()
for _, currentSource := range sources {
for _, r := range currentSource.Files() {
err := i18nBundle.ParseTranslationFileBytes(r.LogicalName(), r.Bytes())
if err != nil {
return fmt.Errorf("Failed to load translations in file %q: %s", r.LogicalName(), err)
}
}
}
tpl.SetI18nTfuncs(i18nBundle)
return nil
}

View file

@ -20,14 +20,10 @@ import (
"github.com/spf13/hugo/deps" "github.com/spf13/hugo/deps"
"github.com/spf13/hugo/helpers"
"github.com/spf13/hugo/hugofs"
"path/filepath" "path/filepath"
toml "github.com/pelletier/go-toml" toml "github.com/pelletier/go-toml"
"github.com/spf13/hugo/source" "github.com/spf13/hugo/source"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -187,7 +183,7 @@ Front Matter with Menu with Identifier`, title, menu, identifier))
// Issue 817 - identifier should trump everything // Issue 817 - identifier should trump everything
func TestPageMenuWithIdentifier(t *testing.T) { func TestPageMenuWithIdentifier(t *testing.T) {
t.Parallel()
toml := []source.ByteSource{ toml := []source.ByteSource{
{Name: "sect/doc1.md", Content: tstCreateMenuPageWithIdentifierTOML("t1", "m1", "i1")}, {Name: "sect/doc1.md", Content: tstCreateMenuPageWithIdentifierTOML("t1", "m1", "i1")},
{Name: "sect/doc2.md", Content: tstCreateMenuPageWithIdentifierTOML("t1", "m1", "i2")}, {Name: "sect/doc2.md", Content: tstCreateMenuPageWithIdentifierTOML("t1", "m1", "i2")},
@ -206,7 +202,6 @@ func TestPageMenuWithIdentifier(t *testing.T) {
} }
func doTestPageMenuWithIdentifier(t *testing.T, menuPageSources []source.ByteSource) { func doTestPageMenuWithIdentifier(t *testing.T, menuPageSources []source.ByteSource) {
testCommonResetState()
s := setupMenuTests(t, menuPageSources) s := setupMenuTests(t, menuPageSources)
@ -225,7 +220,7 @@ func doTestPageMenuWithIdentifier(t *testing.T, menuPageSources []source.ByteSou
// Issue 817 contd - name should be second identifier in // Issue 817 contd - name should be second identifier in
func TestPageMenuWithDuplicateName(t *testing.T) { func TestPageMenuWithDuplicateName(t *testing.T) {
t.Parallel()
toml := []source.ByteSource{ toml := []source.ByteSource{
{Name: "sect/doc1.md", Content: tstCreateMenuPageWithNameTOML("t1", "m1", "n1")}, {Name: "sect/doc1.md", Content: tstCreateMenuPageWithNameTOML("t1", "m1", "n1")},
{Name: "sect/doc2.md", Content: tstCreateMenuPageWithNameTOML("t1", "m1", "n2")}, {Name: "sect/doc2.md", Content: tstCreateMenuPageWithNameTOML("t1", "m1", "n2")},
@ -244,7 +239,6 @@ func TestPageMenuWithDuplicateName(t *testing.T) {
} }
func doTestPageMenuWithDuplicateName(t *testing.T, menuPageSources []source.ByteSource) { func doTestPageMenuWithDuplicateName(t *testing.T, menuPageSources []source.ByteSource) {
testCommonResetState()
s := setupMenuTests(t, menuPageSources) s := setupMenuTests(t, menuPageSources)
@ -262,8 +256,7 @@ func doTestPageMenuWithDuplicateName(t *testing.T, menuPageSources []source.Byte
} }
func TestPageMenu(t *testing.T) { func TestPageMenu(t *testing.T) {
testCommonResetState() t.Parallel()
s := setupMenuTests(t, menuPageSources) s := setupMenuTests(t, menuPageSources)
if len(s.RegularPages) != 3 { if len(s.RegularPages) != 3 {
@ -312,8 +305,7 @@ func TestPageMenu(t *testing.T) {
} }
func TestMenuURL(t *testing.T) { func TestMenuURL(t *testing.T) {
testCommonResetState() t.Parallel()
s := setupMenuTests(t, menuPageSources) s := setupMenuTests(t, menuPageSources)
for i, this := range []struct { for i, this := range []struct {
@ -342,8 +334,7 @@ func TestMenuURL(t *testing.T) {
// Issue #1934 // Issue #1934
func TestYAMLMenuWithMultipleEntries(t *testing.T) { func TestYAMLMenuWithMultipleEntries(t *testing.T) {
testCommonResetState() t.Parallel()
ps1 := []byte(`--- ps1 := []byte(`---
title: "Yaml 1" title: "Yaml 1"
weight: 5 weight: 5
@ -373,18 +364,15 @@ Yaml Front Matter with Menu Pages`)
// issue #719 // issue #719
func TestMenuWithUnicodeURLs(t *testing.T) { func TestMenuWithUnicodeURLs(t *testing.T) {
t.Parallel()
for _, canonifyURLs := range []bool{true, false} { for _, canonifyURLs := range []bool{true, false} {
doTestMenuWithUnicodeURLs(t, canonifyURLs) doTestMenuWithUnicodeURLs(t, canonifyURLs)
} }
} }
func doTestMenuWithUnicodeURLs(t *testing.T, canonifyURLs bool) { func doTestMenuWithUnicodeURLs(t *testing.T, canonifyURLs bool) {
testCommonResetState()
viper.Set("canonifyURLs", canonifyURLs) s := setupMenuTests(t, menuPageSources, "canonifyURLs", canonifyURLs)
s := setupMenuTests(t, menuPageSources)
unicodeRussian := findTestMenuEntryByID(s, "unicode", "unicode-russian") unicodeRussian := findTestMenuEntryByID(s, "unicode", "unicode-russian")
@ -399,18 +387,17 @@ func doTestMenuWithUnicodeURLs(t *testing.T, canonifyURLs bool) {
// Issue #1114 // Issue #1114
func TestSectionPagesMenu(t *testing.T) { func TestSectionPagesMenu(t *testing.T) {
t.Parallel()
doTestSectionPagesMenu(true, t) doTestSectionPagesMenu(true, t)
doTestSectionPagesMenu(false, t) doTestSectionPagesMenu(false, t)
} }
func doTestSectionPagesMenu(canonifyURLs bool, t *testing.T) { func doTestSectionPagesMenu(canonifyURLs bool, t *testing.T) {
testCommonResetState()
viper.Set("sectionPagesMenu", "spm") s := setupMenuTests(t, menuPageSectionsSources,
"sectionPagesMenu", "spm",
viper.Set("canonifyURLs", canonifyURLs) "canonifyURLs", canonifyURLs,
s := setupMenuTests(t, menuPageSectionsSources) )
require.Equal(t, 3, len(s.Sections)) require.Equal(t, 3, len(s.Sections))
@ -463,16 +450,15 @@ func doTestSectionPagesMenu(canonifyURLs bool, t *testing.T) {
} }
func TestTaxonomyNodeMenu(t *testing.T) { func TestTaxonomyNodeMenu(t *testing.T) {
t.Parallel()
type taxRenderInfo struct { type taxRenderInfo struct {
key string key string
singular string singular string
plural string plural string
} }
testCommonResetState() s := setupMenuTests(t, menuPageSources, "canonifyURLs", true)
viper.Set("canonifyURLs", true)
s := setupMenuTests(t, menuPageSources)
for i, this := range []struct { for i, this := range []struct {
menu string menu string
@ -512,8 +498,7 @@ func TestTaxonomyNodeMenu(t *testing.T) {
} }
func TestMenuLimit(t *testing.T) { func TestMenuLimit(t *testing.T) {
testCommonResetState() t.Parallel()
s := setupMenuTests(t, menuPageSources) s := setupMenuTests(t, menuPageSources)
m := *s.Menus["main"] m := *s.Menus["main"]
@ -528,7 +513,7 @@ func TestMenuLimit(t *testing.T) {
} }
func TestMenuSortByN(t *testing.T) { func TestMenuSortByN(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
sortFunc func(p Menu) Menu sortFunc func(p Menu) Menu
assertFunc func(p Menu) bool assertFunc func(p Menu) bool
@ -554,12 +539,11 @@ func TestMenuSortByN(t *testing.T) {
} }
func TestHomeNodeMenu(t *testing.T) { func TestHomeNodeMenu(t *testing.T) {
testCommonResetState() t.Parallel()
s := setupMenuTests(t, menuPageSources,
viper.Set("canonifyURLs", true) "canonifyURLs", true,
viper.Set("uglyURLs", false) "uglyURLs", false,
)
s := setupMenuTests(t, menuPageSources)
home := s.getPage(KindHome) home := s.getPage(KindHome)
homeMenuEntry := &MenuEntry{Name: home.Title, URL: home.URL()} homeMenuEntry := &MenuEntry{Name: home.Title, URL: home.URL()}
@ -596,12 +580,14 @@ func TestHomeNodeMenu(t *testing.T) {
} }
func TestHopefullyUniqueID(t *testing.T) { func TestHopefullyUniqueID(t *testing.T) {
t.Parallel()
assert.Equal(t, "i", (&MenuEntry{Identifier: "i", URL: "u", Name: "n"}).hopefullyUniqueID()) assert.Equal(t, "i", (&MenuEntry{Identifier: "i", URL: "u", Name: "n"}).hopefullyUniqueID())
assert.Equal(t, "u", (&MenuEntry{Identifier: "", URL: "u", Name: "n"}).hopefullyUniqueID()) assert.Equal(t, "u", (&MenuEntry{Identifier: "", URL: "u", Name: "n"}).hopefullyUniqueID())
assert.Equal(t, "n", (&MenuEntry{Identifier: "", URL: "", Name: "n"}).hopefullyUniqueID()) assert.Equal(t, "n", (&MenuEntry{Identifier: "", URL: "", Name: "n"}).hopefullyUniqueID())
} }
func TestAddMenuEntryChild(t *testing.T) { func TestAddMenuEntryChild(t *testing.T) {
t.Parallel()
root := &MenuEntry{Weight: 1} root := &MenuEntry{Weight: 1}
root.addChild(&MenuEntry{Weight: 2}) root.addChild(&MenuEntry{Weight: 2})
root.addChild(&MenuEntry{Weight: 1}) root.addChild(&MenuEntry{Weight: 1})
@ -667,38 +653,28 @@ func findDescendantTestMenuEntry(parent *MenuEntry, id string, matcher func(me *
return found return found
} }
func setupTestMenuState(t *testing.T) { func setupMenuTests(t *testing.T, pageSources []source.ByteSource, configKeyValues ...interface{}) *Site {
var (
cfg, fs = newTestCfg()
)
menus, err := tomlToMap(confMenu1) menus, err := tomlToMap(confMenu1)
require.NoError(t, err)
if err != nil { cfg.Set("menu", menus["menu"])
t.Fatalf("Unable to read menus: %v", err) cfg.Set("baseURL", "http://foo.local/Zoo/")
for i := 0; i < len(configKeyValues); i += 2 {
cfg.Set(configKeyValues[i].(string), configKeyValues[i+1])
} }
viper.Set("menu", menus["menu"])
viper.Set("baseURL", "http://foo.local/Zoo/")
}
func setupMenuTests(t *testing.T, pageSources []source.ByteSource) *Site {
setupTestMenuState(t)
fs := hugofs.NewMem()
for _, src := range pageSources { for _, src := range pageSources {
writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content)) writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))
} }
return buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{}) return buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
}
func createTestSite(pageSources []source.ByteSource) *Site {
return &Site{
Source: &source.InMemorySource{ByteSource: pageSources},
Language: helpers.NewDefaultLanguage(),
}
} }
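setupMenuTests now threads per-test overrides through trailing key/value pairs and applies them to the test's own config with cfg.Set, so nothing leaks between parallel tests. A hedged usage sketch; the test body is illustrative only.

// Sketch only: per-test config overrides as trailing key/value pairs,
// mirroring the new setupMenuTests signature above.
func menuOverridesSketch(t *testing.T) {
	s := setupMenuTests(t, menuPageSources,
		"canonifyURLs", true,
		"uglyURLs", false,
	)

	// The overrides live on this site's own config; parallel tests are unaffected.
	require.NotNil(t, s.Menus["main"])
}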

View file

@ -22,6 +22,7 @@ import (
"fmt" "fmt"
"github.com/spf13/cast" "github.com/spf13/cast"
"github.com/spf13/hugo/config"
"github.com/spf13/hugo/helpers" "github.com/spf13/hugo/helpers"
) )
@ -44,7 +45,7 @@ func (ml *Multilingual) Language(lang string) *helpers.Language {
return ml.langMap[lang] return ml.langMap[lang]
} }
func newMultiLingualFromSites(sites ...*Site) (*Multilingual, error) { func newMultiLingualFromSites(cfg config.Provider, sites ...*Site) (*Multilingual, error) {
languages := make(helpers.Languages, len(sites)) languages := make(helpers.Languages, len(sites))
for i, s := range sites { for i, s := range sites {
@ -54,12 +55,14 @@ func newMultiLingualFromSites(sites ...*Site) (*Multilingual, error) {
languages[i] = s.Language languages[i] = s.Language
} }
return &Multilingual{Languages: languages, DefaultLang: helpers.NewDefaultLanguage()}, nil defaultLang := cfg.GetString("defaultContentLanguage")
} if defaultLang == "" {
defaultLang = "en"
}
return &Multilingual{Languages: languages, DefaultLang: helpers.NewLanguage(defaultLang, cfg)}, nil
func newMultiLingualDefaultLanguage() *Multilingual {
return newMultiLingualForLanguage(helpers.NewDefaultLanguage())
} }
func newMultiLingualForLanguage(language *helpers.Language) *Multilingual { func newMultiLingualForLanguage(language *helpers.Language) *Multilingual {
@ -77,7 +80,7 @@ func (s *Site) multilingualEnabled() bool {
return s.owner.multilingual != nil && s.owner.multilingual.enabled() return s.owner.multilingual != nil && s.owner.multilingual.enabled()
} }
func toSortedLanguages(l map[string]interface{}) (helpers.Languages, error) { func toSortedLanguages(cfg config.Provider, l map[string]interface{}) (helpers.Languages, error) {
langs := make(helpers.Languages, len(l)) langs := make(helpers.Languages, len(l))
i := 0 i := 0
@ -88,7 +91,7 @@ func toSortedLanguages(l map[string]interface{}) (helpers.Languages, error) {
return nil, fmt.Errorf("Language config is not a map: %T", langConf) return nil, fmt.Errorf("Language config is not a map: %T", langConf)
} }
language := helpers.NewLanguage(lang) language := helpers.NewLanguage(lang, cfg)
for loki, v := range langsMap { for loki, v := range langsMap {
switch loki { switch loki {

View file

@ -21,9 +21,10 @@ import (
"time" "time"
"github.com/spf13/afero"
"github.com/spf13/hugo/deps" "github.com/spf13/hugo/deps"
"github.com/spf13/hugo/hugofs" "github.com/spf13/hugo/hugofs"
"github.com/spf13/viper"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -35,6 +36,7 @@ import (
*/ */
func TestNodesAsPage(t *testing.T) { func TestNodesAsPage(t *testing.T) {
t.Parallel()
for _, preserveTaxonomyNames := range []bool{false, true} { for _, preserveTaxonomyNames := range []bool{false, true} {
for _, ugly := range []bool{true, false} { for _, ugly := range []bool{true, false} {
doTestNodeAsPage(t, ugly, preserveTaxonomyNames) doTestNodeAsPage(t, ugly, preserveTaxonomyNames)
@ -54,25 +56,24 @@ func doTestNodeAsPage(t *testing.T, ugly, preserveTaxonomyNames bool) {
*/ */
testCommonResetState() var (
cfg, fs = newTestCfg()
th = testHelper{cfg}
)
viper.Set("uglyURLs", ugly) cfg.Set("uglyURLs", ugly)
viper.Set("preserveTaxonomyNames", preserveTaxonomyNames) cfg.Set("preserveTaxonomyNames", preserveTaxonomyNames)
viper.Set("paginate", 1) cfg.Set("paginate", 1)
viper.Set("title", "Hugo Rocks") cfg.Set("title", "Hugo Rocks")
viper.Set("rssURI", "customrss.xml") cfg.Set("rssURI", "customrss.xml")
depsCfg := newTestDepsConfig() writeLayoutsForNodeAsPageTests(t, fs)
writeNodePagesForNodeAsPageTests(t, fs, "")
viper.SetFs(depsCfg.Fs.Source) writeRegularPagesForNodeAsPageTests(t, fs)
writeLayoutsForNodeAsPageTests(t, depsCfg.Fs) sites, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg})
writeNodePagesForNodeAsPageTests(t, depsCfg.Fs, "")
writeRegularPagesForNodeAsPageTests(t, depsCfg.Fs)
sites, err := NewHugoSitesFromConfiguration(depsCfg)
require.NoError(t, err) require.NoError(t, err)
@ -80,7 +81,7 @@ func doTestNodeAsPage(t *testing.T, ugly, preserveTaxonomyNames bool) {
// date order: home, sect1, sect2, cat/hugo, cat/web, categories // date order: home, sect1, sect2, cat/hugo, cat/web, categories
assertFileContent(t, depsCfg.Fs, filepath.Join("public", "index.html"), false, th.assertFileContent(t, fs, filepath.Join("public", "index.html"), false,
"Index Title: Home Sweet Home!", "Index Title: Home Sweet Home!",
"Home <strong>Content!</strong>", "Home <strong>Content!</strong>",
"# Pages: 4", "# Pages: 4",
@ -89,7 +90,7 @@ func doTestNodeAsPage(t *testing.T, ugly, preserveTaxonomyNames bool) {
"GetPage: Section1 ", "GetPage: Section1 ",
) )
assertFileContent(t, depsCfg.Fs, expectedFilePath(ugly, "public", "sect1", "regular1"), false, "Single Title: Page 01", "Content Page 01") th.assertFileContent(t, fs, expectedFilePath(ugly, "public", "sect1", "regular1"), false, "Single Title: Page 01", "Content Page 01")
nodes := sites.findAllPagesByKindNotIn(KindPage) nodes := sites.findAllPagesByKindNotIn(KindPage)
@ -115,24 +116,24 @@ func doTestNodeAsPage(t *testing.T, ugly, preserveTaxonomyNames bool) {
require.True(t, first.IsPage()) require.True(t, first.IsPage())
// Check Home paginator // Check Home paginator
assertFileContent(t, depsCfg.Fs, expectedFilePath(ugly, "public", "page", "2"), false, th.assertFileContent(t, fs, expectedFilePath(ugly, "public", "page", "2"), false,
"Pag: Page 02") "Pag: Page 02")
// Check Sections // Check Sections
assertFileContent(t, depsCfg.Fs, expectedFilePath(ugly, "public", "sect1"), false, th.assertFileContent(t, fs, expectedFilePath(ugly, "public", "sect1"), false,
"Section Title: Section", "Section1 <strong>Content!</strong>", "Section Title: Section", "Section1 <strong>Content!</strong>",
"Date: 2009-01-04", "Date: 2009-01-04",
"Lastmod: 2009-01-05", "Lastmod: 2009-01-05",
) )
assertFileContent(t, depsCfg.Fs, expectedFilePath(ugly, "public", "sect2"), false, th.assertFileContent(t, fs, expectedFilePath(ugly, "public", "sect2"), false,
"Section Title: Section", "Section2 <strong>Content!</strong>", "Section Title: Section", "Section2 <strong>Content!</strong>",
"Date: 2009-01-06", "Date: 2009-01-06",
"Lastmod: 2009-01-07", "Lastmod: 2009-01-07",
) )
// Check Sections paginator // Check Sections paginator
assertFileContent(t, depsCfg.Fs, expectedFilePath(ugly, "public", "sect1", "page", "2"), false, th.assertFileContent(t, fs, expectedFilePath(ugly, "public", "sect1", "page", "2"), false,
"Pag: Page 02") "Pag: Page 02")
sections := sites.findAllPagesByKind(KindSection) sections := sites.findAllPagesByKind(KindSection)
@ -140,13 +141,13 @@ func doTestNodeAsPage(t *testing.T, ugly, preserveTaxonomyNames bool) {
require.Len(t, sections, 2) require.Len(t, sections, 2)
// Check taxonomy lists // Check taxonomy lists
assertFileContent(t, depsCfg.Fs, expectedFilePath(ugly, "public", "categories", "hugo"), false, th.assertFileContent(t, fs, expectedFilePath(ugly, "public", "categories", "hugo"), false,
"Taxonomy Title: Taxonomy Hugo", "Taxonomy Hugo <strong>Content!</strong>", "Taxonomy Title: Taxonomy Hugo", "Taxonomy Hugo <strong>Content!</strong>",
"Date: 2009-01-08", "Date: 2009-01-08",
"Lastmod: 2009-01-09", "Lastmod: 2009-01-09",
) )
assertFileContent(t, depsCfg.Fs, expectedFilePath(ugly, "public", "categories", "hugo-rocks"), false, th.assertFileContent(t, fs, expectedFilePath(ugly, "public", "categories", "hugo-rocks"), false,
"Taxonomy Title: Taxonomy Hugo Rocks", "Taxonomy Title: Taxonomy Hugo Rocks",
) )
@ -156,7 +157,7 @@ func doTestNodeAsPage(t *testing.T, ugly, preserveTaxonomyNames bool) {
require.NotNil(t, web) require.NotNil(t, web)
require.Len(t, web.Data["Pages"].(Pages), 4) require.Len(t, web.Data["Pages"].(Pages), 4)
assertFileContent(t, depsCfg.Fs, expectedFilePath(ugly, "public", "categories", "web"), false, th.assertFileContent(t, fs, expectedFilePath(ugly, "public", "categories", "web"), false,
"Taxonomy Title: Taxonomy Web", "Taxonomy Title: Taxonomy Web",
"Taxonomy Web <strong>Content!</strong>", "Taxonomy Web <strong>Content!</strong>",
"Date: 2009-01-10", "Date: 2009-01-10",
@ -164,12 +165,12 @@ func doTestNodeAsPage(t *testing.T, ugly, preserveTaxonomyNames bool) {
) )
// Check taxonomy list paginator // Check taxonomy list paginator
assertFileContent(t, depsCfg.Fs, expectedFilePath(ugly, "public", "categories", "hugo", "page", "2"), false, th.assertFileContent(t, fs, expectedFilePath(ugly, "public", "categories", "hugo", "page", "2"), false,
"Taxonomy Title: Taxonomy Hugo", "Taxonomy Title: Taxonomy Hugo",
"Pag: Page 02") "Pag: Page 02")
// Check taxonomy terms // Check taxonomy terms
assertFileContent(t, depsCfg.Fs, expectedFilePath(ugly, "public", "categories"), false, th.assertFileContent(t, fs, expectedFilePath(ugly, "public", "categories"), false,
"Taxonomy Terms Title: Taxonomy Term Categories", "Taxonomy Term Categories <strong>Content!</strong>", "k/v: hugo", "Taxonomy Terms Title: Taxonomy Term Categories", "Taxonomy Term Categories <strong>Content!</strong>", "k/v: hugo",
"Date: 2009-01-14", "Date: 2009-01-14",
"Lastmod: 2009-01-15", "Lastmod: 2009-01-15",
@ -178,34 +179,37 @@ func doTestNodeAsPage(t *testing.T, ugly, preserveTaxonomyNames bool) {
// There are no pages to paginate over in the taxonomy terms. // There are no pages to paginate over in the taxonomy terms.
// RSS // RSS
assertFileContent(t, depsCfg.Fs, filepath.Join("public", "customrss.xml"), false, "Recent content in Home Sweet Home! on Hugo Rocks", "<rss") th.assertFileContent(t, fs, filepath.Join("public", "customrss.xml"), false, "Recent content in Home Sweet Home! on Hugo Rocks", "<rss")
assertFileContent(t, depsCfg.Fs, filepath.Join("public", "sect1", "customrss.xml"), false, "Recent content in Section1 on Hugo Rocks", "<rss") th.assertFileContent(t, fs, filepath.Join("public", "sect1", "customrss.xml"), false, "Recent content in Section1 on Hugo Rocks", "<rss")
assertFileContent(t, depsCfg.Fs, filepath.Join("public", "sect2", "customrss.xml"), false, "Recent content in Section2 on Hugo Rocks", "<rss") th.assertFileContent(t, fs, filepath.Join("public", "sect2", "customrss.xml"), false, "Recent content in Section2 on Hugo Rocks", "<rss")
assertFileContent(t, depsCfg.Fs, filepath.Join("public", "categories", "hugo", "customrss.xml"), false, "Recent content in Taxonomy Hugo on Hugo Rocks", "<rss") th.assertFileContent(t, fs, filepath.Join("public", "categories", "hugo", "customrss.xml"), false, "Recent content in Taxonomy Hugo on Hugo Rocks", "<rss")
assertFileContent(t, depsCfg.Fs, filepath.Join("public", "categories", "web", "customrss.xml"), false, "Recent content in Taxonomy Web on Hugo Rocks", "<rss") th.assertFileContent(t, fs, filepath.Join("public", "categories", "web", "customrss.xml"), false, "Recent content in Taxonomy Web on Hugo Rocks", "<rss")
} }
func TestNodesWithNoContentFile(t *testing.T) { func TestNodesWithNoContentFile(t *testing.T) {
t.Parallel()
for _, ugly := range []bool{false, true} { for _, ugly := range []bool{false, true} {
doTestNodesWithNoContentFile(t, ugly) doTestNodesWithNoContentFile(t, ugly)
} }
} }
func doTestNodesWithNoContentFile(t *testing.T, ugly bool) { func doTestNodesWithNoContentFile(t *testing.T, ugly bool) {
testCommonResetState()
viper.Set("uglyURLs", ugly) var (
viper.Set("paginate", 1) cfg, fs = newTestCfg()
viper.Set("title", "Hugo Rocks!") th = testHelper{cfg}
viper.Set("rssURI", "customrss.xml") )
fs := hugofs.NewMem() cfg.Set("uglyURLs", ugly)
cfg.Set("paginate", 1)
cfg.Set("title", "Hugo Rocks!")
cfg.Set("rssURI", "customrss.xml")
writeLayoutsForNodeAsPageTests(t, fs) writeLayoutsForNodeAsPageTests(t, fs)
writeRegularPagesForNodeAsPageTests(t, fs) writeRegularPagesForNodeAsPageTests(t, fs)
sites, err := NewHugoSitesFromConfiguration(deps.DepsCfg{Fs: fs}) sites, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg})
require.NoError(t, err) require.NoError(t, err)
@ -222,21 +226,21 @@ func doTestNodesWithNoContentFile(t *testing.T, ugly bool) {
require.Len(t, homePage.Pages, 4) require.Len(t, homePage.Pages, 4)
require.True(t, homePage.Path() == "") require.True(t, homePage.Path() == "")
assertFileContent(t, fs, filepath.Join("public", "index.html"), false, th.assertFileContent(t, fs, filepath.Join("public", "index.html"), false,
"Index Title: Hugo Rocks!", "Index Title: Hugo Rocks!",
"Date: 2010-06-12", "Date: 2010-06-12",
"Lastmod: 2010-06-13", "Lastmod: 2010-06-13",
) )
// Taxonomy list // Taxonomy list
assertFileContent(t, fs, expectedFilePath(ugly, "public", "categories", "hugo"), false, th.assertFileContent(t, fs, expectedFilePath(ugly, "public", "categories", "hugo"), false,
"Taxonomy Title: Hugo", "Taxonomy Title: Hugo",
"Date: 2010-06-12", "Date: 2010-06-12",
"Lastmod: 2010-06-13", "Lastmod: 2010-06-13",
) )
// Taxonomy terms // Taxonomy terms
assertFileContent(t, fs, expectedFilePath(ugly, "public", "categories"), false, th.assertFileContent(t, fs, expectedFilePath(ugly, "public", "categories"), false,
"Taxonomy Terms Title: Categories", "Taxonomy Terms Title: Categories",
) )
@ -254,28 +258,29 @@ func doTestNodesWithNoContentFile(t *testing.T, ugly bool) {
} }
// Sections // Sections
assertFileContent(t, fs, expectedFilePath(ugly, "public", "sect1"), false, th.assertFileContent(t, fs, expectedFilePath(ugly, "public", "sect1"), false,
"Section Title: Sect1s", "Section Title: Sect1s",
"Date: 2010-06-12", "Date: 2010-06-12",
"Lastmod: 2010-06-13", "Lastmod: 2010-06-13",
) )
assertFileContent(t, fs, expectedFilePath(ugly, "public", "sect2"), false, th.assertFileContent(t, fs, expectedFilePath(ugly, "public", "sect2"), false,
"Section Title: Sect2s", "Section Title: Sect2s",
"Date: 2008-07-06", "Date: 2008-07-06",
"Lastmod: 2008-07-09", "Lastmod: 2008-07-09",
) )
// RSS // RSS
assertFileContent(t, fs, filepath.Join("public", "customrss.xml"), false, "Hugo Rocks!", "<rss") th.assertFileContent(t, fs, filepath.Join("public", "customrss.xml"), false, "Hugo Rocks!", "<rss")
assertFileContent(t, fs, filepath.Join("public", "sect1", "customrss.xml"), false, "Recent content in Sect1s on Hugo Rocks!", "<rss") th.assertFileContent(t, fs, filepath.Join("public", "sect1", "customrss.xml"), false, "Recent content in Sect1s on Hugo Rocks!", "<rss")
assertFileContent(t, fs, filepath.Join("public", "sect2", "customrss.xml"), false, "Recent content in Sect2s on Hugo Rocks!", "<rss") th.assertFileContent(t, fs, filepath.Join("public", "sect2", "customrss.xml"), false, "Recent content in Sect2s on Hugo Rocks!", "<rss")
assertFileContent(t, fs, filepath.Join("public", "categories", "hugo", "customrss.xml"), false, "Recent content in Hugo on Hugo Rocks!", "<rss") th.assertFileContent(t, fs, filepath.Join("public", "categories", "hugo", "customrss.xml"), false, "Recent content in Hugo on Hugo Rocks!", "<rss")
assertFileContent(t, fs, filepath.Join("public", "categories", "web", "customrss.xml"), false, "Recent content in Web on Hugo Rocks!", "<rss") th.assertFileContent(t, fs, filepath.Join("public", "categories", "web", "customrss.xml"), false, "Recent content in Web on Hugo Rocks!", "<rss")
} }
func TestNodesAsPageMultilingual(t *testing.T) { func TestNodesAsPageMultilingual(t *testing.T) {
t.Parallel()
for _, ugly := range []bool{false, true} { for _, ugly := range []bool{false, true} {
doTestNodesAsPageMultilingual(t, ugly) doTestNodesAsPageMultilingual(t, ugly)
} }
@ -283,15 +288,9 @@ func TestNodesAsPageMultilingual(t *testing.T) {
func doTestNodesAsPageMultilingual(t *testing.T, ugly bool) { func doTestNodesAsPageMultilingual(t *testing.T, ugly bool) {
testCommonResetState() mf := afero.NewMemMapFs()
fs := hugofs.NewMem() writeToFs(t, mf, "config.toml",
viper.Set("uglyURLs", ugly)
viper.SetFs(fs.Source)
writeSource(t, fs, "config.toml",
` `
paginage = 1 paginage = 1
title = "Hugo Multilingual Rocks!" title = "Hugo Multilingual Rocks!"
@ -317,17 +316,22 @@ weight = 3
title = "Deutsche Hugo" title = "Deutsche Hugo"
`) `)
cfg, err := LoadConfig(mf, "", "config.toml")
require.NoError(t, err)
cfg.Set("uglyURLs", ugly)
fs := hugofs.NewFrom(mf, cfg)
writeLayoutsForNodeAsPageTests(t, fs) writeLayoutsForNodeAsPageTests(t, fs)
for _, lang := range []string{"nn", "en"} { for _, lang := range []string{"nn", "en"} {
writeRegularPagesForNodeAsPageTestsWithLang(t, fs, lang) writeRegularPagesForNodeAsPageTestsWithLang(t, fs, lang)
} }
if err := LoadGlobalConfig("", "config.toml"); err != nil { th := testHelper{cfg}
t.Fatalf("Failed to load config: %s", err)
}
sites, err := NewHugoSitesFromConfiguration(deps.DepsCfg{Fs: fs}) sites, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg})
if err != nil { if err != nil {
t.Fatalf("Failed to create sites: %s", err) t.Fatalf("Failed to create sites: %s", err)
@ -372,63 +376,65 @@ title = "Deutsche Hugo"
require.Equal(t, expetedPermalink(ugly, "/en/sect1/"), enSect.Permalink()) require.Equal(t, expetedPermalink(ugly, "/en/sect1/"), enSect.Permalink())
assertFileContent(t, fs, filepath.Join("public", "nn", "index.html"), true, th.assertFileContent(t, fs, filepath.Join("public", "nn", "index.html"), true,
"Index Title: Hugo på norsk") "Index Title: Hugo på norsk")
assertFileContent(t, fs, filepath.Join("public", "en", "index.html"), true, th.assertFileContent(t, fs, filepath.Join("public", "en", "index.html"), true,
"Index Title: Home Sweet Home!", "<strong>Content!</strong>") "Index Title: Home Sweet Home!", "<strong>Content!</strong>")
assertFileContent(t, fs, filepath.Join("public", "de", "index.html"), true, th.assertFileContent(t, fs, filepath.Join("public", "de", "index.html"), true,
"Index Title: Home Sweet Home!", "<strong>Content!</strong>") "Index Title: Home Sweet Home!", "<strong>Content!</strong>")
// Taxonomy list // Taxonomy list
assertFileContent(t, fs, expectedFilePath(ugly, "public", "nn", "categories", "hugo"), true, th.assertFileContent(t, fs, expectedFilePath(ugly, "public", "nn", "categories", "hugo"), true,
"Taxonomy Title: Hugo") "Taxonomy Title: Hugo")
assertFileContent(t, fs, expectedFilePath(ugly, "public", "en", "categories", "hugo"), true, th.assertFileContent(t, fs, expectedFilePath(ugly, "public", "en", "categories", "hugo"), true,
"Taxonomy Title: Taxonomy Hugo") "Taxonomy Title: Taxonomy Hugo")
// Taxonomy terms // Taxonomy terms
assertFileContent(t, fs, expectedFilePath(ugly, "public", "nn", "categories"), true, th.assertFileContent(t, fs, expectedFilePath(ugly, "public", "nn", "categories"), true,
"Taxonomy Terms Title: Categories") "Taxonomy Terms Title: Categories")
assertFileContent(t, fs, expectedFilePath(ugly, "public", "en", "categories"), true, th.assertFileContent(t, fs, expectedFilePath(ugly, "public", "en", "categories"), true,
"Taxonomy Terms Title: Taxonomy Term Categories") "Taxonomy Terms Title: Taxonomy Term Categories")
// Sections // Sections
assertFileContent(t, fs, expectedFilePath(ugly, "public", "nn", "sect1"), true, th.assertFileContent(t, fs, expectedFilePath(ugly, "public", "nn", "sect1"), true,
"Section Title: Sect1s") "Section Title: Sect1s")
assertFileContent(t, fs, expectedFilePath(ugly, "public", "nn", "sect2"), true, th.assertFileContent(t, fs, expectedFilePath(ugly, "public", "nn", "sect2"), true,
"Section Title: Sect2s") "Section Title: Sect2s")
assertFileContent(t, fs, expectedFilePath(ugly, "public", "en", "sect1"), true, th.assertFileContent(t, fs, expectedFilePath(ugly, "public", "en", "sect1"), true,
"Section Title: Section1") "Section Title: Section1")
assertFileContent(t, fs, expectedFilePath(ugly, "public", "en", "sect2"), true, th.assertFileContent(t, fs, expectedFilePath(ugly, "public", "en", "sect2"), true,
"Section Title: Section2") "Section Title: Section2")
// Regular pages // Regular pages
assertFileContent(t, fs, expectedFilePath(ugly, "public", "en", "sect1", "regular1"), true, th.assertFileContent(t, fs, expectedFilePath(ugly, "public", "en", "sect1", "regular1"), true,
"Single Title: Page 01") "Single Title: Page 01")
assertFileContent(t, fs, expectedFilePath(ugly, "public", "nn", "sect1", "regular2"), true, th.assertFileContent(t, fs, expectedFilePath(ugly, "public", "nn", "sect1", "regular2"), true,
"Single Title: Page 02") "Single Title: Page 02")
// RSS // RSS
assertFileContent(t, fs, filepath.Join("public", "nn", "customrss.xml"), true, "Hugo på norsk", "<rss") th.assertFileContent(t, fs, filepath.Join("public", "nn", "customrss.xml"), true, "Hugo på norsk", "<rss")
assertFileContent(t, fs, filepath.Join("public", "nn", "sect1", "customrss.xml"), true, "Recent content in Sect1s on Hugo på norsk", "<rss") th.assertFileContent(t, fs, filepath.Join("public", "nn", "sect1", "customrss.xml"), true, "Recent content in Sect1s on Hugo på norsk", "<rss")
assertFileContent(t, fs, filepath.Join("public", "nn", "sect2", "customrss.xml"), true, "Recent content in Sect2s on Hugo på norsk", "<rss") th.assertFileContent(t, fs, filepath.Join("public", "nn", "sect2", "customrss.xml"), true, "Recent content in Sect2s on Hugo på norsk", "<rss")
assertFileContent(t, fs, filepath.Join("public", "nn", "categories", "hugo", "customrss.xml"), true, "Recent content in Hugo on Hugo på norsk", "<rss") th.assertFileContent(t, fs, filepath.Join("public", "nn", "categories", "hugo", "customrss.xml"), true, "Recent content in Hugo on Hugo på norsk", "<rss")
assertFileContent(t, fs, filepath.Join("public", "nn", "categories", "web", "customrss.xml"), true, "Recent content in Web on Hugo på norsk", "<rss") th.assertFileContent(t, fs, filepath.Join("public", "nn", "categories", "web", "customrss.xml"), true, "Recent content in Web on Hugo på norsk", "<rss")
assertFileContent(t, fs, filepath.Join("public", "en", "customrss.xml"), true, "Recent content in Home Sweet Home! on Hugo in English", "<rss") th.assertFileContent(t, fs, filepath.Join("public", "en", "customrss.xml"), true, "Recent content in Home Sweet Home! on Hugo in English", "<rss")
assertFileContent(t, fs, filepath.Join("public", "en", "sect1", "customrss.xml"), true, "Recent content in Section1 on Hugo in English", "<rss") th.assertFileContent(t, fs, filepath.Join("public", "en", "sect1", "customrss.xml"), true, "Recent content in Section1 on Hugo in English", "<rss")
assertFileContent(t, fs, filepath.Join("public", "en", "sect2", "customrss.xml"), true, "Recent content in Section2 on Hugo in English", "<rss") th.assertFileContent(t, fs, filepath.Join("public", "en", "sect2", "customrss.xml"), true, "Recent content in Section2 on Hugo in English", "<rss")
assertFileContent(t, fs, filepath.Join("public", "en", "categories", "hugo", "customrss.xml"), true, "Recent content in Taxonomy Hugo on Hugo in English", "<rss") th.assertFileContent(t, fs, filepath.Join("public", "en", "categories", "hugo", "customrss.xml"), true, "Recent content in Taxonomy Hugo on Hugo in English", "<rss")
assertFileContent(t, fs, filepath.Join("public", "en", "categories", "web", "customrss.xml"), true, "Recent content in Taxonomy Web on Hugo in English", "<rss") th.assertFileContent(t, fs, filepath.Join("public", "en", "categories", "web", "customrss.xml"), true, "Recent content in Taxonomy Web on Hugo in English", "<rss")
} }
func TestNodesWithTaxonomies(t *testing.T) { func TestNodesWithTaxonomies(t *testing.T) {
testCommonResetState() t.Parallel()
var (
cfg, fs = newTestCfg()
th = testHelper{cfg}
)
fs := hugofs.NewMem() cfg.Set("paginate", 1)
cfg.Set("title", "Hugo Rocks!")
viper.Set("paginate", 1)
viper.Set("title", "Hugo Rocks!")
writeLayoutsForNodeAsPageTests(t, fs) writeLayoutsForNodeAsPageTests(t, fs)
writeRegularPagesForNodeAsPageTests(t, fs) writeRegularPagesForNodeAsPageTests(t, fs)
@ -442,24 +448,26 @@ categories: [
--- ---
`) `)
h, err := NewHugoSitesFromConfiguration(deps.DepsCfg{Fs: fs}) h, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg})
require.NoError(t, err) require.NoError(t, err)
require.NoError(t, h.Build(BuildCfg{})) require.NoError(t, h.Build(BuildCfg{}))
assertFileContent(t, fs, filepath.Join("public", "categories", "hugo", "index.html"), true, "Taxonomy Title: Hugo", "# Pages: 5") th.assertFileContent(t, fs, filepath.Join("public", "categories", "hugo", "index.html"), true, "Taxonomy Title: Hugo", "# Pages: 5")
assertFileContent(t, fs, filepath.Join("public", "categories", "home", "index.html"), true, "Taxonomy Title: Home", "# Pages: 1") th.assertFileContent(t, fs, filepath.Join("public", "categories", "home", "index.html"), true, "Taxonomy Title: Home", "# Pages: 1")
} }
func TestNodesWithMenu(t *testing.T) { func TestNodesWithMenu(t *testing.T) {
testCommonResetState() t.Parallel()
var (
cfg, fs = newTestCfg()
th = testHelper{cfg}
)
viper.Set("paginate", 1) cfg.Set("paginate", 1)
viper.Set("title", "Hugo Rocks!") cfg.Set("title", "Hugo Rocks!")
fs := hugofs.NewMem()
writeLayoutsForNodeAsPageTests(t, fs) writeLayoutsForNodeAsPageTests(t, fs)
writeRegularPagesForNodeAsPageTests(t, fs) writeRegularPagesForNodeAsPageTests(t, fs)
@ -488,26 +496,28 @@ menu:
--- ---
`) `)
h, err := NewHugoSitesFromConfiguration(deps.DepsCfg{Fs: fs}) h, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg})
require.NoError(t, err) require.NoError(t, err)
require.NoError(t, h.Build(BuildCfg{})) require.NoError(t, h.Build(BuildCfg{}))
assertFileContent(t, fs, filepath.Join("public", "index.html"), true, "Home With Menu", "Home Menu Item: Go Home!: /") th.assertFileContent(t, fs, filepath.Join("public", "index.html"), true, "Home With Menu", "Home Menu Item: Go Home!: /")
assertFileContent(t, fs, filepath.Join("public", "sect1", "index.html"), true, "Sect1 With Menu", "Section Menu Item: Go Sect1!: /sect1/") th.assertFileContent(t, fs, filepath.Join("public", "sect1", "index.html"), true, "Sect1 With Menu", "Section Menu Item: Go Sect1!: /sect1/")
assertFileContent(t, fs, filepath.Join("public", "categories", "hugo", "index.html"), true, "Taxonomy With Menu", "Taxonomy Menu Item: Go Tax Hugo!: /categories/hugo/") th.assertFileContent(t, fs, filepath.Join("public", "categories", "hugo", "index.html"), true, "Taxonomy With Menu", "Taxonomy Menu Item: Go Tax Hugo!: /categories/hugo/")
} }
func TestNodesWithAlias(t *testing.T) { func TestNodesWithAlias(t *testing.T) {
testCommonResetState() t.Parallel()
var (
cfg, fs = newTestCfg()
th = testHelper{cfg}
)
fs := hugofs.NewMem() cfg.Set("paginate", 1)
cfg.Set("baseURL", "http://base/")
viper.Set("paginate", 1) cfg.Set("title", "Hugo Rocks!")
viper.Set("baseURL", "http://base/")
viper.Set("title", "Hugo Rocks!")
writeLayoutsForNodeAsPageTests(t, fs) writeLayoutsForNodeAsPageTests(t, fs)
writeRegularPagesForNodeAsPageTests(t, fs) writeRegularPagesForNodeAsPageTests(t, fs)
@ -519,24 +529,26 @@ aliases:
--- ---
`) `)
h, err := NewHugoSitesFromConfiguration(deps.DepsCfg{Fs: fs}) h, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg})
require.NoError(t, err) require.NoError(t, err)
require.NoError(t, h.Build(BuildCfg{})) require.NoError(t, h.Build(BuildCfg{}))
assertFileContent(t, fs, filepath.Join("public", "index.html"), true, "Home With Alias") th.assertFileContent(t, fs, filepath.Join("public", "index.html"), true, "Home With Alias")
assertFileContent(t, fs, filepath.Join("public", "my", "new", "home.html"), true, "content=\"0; url=http://base/") th.assertFileContent(t, fs, filepath.Join("public", "my", "new", "home.html"), true, "content=\"0; url=http://base/")
} }
func TestNodesWithSectionWithIndexPageOnly(t *testing.T) { func TestNodesWithSectionWithIndexPageOnly(t *testing.T) {
testCommonResetState() t.Parallel()
var (
cfg, fs = newTestCfg()
th = testHelper{cfg}
)
fs := hugofs.NewMem() cfg.Set("paginate", 1)
cfg.Set("title", "Hugo Rocks!")
viper.Set("paginate", 1)
viper.Set("title", "Hugo Rocks!")
writeLayoutsForNodeAsPageTests(t, fs) writeLayoutsForNodeAsPageTests(t, fs)
@ -546,24 +558,26 @@ title: MySection
My Section Content My Section Content
`) `)
h, err := NewHugoSitesFromConfiguration(deps.DepsCfg{Fs: fs}) h, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg})
require.NoError(t, err) require.NoError(t, err)
require.NoError(t, h.Build(BuildCfg{})) require.NoError(t, h.Build(BuildCfg{}))
assertFileContent(t, fs, filepath.Join("public", "sect", "index.html"), true, "My Section") th.assertFileContent(t, fs, filepath.Join("public", "sect", "index.html"), true, "My Section")
} }
func TestNodesWithURLs(t *testing.T) { func TestNodesWithURLs(t *testing.T) {
testCommonResetState() t.Parallel()
var (
cfg, fs = newTestCfg()
th = testHelper{cfg}
)
fs := hugofs.NewMem() cfg.Set("paginate", 1)
cfg.Set("title", "Hugo Rocks!")
viper.Set("paginate", 1) cfg.Set("baseURL", "http://bep.is/base/")
viper.Set("title", "Hugo Rocks!")
viper.Set("baseURL", "http://bep.is/base/")
writeLayoutsForNodeAsPageTests(t, fs) writeLayoutsForNodeAsPageTests(t, fs)
writeRegularPagesForNodeAsPageTests(t, fs) writeRegularPagesForNodeAsPageTests(t, fs)
@ -575,13 +589,13 @@ url: foo.html
My Section Content My Section Content
`) `)
h, err := NewHugoSitesFromConfiguration(deps.DepsCfg{Fs: fs}) h, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg})
require.NoError(t, err) require.NoError(t, err)
require.NoError(t, h.Build(BuildCfg{})) require.NoError(t, h.Build(BuildCfg{}))
assertFileContent(t, fs, filepath.Join("public", "sect", "index.html"), true, "My Section") th.assertFileContent(t, fs, filepath.Join("public", "sect", "index.html"), true, "My Section")
s := h.Sites[0] s := h.Sites[0]
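
Taken together, the test changes above follow one pattern: each test builds its own config and in-memory filesystem via newTestCfg and wraps the config in a testHelper, instead of mutating the shared global viper instance, and that isolation is what makes the added t.Parallel() calls safe. A minimal sketch of the shape these tests now share (the test name, content and assertion are illustrative, not one of the real tests):

func TestIsolatedBuildSketch(t *testing.T) {
	t.Parallel() // safe: nothing below touches package-level state

	var (
		cfg, fs = newTestCfg() // fresh config + in-memory filesystem per test
		th      = testHelper{cfg}
	)

	cfg.Set("title", "Hugo Rocks!")

	writeLayoutsForNodeAsPageTests(t, fs)
	writeRegularPagesForNodeAsPageTests(t, fs)

	h, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg})
	require.NoError(t, err)
	require.NoError(t, h.Build(BuildCfg{}))

	th.assertFileContent(t, fs, filepath.Join("public", "index.html"), false,
		"Index Title: Hugo Rocks!")
}
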

View file

@ -39,7 +39,6 @@ import (
"github.com/spf13/cast" "github.com/spf13/cast"
bp "github.com/spf13/hugo/bufferpool" bp "github.com/spf13/hugo/bufferpool"
"github.com/spf13/hugo/source" "github.com/spf13/hugo/source"
"github.com/spf13/viper"
) )
var ( var (
@ -195,8 +194,11 @@ type Page struct {
scratch *Scratch scratch *Scratch
// It would be tempting to use the language set on the Site, but in the way we do
// multi-site processing, these values may differ during the initial page processing.
language *helpers.Language language *helpers.Language
lang string
lang string
} }
// pageInit lazy initializes different parts of the page. It is extracted // pageInit lazy initializes different parts of the page. It is extracted
@ -518,10 +520,11 @@ func (p *Page) renderContent(content []byte) []byte {
return p.Site.SourceRelativeLinkFile(ref, p) return p.Site.SourceRelativeLinkFile(ref, p)
} }
} }
return helpers.RenderBytes(&helpers.RenderingContext{
return p.s.ContentSpec.RenderBytes(&helpers.RenderingContext{
Content: content, RenderTOC: true, PageFmt: p.determineMarkupType(), Content: content, RenderTOC: true, PageFmt: p.determineMarkupType(),
ConfigProvider: p.Language(), Cfg: p.Language(),
DocumentID: p.UniqueID(), DocumentName: p.Path(), DocumentID: p.UniqueID(), DocumentName: p.Path(),
Config: p.getRenderingConfig(), LinkResolver: fn, FileResolver: fileFn}) Config: p.getRenderingConfig(), LinkResolver: fn, FileResolver: fileFn})
} }
@ -532,7 +535,7 @@ func (p *Page) getRenderingConfig() *helpers.Blackfriday {
if p.Language() == nil { if p.Language() == nil {
panic(fmt.Sprintf("nil language for %s with source lang %s", p.BaseFileName(), p.lang)) panic(fmt.Sprintf("nil language for %s with source lang %s", p.BaseFileName(), p.lang))
} }
p.renderingConfig = helpers.NewBlackfriday(p.Language()) p.renderingConfig = p.s.ContentSpec.NewBlackfriday()
if err := mapstructure.Decode(pageParam, p.renderingConfig); err != nil { if err := mapstructure.Decode(pageParam, p.renderingConfig); err != nil {
p.s.Log.FATAL.Printf("Failed to get rendering config for %s:\n%s", p.BaseFileName(), err.Error()) p.s.Log.FATAL.Printf("Failed to get rendering config for %s:\n%s", p.BaseFileName(), err.Error())
@ -544,15 +547,18 @@ func (p *Page) getRenderingConfig() *helpers.Blackfriday {
} }
func (s *Site) newPage(filename string) *Page { func (s *Site) newPage(filename string) *Page {
sp := source.NewSourceSpec(s.Cfg, s.Fs)
page := Page{ page := Page{
pageInit: &pageInit{}, pageInit: &pageInit{},
Kind: kindFromFilename(filename), Kind: kindFromFilename(filename),
contentType: "", contentType: "",
Source: Source{File: *source.NewFile(filename)}, Source: Source{File: *sp.NewFile(filename)},
Keywords: []string{}, Sitemap: Sitemap{Priority: -1}, Keywords: []string{}, Sitemap: Sitemap{Priority: -1},
Params: make(map[string]interface{}), Params: make(map[string]interface{}),
translations: make(Pages, 0), translations: make(Pages, 0),
sections: sectionsFromFilename(filename), sections: sectionsFromFilename(filename),
Site: &s.Info,
s: s,
} }
s.Log.DEBUG.Println("Reading from", page.File.Path()) s.Log.DEBUG.Println("Reading from", page.File.Path())
@ -799,7 +805,7 @@ func (p *Page) Extension() string {
if p.extension != "" { if p.extension != "" {
return p.extension return p.extension
} }
return viper.GetString("defaultExtension") return p.s.Cfg.GetString("defaultExtension")
} }
// AllTranslations returns all translations, including the current Page. // AllTranslations returns all translations, including the current Page.
@ -832,8 +838,8 @@ func (p *Page) LinkTitle() string {
} }
func (p *Page) shouldBuild() bool { func (p *Page) shouldBuild() bool {
return shouldBuild(viper.GetBool("buildFuture"), viper.GetBool("buildExpired"), return shouldBuild(p.s.Cfg.GetBool("buildFuture"), p.s.Cfg.GetBool("buildExpired"),
viper.GetBool("buildDrafts"), p.Draft, p.PublishDate, p.ExpiryDate) p.s.Cfg.GetBool("buildDrafts"), p.Draft, p.PublishDate, p.ExpiryDate)
} }
func shouldBuild(buildFuture bool, buildExpired bool, buildDrafts bool, Draft bool, func shouldBuild(buildFuture bool, buildExpired bool, buildDrafts bool, Draft bool,
@ -886,7 +892,7 @@ func (p *Page) URL() string {
func (p *Page) RelPermalink() string { func (p *Page) RelPermalink() string {
link := p.getPermalink() link := p.getPermalink()
if viper.GetBool("canonifyURLs") { if p.s.Cfg.GetBool("canonifyURLs") {
// replacements for relpermalink with baseURL on the form http://myhost.com/sub/ will fail later on // replacements for relpermalink with baseURL on the form http://myhost.com/sub/ will fail later on
// have to return the URL relative from baseURL // have to return the URL relative from baseURL
relpath, err := helpers.GetRelativePath(link.String(), string(p.Site.BaseURL)) relpath, err := helpers.GetRelativePath(link.String(), string(p.Site.BaseURL))
@ -1047,8 +1053,8 @@ func (p *Page) update(f interface{}) error {
p.Draft = !*published p.Draft = !*published
} }
if p.Date.IsZero() && viper.GetBool("useModTimeAsFallback") { if p.Date.IsZero() && p.s.Cfg.GetBool("useModTimeAsFallback") {
fi, err := p.s.Fs.Source.Stat(filepath.Join(helpers.AbsPathify(viper.GetString("contentDir")), p.File.Path())) fi, err := p.s.Fs.Source.Stat(filepath.Join(p.s.PathSpec.AbsPathify(p.s.Cfg.GetString("contentDir")), p.File.Path()))
if err == nil { if err == nil {
p.Date = fi.ModTime() p.Date = fi.ModTime()
} }
@ -1060,7 +1066,7 @@ func (p *Page) update(f interface{}) error {
if isCJKLanguage != nil { if isCJKLanguage != nil {
p.isCJKLanguage = *isCJKLanguage p.isCJKLanguage = *isCJKLanguage
} else if viper.GetBool("hasCJKLanguage") { } else if p.s.Cfg.GetBool("hasCJKLanguage") {
if cjk.Match(p.rawContent) { if cjk.Match(p.rawContent) {
p.isCJKLanguage = true p.isCJKLanguage = true
} else { } else {
@ -1378,10 +1384,9 @@ func (p *Page) saveSourceAs(path string, safe bool) error {
func (p *Page) saveSource(by []byte, inpath string, safe bool) (err error) { func (p *Page) saveSource(by []byte, inpath string, safe bool) (err error) {
if !filepath.IsAbs(inpath) { if !filepath.IsAbs(inpath) {
inpath = helpers.AbsPathify(inpath) inpath = p.s.PathSpec.AbsPathify(inpath)
} }
p.s.Log.INFO.Println("creating", inpath) p.s.Log.INFO.Println("creating", inpath)
if safe { if safe {
err = helpers.SafeWriteToDisk(inpath, bytes.NewReader(by), p.s.Fs.Source) err = helpers.SafeWriteToDisk(inpath, bytes.NewReader(by), p.s.Fs.Source)
} else { } else {
@ -1691,25 +1696,27 @@ func (p *Page) initLanguage() {
if p.language != nil { if p.language != nil {
return return
} }
pageLang := p.lang
ml := p.Site.multilingual ml := p.Site.multilingual
if ml == nil { if ml == nil {
panic("Multilanguage not set") panic("Multilanguage not set")
} }
if pageLang == "" { if p.lang == "" {
p.lang = ml.DefaultLang.Lang
p.language = ml.DefaultLang p.language = ml.DefaultLang
return return
} }
language := ml.Language(pageLang) language := ml.Language(p.lang)
if language == nil { if language == nil {
// It can be a file named stefano.chiodino.md. // It can be a file named stefano.chiodino.md.
p.s.Log.WARN.Printf("Page language (if it is that) not found in multilang setup: %s.", pageLang) p.s.Log.WARN.Printf("Page language (if it is that) not found in multilang setup: %s.", p.lang)
language = ml.DefaultLang language = ml.DefaultLang
} }
p.language = language p.language = language
}) })
} }
@ -1743,6 +1750,7 @@ func (p *Page) addLangFilepathPrefix(outfile string) string {
if outfile == "" { if outfile == "" {
outfile = helpers.FilePathSeparator outfile = helpers.FilePathSeparator
} }
if !p.shouldAddLanguagePrefix() { if !p.shouldAddLanguagePrefix() {
return outfile return outfile
} }
@ -1795,7 +1803,4 @@ func (p *Page) setValuesForKind(s *Site) {
case KindTaxonomyTerm: case KindTaxonomyTerm:
p.URLPath.URL = "/" + path.Join(p.sections...) + "/" p.URLPath.URL = "/" + path.Join(p.sections...) + "/"
} }
p.s = s
} }
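
The page.go hunks above all apply the same mechanical rule: anything that used to read package-global state (viper.GetString, viper.GetBool, helpers.AbsPathify) now reads through the page's owning site, p.s.Cfg for configuration and p.s.PathSpec for path handling. A hedged sketch of the resulting shape, condensed from the hunks above rather than copied verbatim from the file (the sketch* names are illustrative):

// Sketch only: configuration flows from the Site into each Page, so sites
// with different configs can be built side by side, and tests can run in parallel.
func (p *Page) sketchExtension() string {
	if p.extension != "" {
		return p.extension
	}
	return p.s.Cfg.GetString("defaultExtension") // per-site config, no global viper
}

func (p *Page) sketchShouldBuild() bool {
	// shouldBuild stays a pure function; every input is passed explicitly.
	return shouldBuild(p.s.Cfg.GetBool("buildFuture"), p.s.Cfg.GetBool("buildExpired"),
		p.s.Cfg.GetBool("buildDrafts"), p.Draft, p.PublishDate, p.ExpiryDate)
}
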

View file

@ -14,13 +14,15 @@
package hugolib package hugolib
import ( import (
"github.com/stretchr/testify/assert"
"sync" "sync"
"sync/atomic" "sync/atomic"
"testing" "testing"
"github.com/stretchr/testify/assert"
) )
func TestPageCache(t *testing.T) { func TestPageCache(t *testing.T) {
t.Parallel()
c1 := newPageCache() c1 := newPageCache()
changeFirst := func(p Pages) { changeFirst := func(p Pages) {
@ -37,8 +39,10 @@ func TestPageCache(t *testing.T) {
var testPageSets []Pages var testPageSets []Pages
s := newTestSite(t)
for i := 0; i < 50; i++ { for i := 0; i < 50; i++ {
testPageSets = append(testPageSets, createSortTestPages(i+1)) testPageSets = append(testPageSets, createSortTestPages(s, i+1))
} }
for j := 0; j < 100; j++ { for j := 0; j < 100; j++ {

View file

@ -38,24 +38,26 @@ var pageGroupTestSources = []pageGroupTestObject{
} }
func preparePageGroupTestPages(t *testing.T) Pages { func preparePageGroupTestPages(t *testing.T) Pages {
s := newTestSite(t)
var pages Pages var pages Pages
for _, s := range pageGroupTestSources { for _, src := range pageGroupTestSources {
p, err := pageTestSite.NewPage(filepath.FromSlash(s.path)) p, err := s.NewPage(filepath.FromSlash(src.path))
if err != nil { if err != nil {
t.Fatalf("failed to prepare test page %s", s.path) t.Fatalf("failed to prepare test page %s", src.path)
} }
p.Weight = s.weight p.Weight = src.weight
p.Date = cast.ToTime(s.date) p.Date = cast.ToTime(src.date)
p.PublishDate = cast.ToTime(s.date) p.PublishDate = cast.ToTime(src.date)
p.ExpiryDate = cast.ToTime(s.date) p.ExpiryDate = cast.ToTime(src.date)
p.Params["custom_param"] = s.param p.Params["custom_param"] = src.param
p.Params["custom_date"] = cast.ToTime(s.date) p.Params["custom_date"] = cast.ToTime(src.date)
pages = append(pages, p) pages = append(pages, p)
} }
return pages return pages
} }
func TestGroupByWithFieldNameArg(t *testing.T) { func TestGroupByWithFieldNameArg(t *testing.T) {
t.Parallel()
pages := preparePageGroupTestPages(t) pages := preparePageGroupTestPages(t)
expect := PagesGroup{ expect := PagesGroup{
{Key: 1, Pages: Pages{pages[3], pages[4]}}, {Key: 1, Pages: Pages{pages[3], pages[4]}},
@ -73,6 +75,7 @@ func TestGroupByWithFieldNameArg(t *testing.T) {
} }
func TestGroupByWithMethodNameArg(t *testing.T) { func TestGroupByWithMethodNameArg(t *testing.T) {
t.Parallel()
pages := preparePageGroupTestPages(t) pages := preparePageGroupTestPages(t)
expect := PagesGroup{ expect := PagesGroup{
{Key: "section1", Pages: Pages{pages[0], pages[1], pages[2]}}, {Key: "section1", Pages: Pages{pages[0], pages[1], pages[2]}},
@ -89,6 +92,7 @@ func TestGroupByWithMethodNameArg(t *testing.T) {
} }
func TestGroupByWithSectionArg(t *testing.T) { func TestGroupByWithSectionArg(t *testing.T) {
t.Parallel()
pages := preparePageGroupTestPages(t) pages := preparePageGroupTestPages(t)
expect := PagesGroup{ expect := PagesGroup{
{Key: "section1", Pages: Pages{pages[0], pages[1], pages[2]}}, {Key: "section1", Pages: Pages{pages[0], pages[1], pages[2]}},
@ -105,6 +109,7 @@ func TestGroupByWithSectionArg(t *testing.T) {
} }
func TestGroupByInReverseOrder(t *testing.T) { func TestGroupByInReverseOrder(t *testing.T) {
t.Parallel()
pages := preparePageGroupTestPages(t) pages := preparePageGroupTestPages(t)
expect := PagesGroup{ expect := PagesGroup{
{Key: 3, Pages: Pages{pages[0], pages[1]}}, {Key: 3, Pages: Pages{pages[0], pages[1]}},
@ -122,6 +127,7 @@ func TestGroupByInReverseOrder(t *testing.T) {
} }
func TestGroupByCalledWithEmptyPages(t *testing.T) { func TestGroupByCalledWithEmptyPages(t *testing.T) {
t.Parallel()
var pages Pages var pages Pages
groups, err := pages.GroupBy("Weight") groups, err := pages.GroupBy("Weight")
if err != nil { if err != nil {
@ -133,6 +139,7 @@ func TestGroupByCalledWithEmptyPages(t *testing.T) {
} }
func TestGroupByCalledWithUnavailableKey(t *testing.T) { func TestGroupByCalledWithUnavailableKey(t *testing.T) {
t.Parallel()
pages := preparePageGroupTestPages(t) pages := preparePageGroupTestPages(t)
_, err := pages.GroupBy("UnavailableKey") _, err := pages.GroupBy("UnavailableKey")
if err == nil { if err == nil {
@ -157,6 +164,7 @@ func (page *Page) dummyPageMethodReturnTwoValueForTest() (string, string) {
} }
func TestGroupByCalledWithInvalidMethod(t *testing.T) { func TestGroupByCalledWithInvalidMethod(t *testing.T) {
t.Parallel()
var err error var err error
pages := preparePageGroupTestPages(t) pages := preparePageGroupTestPages(t)
@ -182,6 +190,7 @@ func TestGroupByCalledWithInvalidMethod(t *testing.T) {
} }
func TestReverse(t *testing.T) { func TestReverse(t *testing.T) {
t.Parallel()
pages := preparePageGroupTestPages(t) pages := preparePageGroupTestPages(t)
groups1, err := pages.GroupBy("Weight", "desc") groups1, err := pages.GroupBy("Weight", "desc")
@ -201,6 +210,7 @@ func TestReverse(t *testing.T) {
} }
func TestGroupByParam(t *testing.T) { func TestGroupByParam(t *testing.T) {
t.Parallel()
pages := preparePageGroupTestPages(t) pages := preparePageGroupTestPages(t)
expect := PagesGroup{ expect := PagesGroup{
{Key: "bar", Pages: Pages{pages[1], pages[3]}}, {Key: "bar", Pages: Pages{pages[1], pages[3]}},
@ -218,6 +228,7 @@ func TestGroupByParam(t *testing.T) {
} }
func TestGroupByParamInReverseOrder(t *testing.T) { func TestGroupByParamInReverseOrder(t *testing.T) {
t.Parallel()
pages := preparePageGroupTestPages(t) pages := preparePageGroupTestPages(t)
expect := PagesGroup{ expect := PagesGroup{
{Key: "foo", Pages: Pages{pages[0], pages[2]}}, {Key: "foo", Pages: Pages{pages[0], pages[2]}},
@ -237,7 +248,8 @@ func TestGroupByParamInReverseOrder(t *testing.T) {
func TestGroupByParamCalledWithCapitalLetterString(t *testing.T) { func TestGroupByParamCalledWithCapitalLetterString(t *testing.T) {
testStr := "TestString" testStr := "TestString"
f := "/section1/test_capital.md" f := "/section1/test_capital.md"
p, err := pageTestSite.NewPage(filepath.FromSlash(f)) s := newTestSite(t)
p, err := s.NewPage(filepath.FromSlash(f))
if err != nil { if err != nil {
t.Fatalf("failed to prepare test page %s", f) t.Fatalf("failed to prepare test page %s", f)
} }
@ -254,6 +266,7 @@ func TestGroupByParamCalledWithCapitalLetterString(t *testing.T) {
} }
func TestGroupByParamCalledWithSomeUnavailableParams(t *testing.T) { func TestGroupByParamCalledWithSomeUnavailableParams(t *testing.T) {
t.Parallel()
pages := preparePageGroupTestPages(t) pages := preparePageGroupTestPages(t)
delete(pages[1].Params, "custom_param") delete(pages[1].Params, "custom_param")
delete(pages[3].Params, "custom_param") delete(pages[3].Params, "custom_param")
@ -273,6 +286,7 @@ func TestGroupByParamCalledWithSomeUnavailableParams(t *testing.T) {
} }
func TestGroupByParamCalledWithEmptyPages(t *testing.T) { func TestGroupByParamCalledWithEmptyPages(t *testing.T) {
t.Parallel()
var pages Pages var pages Pages
groups, err := pages.GroupByParam("custom_param") groups, err := pages.GroupByParam("custom_param")
if err != nil { if err != nil {
@ -284,6 +298,7 @@ func TestGroupByParamCalledWithEmptyPages(t *testing.T) {
} }
func TestGroupByParamCalledWithUnavailableParam(t *testing.T) { func TestGroupByParamCalledWithUnavailableParam(t *testing.T) {
t.Parallel()
pages := preparePageGroupTestPages(t) pages := preparePageGroupTestPages(t)
_, err := pages.GroupByParam("unavailable_param") _, err := pages.GroupByParam("unavailable_param")
if err == nil { if err == nil {
@ -292,6 +307,7 @@ func TestGroupByParamCalledWithUnavailableParam(t *testing.T) {
} }
func TestGroupByDate(t *testing.T) { func TestGroupByDate(t *testing.T) {
t.Parallel()
pages := preparePageGroupTestPages(t) pages := preparePageGroupTestPages(t)
expect := PagesGroup{ expect := PagesGroup{
{Key: "2012-04", Pages: Pages{pages[4], pages[2], pages[0]}}, {Key: "2012-04", Pages: Pages{pages[4], pages[2], pages[0]}},
@ -309,6 +325,7 @@ func TestGroupByDate(t *testing.T) {
} }
func TestGroupByDateInReverseOrder(t *testing.T) { func TestGroupByDateInReverseOrder(t *testing.T) {
t.Parallel()
pages := preparePageGroupTestPages(t) pages := preparePageGroupTestPages(t)
expect := PagesGroup{ expect := PagesGroup{
{Key: "2012-01", Pages: Pages{pages[1]}}, {Key: "2012-01", Pages: Pages{pages[1]}},
@ -326,6 +343,7 @@ func TestGroupByDateInReverseOrder(t *testing.T) {
} }
func TestGroupByPublishDate(t *testing.T) { func TestGroupByPublishDate(t *testing.T) {
t.Parallel()
pages := preparePageGroupTestPages(t) pages := preparePageGroupTestPages(t)
expect := PagesGroup{ expect := PagesGroup{
{Key: "2012-04", Pages: Pages{pages[4], pages[2], pages[0]}}, {Key: "2012-04", Pages: Pages{pages[4], pages[2], pages[0]}},
@ -343,6 +361,7 @@ func TestGroupByPublishDate(t *testing.T) {
} }
func TestGroupByPublishDateInReverseOrder(t *testing.T) { func TestGroupByPublishDateInReverseOrder(t *testing.T) {
t.Parallel()
pages := preparePageGroupTestPages(t) pages := preparePageGroupTestPages(t)
expect := PagesGroup{ expect := PagesGroup{
{Key: "2012-01", Pages: Pages{pages[1]}}, {Key: "2012-01", Pages: Pages{pages[1]}},
@ -360,6 +379,7 @@ func TestGroupByPublishDateInReverseOrder(t *testing.T) {
} }
func TestGroupByPublishDateWithEmptyPages(t *testing.T) { func TestGroupByPublishDateWithEmptyPages(t *testing.T) {
t.Parallel()
var pages Pages var pages Pages
groups, err := pages.GroupByPublishDate("2006-01") groups, err := pages.GroupByPublishDate("2006-01")
if err != nil { if err != nil {
@ -371,6 +391,7 @@ func TestGroupByPublishDateWithEmptyPages(t *testing.T) {
} }
func TestGroupByExpiryDate(t *testing.T) { func TestGroupByExpiryDate(t *testing.T) {
t.Parallel()
pages := preparePageGroupTestPages(t) pages := preparePageGroupTestPages(t)
expect := PagesGroup{ expect := PagesGroup{
{Key: "2012-04", Pages: Pages{pages[4], pages[2], pages[0]}}, {Key: "2012-04", Pages: Pages{pages[4], pages[2], pages[0]}},
@ -388,6 +409,7 @@ func TestGroupByExpiryDate(t *testing.T) {
} }
func TestGroupByParamDate(t *testing.T) { func TestGroupByParamDate(t *testing.T) {
t.Parallel()
pages := preparePageGroupTestPages(t) pages := preparePageGroupTestPages(t)
expect := PagesGroup{ expect := PagesGroup{
{Key: "2012-04", Pages: Pages{pages[4], pages[2], pages[0]}}, {Key: "2012-04", Pages: Pages{pages[4], pages[2], pages[0]}},
@ -405,6 +427,7 @@ func TestGroupByParamDate(t *testing.T) {
} }
func TestGroupByParamDateInReverseOrder(t *testing.T) { func TestGroupByParamDateInReverseOrder(t *testing.T) {
t.Parallel()
pages := preparePageGroupTestPages(t) pages := preparePageGroupTestPages(t)
expect := PagesGroup{ expect := PagesGroup{
{Key: "2012-01", Pages: Pages{pages[1]}}, {Key: "2012-01", Pages: Pages{pages[1]}},
@ -422,6 +445,7 @@ func TestGroupByParamDateInReverseOrder(t *testing.T) {
} }
func TestGroupByParamDateWithEmptyPages(t *testing.T) { func TestGroupByParamDateWithEmptyPages(t *testing.T) {
t.Parallel()
var pages Pages var pages Pages
groups, err := pages.GroupByParamDate("custom_date", "2006-01") groups, err := pages.GroupByParamDate("custom_date", "2006-01")
if err != nil { if err != nil {

View file

@ -68,7 +68,7 @@ var defaultPageSort = func(p1, p2 *Page) bool {
} }
var languagePageSort = func(p1, p2 *Page) bool { var languagePageSort = func(p1, p2 *Page) bool {
if p1.language.Weight == p2.language.Weight { if p1.Language().Weight == p2.Language().Weight {
if p1.Date.Unix() == p2.Date.Unix() { if p1.Date.Unix() == p2.Date.Unix() {
if p1.LinkTitle() == p2.LinkTitle() { if p1.LinkTitle() == p2.LinkTitle() {
return (p1.FullFilePath() < p2.FullFilePath()) return (p1.FullFilePath() < p2.FullFilePath())
@ -78,15 +78,15 @@ var languagePageSort = func(p1, p2 *Page) bool {
return p1.Date.Unix() > p2.Date.Unix() return p1.Date.Unix() > p2.Date.Unix()
} }
if p2.language.Weight == 0 { if p2.Language().Weight == 0 {
return true return true
} }
if p1.language.Weight == 0 { if p1.Language().Weight == 0 {
return false return false
} }
return p1.language.Weight < p2.language.Weight return p1.Language().Weight < p2.Language().Weight
} }
func (ps *pageSorter) Len() int { return len(ps.pages) } func (ps *pageSorter) Len() int { return len(ps.pages) }
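
The only behavioral change in this file is that the language comparison goes through the Language() accessor instead of the raw language field; per the page.go hunk earlier in this commit, the accessor lazily resolves the language from the site's multilingual setup, so the weight is never read off an uninitialized field. A condensed sketch of the ordering it implements (illustrative name, and it drops the link-title and file-path tie-breakers shown above):

// Sketch of languagePageSort's ordering, condensed from the hunk above:
// 1. equal language weights: newer date first (then title, then path in the real code)
// 2. a zero weight (unset language) always sorts after a non-zero weight
// 3. otherwise the smaller language weight comes first
func sketchLessByLanguage(p1, p2 *Page) bool {
	w1, w2 := p1.Language().Weight, p2.Language().Weight
	if w1 == w2 {
		return p1.Date.Unix() > p2.Date.Unix()
	}
	if w2 == 0 {
		return true
	}
	if w1 == 0 {
		return false
	}
	return w1 < w2
}
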

View file

@ -21,19 +21,19 @@ import (
"time" "time"
"github.com/spf13/cast" "github.com/spf13/cast"
"github.com/spf13/hugo/helpers"
"github.com/spf13/hugo/source"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
func TestDefaultSort(t *testing.T) { func TestDefaultSort(t *testing.T) {
t.Parallel()
d1 := time.Now() d1 := time.Now()
d2 := d1.Add(-1 * time.Hour) d2 := d1.Add(-1 * time.Hour)
d3 := d1.Add(-2 * time.Hour) d3 := d1.Add(-2 * time.Hour)
d4 := d1.Add(-3 * time.Hour) d4 := d1.Add(-3 * time.Hour)
p := createSortTestPages(4) s := newTestSite(t)
p := createSortTestPages(s, 4)
// first by weight // first by weight
setSortVals([4]time.Time{d1, d2, d3, d4}, [4]string{"b", "a", "c", "d"}, [4]int{4, 3, 2, 1}, p) setSortVals([4]time.Time{d1, d2, d3, d4}, [4]string{"b", "a", "c", "d"}, [4]int{4, 3, 2, 1}, p)
@ -61,13 +61,14 @@ func TestDefaultSort(t *testing.T) {
} }
func TestSortByN(t *testing.T) { func TestSortByN(t *testing.T) {
t.Parallel()
s := newTestSite(t)
d1 := time.Now() d1 := time.Now()
d2 := d1.Add(-2 * time.Hour) d2 := d1.Add(-2 * time.Hour)
d3 := d1.Add(-10 * time.Hour) d3 := d1.Add(-10 * time.Hour)
d4 := d1.Add(-20 * time.Hour) d4 := d1.Add(-20 * time.Hour)
p := createSortTestPages(4) p := createSortTestPages(s, 4)
for i, this := range []struct { for i, this := range []struct {
sortFunc func(p Pages) Pages sortFunc func(p Pages) Pages
@ -93,7 +94,9 @@ func TestSortByN(t *testing.T) {
} }
func TestLimit(t *testing.T) { func TestLimit(t *testing.T) {
p := createSortTestPages(10) t.Parallel()
s := newTestSite(t)
p := createSortTestPages(s, 10)
firstFive := p.Limit(5) firstFive := p.Limit(5)
assert.Equal(t, 5, len(firstFive)) assert.Equal(t, 5, len(firstFive))
for i := 0; i < 5; i++ { for i := 0; i < 5; i++ {
@ -104,7 +107,9 @@ func TestLimit(t *testing.T) {
} }
func TestPageSortReverse(t *testing.T) { func TestPageSortReverse(t *testing.T) {
p1 := createSortTestPages(10) t.Parallel()
s := newTestSite(t)
p1 := createSortTestPages(s, 10)
assert.Equal(t, 0, p1[0].fuzzyWordCount) assert.Equal(t, 0, p1[0].fuzzyWordCount)
assert.Equal(t, 9, p1[9].fuzzyWordCount) assert.Equal(t, 9, p1[9].fuzzyWordCount)
p2 := p1.Reverse() p2 := p1.Reverse()
@ -115,9 +120,11 @@ func TestPageSortReverse(t *testing.T) {
} }
func TestPageSortByParam(t *testing.T) { func TestPageSortByParam(t *testing.T) {
t.Parallel()
var k interface{} = "arbitrary" var k interface{} = "arbitrary"
s := newTestSite(t)
unsorted := createSortTestPages(10) unsorted := createSortTestPages(s, 10)
delete(unsorted[9].Params, cast.ToString(k)) delete(unsorted[9].Params, cast.ToString(k))
firstSetValue, _ := unsorted[0].Param(k) firstSetValue, _ := unsorted[0].Param(k)
@ -143,8 +150,8 @@ func TestPageSortByParam(t *testing.T) {
} }
func BenchmarkSortByWeightAndReverse(b *testing.B) { func BenchmarkSortByWeightAndReverse(b *testing.B) {
s := newTestSite(b)
p := createSortTestPages(300) p := createSortTestPages(s, 300)
b.ResetTimer() b.ResetTimer()
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
@ -169,32 +176,25 @@ func setSortVals(dates [4]time.Time, titles [4]string, weights [4]int, pages Pag
pages[1].Lastmod = lastLastMod pages[1].Lastmod = lastLastMod
} }
func createSortTestPages(num int) Pages { func createSortTestPages(s *Site, num int) Pages {
pages := make(Pages, num) pages := make(Pages, num)
info := newSiteInfo(siteBuilderCfg{baseURL: "http://base", language: helpers.NewDefaultLanguage()})
for i := 0; i < num; i++ { for i := 0; i < num; i++ {
pages[i] = &Page{ p := s.newPage(filepath.FromSlash(fmt.Sprintf("/x/y/p%d.md", i)))
pageInit: &pageInit{}, p.Params = map[string]interface{}{
URLPath: URLPath{ "arbitrary": "xyz" + fmt.Sprintf("%v", 100-i),
Section: "z",
URL: fmt.Sprintf("http://base/x/y/p%d.html", i),
},
Site: &info,
Source: Source{File: *source.NewFile(filepath.FromSlash(fmt.Sprintf("/x/y/p%d.md", i)))},
Params: map[string]interface{}{
"arbitrary": "xyz" + fmt.Sprintf("%v", 100-i),
},
} }
w := 5 w := 5
if i%2 == 0 { if i%2 == 0 {
w = 10 w = 10
} }
pages[i].fuzzyWordCount = i p.fuzzyWordCount = i
pages[i].Weight = w p.Weight = w
pages[i].Description = "initial" p.Description = "initial"
pages[i] = p
} }
return pages return pages
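
With createSortTestPages taking a *Site, every sort test builds its fixture pages through s.newPage instead of hand-assembling Page structs, so the pages are wired to a site the same way production pages are. A small sketch of the calling pattern the tests above use (the test itself is illustrative):

func TestSortFixtureSketch(t *testing.T) {
	t.Parallel()
	s := newTestSite(t)                 // fresh Site per test, no shared globals
	pages := createSortTestPages(s, 10) // fixture pages created via s.newPage
	firstFive := pages.Limit(5)         // Limit is exercised by TestLimit above
	if len(firstFive) != 5 {
		t.Fatalf("expected 5 pages, got %d", len(firstFive))
	}
}
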

View file

@ -14,18 +14,18 @@
package hugolib package hugolib
import ( import (
"fmt"
"html/template" "html/template"
"path/filepath" "path/filepath"
"testing" "testing"
"github.com/spf13/hugo/helpers" "github.com/stretchr/testify/require"
"github.com/spf13/hugo/source"
"github.com/spf13/viper" "github.com/spf13/hugo/deps"
) )
// TODO(bep) globals test siteinfo func TestPermalink(t *testing.T) {
func _TestPermalink(t *testing.T) { t.Parallel()
testCommonResetState()
tests := []struct { tests := []struct {
file string file string
@ -50,51 +50,53 @@ func _TestPermalink(t *testing.T) {
{"x/y/z/boofar.md", "", "boofar", "", true, false, "/x/y/z/boofar.html", "/x/y/z/boofar.html"}, {"x/y/z/boofar.md", "", "boofar", "", true, false, "/x/y/z/boofar.html", "/x/y/z/boofar.html"},
{"x/y/z/boofar.md", "http://barnew/", "", "", true, false, "http://barnew/x/y/z/boofar.html", "/x/y/z/boofar.html"}, {"x/y/z/boofar.md", "http://barnew/", "", "", true, false, "http://barnew/x/y/z/boofar.html", "/x/y/z/boofar.html"},
{"x/y/z/boofar.md", "http://barnew/", "boofar", "", true, false, "http://barnew/x/y/z/boofar.html", "/x/y/z/boofar.html"}, {"x/y/z/boofar.md", "http://barnew/", "boofar", "", true, false, "http://barnew/x/y/z/boofar.html", "/x/y/z/boofar.html"},
{"x/y/z/boofar.md", "http://barnew/boo/", "boofar", "", true, false, "http://barnew/boo/x/y/z/boofar.html", "/boo/x/y/z/boofar.html"}, {"x/y/z/boofar.md", "http://barnew/boo/", "booslug", "", true, false, "http://barnew/boo/x/y/z/booslug.html", "/boo/x/y/z/booslug.html"},
{"x/y/z/boofar.md", "http://barnew/boo/", "boofar", "", false, true, "http://barnew/boo/x/y/z/boofar/", "/x/y/z/boofar/"}, {"x/y/z/boofar.md", "http://barnew/boo/", "booslug", "", false, true, "http://barnew/boo/x/y/z/booslug/", "/x/y/z/booslug/"},
{"x/y/z/boofar.md", "http://barnew/boo/", "boofar", "", false, false, "http://barnew/boo/x/y/z/boofar/", "/boo/x/y/z/boofar/"}, {"x/y/z/boofar.md", "http://barnew/boo/", "booslug", "", false, false, "http://barnew/boo/x/y/z/booslug/", "/boo/x/y/z/booslug/"},
{"x/y/z/boofar.md", "http://barnew/boo/", "boofar", "", true, true, "http://barnew/boo/x/y/z/boofar.html", "/x/y/z/boofar.html"}, {"x/y/z/boofar.md", "http://barnew/boo/", "booslug", "", true, true, "http://barnew/boo/x/y/z/booslug.html", "/x/y/z/booslug.html"},
{"x/y/z/boofar.md", "http://barnew/boo", "boofar", "", true, true, "http://barnew/boo/x/y/z/boofar.html", "/x/y/z/boofar.html"}, {"x/y/z/boofar.md", "http://barnew/boo", "booslug", "", true, true, "http://barnew/boo/x/y/z/booslug.html", "/x/y/z/booslug.html"},
// test URL overrides // test URL overrides
{"x/y/z/boofar.md", "", "", "/z/y/q/", false, false, "/z/y/q/", "/z/y/q/"}, {"x/y/z/boofar.md", "", "", "/z/y/q/", false, false, "/z/y/q/", "/z/y/q/"},
} }
viper.Set("defaultExtension", "html")
for i, test := range tests { for i, test := range tests {
viper.Set("uglyURLs", test.uglyURLs)
viper.Set("canonifyURLs", test.canonifyURLs)
info := newSiteInfo(siteBuilderCfg{baseURL: string(test.base), language: helpers.NewDefaultLanguage()})
p := &Page{ cfg, fs := newTestCfg()
pageInit: &pageInit{},
Kind: KindPage,
URLPath: URLPath{
Section: "z",
URL: test.url,
},
Site: &info,
Source: Source{File: *source.NewFile(filepath.FromSlash(test.file))},
}
if test.slug != "" { cfg.Set("defaultExtension", "html")
p.update(map[string]interface{}{
"slug": test.slug, cfg.Set("uglyURLs", test.uglyURLs)
}) cfg.Set("canonifyURLs", test.canonifyURLs)
} cfg.Set("baseURL", test.base)
pageContent := fmt.Sprintf(`---
title: Page
slug: %q
url: %q
---
Content
`, test.slug, test.url)
writeSource(t, fs, filepath.Join("content", filepath.FromSlash(test.file)), pageContent)
s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
require.Len(t, s.RegularPages, 1)
p := s.RegularPages[0]
u := p.Permalink() u := p.Permalink()
expected := test.expectedAbs expected := test.expectedAbs
if u != expected { if u != expected {
t.Errorf("Test %d: Expected abs url: %s, got: %s", i, expected, u) t.Fatalf("[%d] Expected abs url: %s, got: %s", i, expected, u)
} }
u = p.RelPermalink() u = p.RelPermalink()
expected = test.expectedRel expected = test.expectedRel
if u != expected { if u != expected {
t.Errorf("Test %d: Expected rel url: %s, got: %s", i, expected, u) t.Errorf("[%d] Expected rel url: %s, got: %s", i, expected, u)
} }
} }
} }
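
TestPermalink no longer constructs a Page and SiteInfo by hand; each case writes a content file whose front matter carries the slug and url under test, builds a single site with a per-case config, and reads the permalink off the page the build produced. A condensed sketch of one such case (the function name is illustrative; the values are taken from the "booslug" row in the table above):

func sketchPermalinkCase(t *testing.T) {
	cfg, fs := newTestCfg()
	cfg.Set("baseURL", "http://barnew/boo/")
	cfg.Set("uglyURLs", false)
	cfg.Set("canonifyURLs", false)

	writeSource(t, fs, filepath.Join("content", "x", "y", "z", "boofar.md"),
		"---\ntitle: Page\nslug: \"booslug\"\n---\nContent\n")

	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
	require.Len(t, s.RegularPages, 1)

	p := s.RegularPages[0]
	// With uglyURLs off, the slug becomes a directory-style path under baseURL,
	// matching the corresponding row of the expected values above.
	_ = p.Permalink()    // e.g. http://barnew/boo/x/y/z/booslug/
	_ = p.RelPermalink() // e.g. /boo/x/y/z/booslug/
}
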

View file

@ -57,6 +57,7 @@ categories = "d"
TOML Front Matter with tags and categories` TOML Front Matter with tags and categories`
func TestParseTaxonomies(t *testing.T) { func TestParseTaxonomies(t *testing.T) {
t.Parallel()
for _, test := range []string{pageTomlWithTaxonomies, for _, test := range []string{pageTomlWithTaxonomies,
pageJSONWithTaxonomies, pageJSONWithTaxonomies,
pageYamlWithTaxonomiesA, pageYamlWithTaxonomiesA,
@ -64,7 +65,8 @@ func TestParseTaxonomies(t *testing.T) {
pageYamlWithTaxonomiesC, pageYamlWithTaxonomiesC,
} { } {
p, _ := pageTestSite.NewPage("page/with/taxonomy") s := newTestSite(t)
p, _ := s.NewPage("page/with/taxonomy")
_, err := p.ReadFrom(strings.NewReader(test)) _, err := p.ReadFrom(strings.NewReader(test))
if err != nil { if err != nil {
t.Fatalf("Failed parsing %q: %s", test, err) t.Fatalf("Failed parsing %q: %s", test, err)

View file

@ -28,8 +28,6 @@ import (
"github.com/spf13/cast" "github.com/spf13/cast"
"github.com/spf13/hugo/deps" "github.com/spf13/hugo/deps"
"github.com/spf13/hugo/helpers" "github.com/spf13/hugo/helpers"
"github.com/spf13/hugo/hugofs"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -467,13 +465,6 @@ activity = "exam"
Hi. Hi.
` `
func init() {
testCommonResetState()
pageTestSite, _ = NewSiteDefaultLang()
}
var pageTestSite *Site
func checkError(t *testing.T, err error, expected string) { func checkError(t *testing.T, err error, expected string) {
if err == nil { if err == nil {
t.Fatalf("err is nil. Expected: %s", expected) t.Fatalf("err is nil. Expected: %s", expected)
@ -484,8 +475,9 @@ func checkError(t *testing.T, err error, expected string) {
} }
func TestDegenerateEmptyPageZeroLengthName(t *testing.T) { func TestDegenerateEmptyPageZeroLengthName(t *testing.T) {
t.Parallel()
_, err := pageTestSite.NewPage("") s := newTestSite(t)
_, err := s.NewPage("")
if err == nil { if err == nil {
t.Fatalf("A zero length page name must return an error") t.Fatalf("A zero length page name must return an error")
} }
@ -494,7 +486,9 @@ func TestDegenerateEmptyPageZeroLengthName(t *testing.T) {
} }
func TestDegenerateEmptyPage(t *testing.T) { func TestDegenerateEmptyPage(t *testing.T) {
_, err := pageTestSite.NewPageFrom(strings.NewReader(emptyPage), "test") t.Parallel()
s := newTestSite(t)
_, err := s.NewPageFrom(strings.NewReader(emptyPage), "test")
if err != nil { if err != nil {
t.Fatalf("Empty files should not trigger an error. Should be able to touch a file while watching without erroring out.") t.Fatalf("Empty files should not trigger an error. Should be able to touch a file while watching without erroring out.")
} }
@ -611,19 +605,17 @@ func testAllMarkdownEnginesForPages(t *testing.T,
continue continue
} }
testCommonResetState() cfg, fs := newTestCfg()
fs := hugofs.NewMem()
if settings != nil { if settings != nil {
for k, v := range settings { for k, v := range settings {
viper.Set(k, v) cfg.Set(k, v)
} }
} }
contentDir := "content" contentDir := "content"
if s := viper.GetString("contentDir"); s != "" { if s := cfg.GetString("contentDir"); s != "" {
contentDir = s contentDir = s
} }
@ -637,7 +629,7 @@ func testAllMarkdownEnginesForPages(t *testing.T,
writeSource(t, fs, filepath.Join(contentDir, fileSourcePairs[i]), fileSourcePairs[i+1]) writeSource(t, fs, filepath.Join(contentDir, fileSourcePairs[i]), fileSourcePairs[i+1])
} }
s := buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{SkipRender: true}) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
require.Len(t, s.RegularPages, len(pageSources)) require.Len(t, s.RegularPages, len(pageSources))
@ -648,7 +640,7 @@ func testAllMarkdownEnginesForPages(t *testing.T,
} }
func TestCreateNewPage(t *testing.T) { func TestCreateNewPage(t *testing.T) {
t.Parallel()
assertFunc := func(t *testing.T, ext string, pages Pages) { assertFunc := func(t *testing.T, ext string, pages Pages) {
p := pages[0] p := pages[0]
@ -671,7 +663,7 @@ func TestCreateNewPage(t *testing.T) {
} }
func TestSplitSummaryAndContent(t *testing.T) { func TestSplitSummaryAndContent(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
markup string markup string
content string content string
@ -727,7 +719,7 @@ func TestSplitSummaryAndContent(t *testing.T) {
} }
func TestPageWithDelimiter(t *testing.T) { func TestPageWithDelimiter(t *testing.T) {
t.Parallel()
assertFunc := func(t *testing.T, ext string, pages Pages) { assertFunc := func(t *testing.T, ext string, pages Pages) {
p := pages[0] p := pages[0]
checkPageTitle(t, p, "Simple") checkPageTitle(t, p, "Simple")
@ -743,14 +735,12 @@ func TestPageWithDelimiter(t *testing.T) {
// Issue #1076 // Issue #1076
func TestPageWithDelimiterForMarkdownThatCrossesBorder(t *testing.T) { func TestPageWithDelimiterForMarkdownThatCrossesBorder(t *testing.T) {
t.Parallel()
testCommonResetState() cfg, fs := newTestCfg()
fs := hugofs.NewMem()
writeSource(t, fs, filepath.Join("content", "simple.md"), simplePageWithSummaryDelimiterAndMarkdownThatCrossesBorder) writeSource(t, fs, filepath.Join("content", "simple.md"), simplePageWithSummaryDelimiterAndMarkdownThatCrossesBorder)
s := buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{SkipRender: true}) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
require.Len(t, s.RegularPages, 1) require.Len(t, s.RegularPages, 1)
@ -767,9 +757,8 @@ func TestPageWithDelimiterForMarkdownThatCrossesBorder(t *testing.T) {
// Issue #2601 // Issue #2601
func TestPageRawContent(t *testing.T) { func TestPageRawContent(t *testing.T) {
testCommonResetState() t.Parallel()
cfg, fs := newTestCfg()
fs := hugofs.NewMem()
writeSource(t, fs, filepath.Join("content", "raw.md"), `--- writeSource(t, fs, filepath.Join("content", "raw.md"), `---
title: Raw title: Raw
@ -778,7 +767,7 @@ title: Raw
writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), `{{ .RawContent }}`) writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), `{{ .RawContent }}`)
s := buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{SkipRender: true}) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
require.Len(t, s.RegularPages, 1) require.Len(t, s.RegularPages, 1)
p := s.RegularPages[0] p := s.RegularPages[0]
@ -788,7 +777,7 @@ title: Raw
} }
func TestPageWithShortCodeInSummary(t *testing.T) { func TestPageWithShortCodeInSummary(t *testing.T) {
t.Parallel()
assertFunc := func(t *testing.T, ext string, pages Pages) { assertFunc := func(t *testing.T, ext string, pages Pages) {
p := pages[0] p := pages[0]
checkPageTitle(t, p, "Simple") checkPageTitle(t, p, "Simple")
@ -802,7 +791,7 @@ func TestPageWithShortCodeInSummary(t *testing.T) {
} }
func TestPageWithEmbeddedScriptTag(t *testing.T) { func TestPageWithEmbeddedScriptTag(t *testing.T) {
t.Parallel()
assertFunc := func(t *testing.T, ext string, pages Pages) { assertFunc := func(t *testing.T, ext string, pages Pages) {
p := pages[0] p := pages[0]
if ext == "ad" || ext == "rst" { if ext == "ad" || ext == "rst" {
@ -816,12 +805,12 @@ func TestPageWithEmbeddedScriptTag(t *testing.T) {
} }
func TestPageWithAdditionalExtension(t *testing.T) { func TestPageWithAdditionalExtension(t *testing.T) {
t.Parallel()
fs := hugofs.NewMem() cfg, fs := newTestCfg()
writeSource(t, fs, filepath.Join("content", "simple.md"), simplePageWithAdditionalExtension) writeSource(t, fs, filepath.Join("content", "simple.md"), simplePageWithAdditionalExtension)
s := buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{SkipRender: true}) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
require.Len(t, s.RegularPages, 1) require.Len(t, s.RegularPages, 1)
@ -832,11 +821,11 @@ func TestPageWithAdditionalExtension(t *testing.T) {
func TestTableOfContents(t *testing.T) { func TestTableOfContents(t *testing.T) {
fs := hugofs.NewMem() cfg, fs := newTestCfg()
writeSource(t, fs, filepath.Join("content", "tocpage.md"), pageWithToC) writeSource(t, fs, filepath.Join("content", "tocpage.md"), pageWithToC)
s := buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{SkipRender: true}) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
require.Len(t, s.RegularPages, 1) require.Len(t, s.RegularPages, 1)
@ -847,7 +836,7 @@ func TestTableOfContents(t *testing.T) {
} }
func TestPageWithMoreTag(t *testing.T) { func TestPageWithMoreTag(t *testing.T) {
t.Parallel()
assertFunc := func(t *testing.T, ext string, pages Pages) { assertFunc := func(t *testing.T, ext string, pages Pages) {
p := pages[0] p := pages[0]
checkPageTitle(t, p, "Simple") checkPageTitle(t, p, "Simple")
@ -862,11 +851,12 @@ func TestPageWithMoreTag(t *testing.T) {
} }
func TestPageWithDate(t *testing.T) { func TestPageWithDate(t *testing.T) {
fs := hugofs.NewMem() t.Parallel()
cfg, fs := newTestCfg()
writeSource(t, fs, filepath.Join("content", "simple.md"), simplePageRFC3339Date) writeSource(t, fs, filepath.Join("content", "simple.md"), simplePageRFC3339Date)
s := buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{SkipRender: true}) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
require.Len(t, s.RegularPages, 1) require.Len(t, s.RegularPages, 1)
@ -877,8 +867,7 @@ func TestPageWithDate(t *testing.T) {
} }
func TestWordCountWithAllCJKRunesWithoutHasCJKLanguage(t *testing.T) { func TestWordCountWithAllCJKRunesWithoutHasCJKLanguage(t *testing.T) {
testCommonResetState() t.Parallel()
assertFunc := func(t *testing.T, ext string, pages Pages) { assertFunc := func(t *testing.T, ext string, pages Pages) {
p := pages[0] p := pages[0]
if p.WordCount() != 8 { if p.WordCount() != 8 {
@ -890,6 +879,7 @@ func TestWordCountWithAllCJKRunesWithoutHasCJKLanguage(t *testing.T) {
} }
func TestWordCountWithAllCJKRunesHasCJKLanguage(t *testing.T) { func TestWordCountWithAllCJKRunesHasCJKLanguage(t *testing.T) {
t.Parallel()
settings := map[string]interface{}{"hasCJKLanguage": true} settings := map[string]interface{}{"hasCJKLanguage": true}
assertFunc := func(t *testing.T, ext string, pages Pages) { assertFunc := func(t *testing.T, ext string, pages Pages) {
@ -902,6 +892,7 @@ func TestWordCountWithAllCJKRunesHasCJKLanguage(t *testing.T) {
} }
func TestWordCountWithMainEnglishWithCJKRunes(t *testing.T) { func TestWordCountWithMainEnglishWithCJKRunes(t *testing.T) {
t.Parallel()
settings := map[string]interface{}{"hasCJKLanguage": true} settings := map[string]interface{}{"hasCJKLanguage": true}
assertFunc := func(t *testing.T, ext string, pages Pages) { assertFunc := func(t *testing.T, ext string, pages Pages) {
@ -920,8 +911,10 @@ func TestWordCountWithMainEnglishWithCJKRunes(t *testing.T) {
} }
func TestWordCountWithIsCJKLanguageFalse(t *testing.T) { func TestWordCountWithIsCJKLanguageFalse(t *testing.T) {
testCommonResetState() t.Parallel()
viper.Set("hasCJKLanguage", true) settings := map[string]interface{}{
"hasCJKLanguage": true,
}
assertFunc := func(t *testing.T, ext string, pages Pages) { assertFunc := func(t *testing.T, ext string, pages Pages) {
p := pages[0] p := pages[0]
@ -935,12 +928,12 @@ func TestWordCountWithIsCJKLanguageFalse(t *testing.T) {
} }
} }
testAllMarkdownEnginesForPages(t, assertFunc, nil, simplePageWithIsCJKLanguageFalse) testAllMarkdownEnginesForPages(t, assertFunc, settings, simplePageWithIsCJKLanguageFalse)
} }
func TestWordCount(t *testing.T) { func TestWordCount(t *testing.T) {
t.Parallel()
assertFunc := func(t *testing.T, ext string, pages Pages) { assertFunc := func(t *testing.T, ext string, pages Pages) {
p := pages[0] p := pages[0]
if p.WordCount() != 483 { if p.WordCount() != 483 {
@ -962,6 +955,7 @@ func TestWordCount(t *testing.T) {
} }
func TestCreatePage(t *testing.T) { func TestCreatePage(t *testing.T) {
t.Parallel()
var tests = []struct { var tests = []struct {
r string r string
}{ }{
@ -972,7 +966,8 @@ func TestCreatePage(t *testing.T) {
} }
for _, test := range tests { for _, test := range tests {
p, _ := pageTestSite.NewPage("page") s := newTestSite(t)
p, _ := s.NewPage("page")
if _, err := p.ReadFrom(strings.NewReader(test.r)); err != nil { if _, err := p.ReadFrom(strings.NewReader(test.r)); err != nil {
t.Errorf("Unable to parse page: %s", err) t.Errorf("Unable to parse page: %s", err)
} }
@ -980,6 +975,7 @@ func TestCreatePage(t *testing.T) {
} }
func TestDegenerateInvalidFrontMatterShortDelim(t *testing.T) { func TestDegenerateInvalidFrontMatterShortDelim(t *testing.T) {
t.Parallel()
var tests = []struct { var tests = []struct {
r string r string
err string err string
@ -987,14 +983,15 @@ func TestDegenerateInvalidFrontMatterShortDelim(t *testing.T) {
{invalidFrontmatterShortDelimEnding, "unable to read frontmatter at filepos 45: EOF"}, {invalidFrontmatterShortDelimEnding, "unable to read frontmatter at filepos 45: EOF"},
} }
for _, test := range tests { for _, test := range tests {
s := newTestSite(t)
p, _ := pageTestSite.NewPage("invalid/front/matter/short/delim") p, _ := s.NewPage("invalid/front/matter/short/delim")
_, err := p.ReadFrom(strings.NewReader(test.r)) _, err := p.ReadFrom(strings.NewReader(test.r))
checkError(t, err, test.err) checkError(t, err, test.err)
} }
} }
func TestShouldRenderContent(t *testing.T) { func TestShouldRenderContent(t *testing.T) {
t.Parallel()
var tests = []struct { var tests = []struct {
text string text string
render bool render bool
@ -1010,8 +1007,8 @@ func TestShouldRenderContent(t *testing.T) {
} }
for _, test := range tests { for _, test := range tests {
s := newTestSite(t)
p, _ := pageTestSite.NewPage("render/front/matter") p, _ := s.NewPage("render/front/matter")
_, err := p.ReadFrom(strings.NewReader(test.text)) _, err := p.ReadFrom(strings.NewReader(test.text))
p = pageMust(p, err) p = pageMust(p, err)
if p.IsRenderable() != test.render { if p.IsRenderable() != test.render {
@ -1022,13 +1019,15 @@ func TestShouldRenderContent(t *testing.T) {
// Issue #768 // Issue #768
func TestCalendarParamsVariants(t *testing.T) { func TestCalendarParamsVariants(t *testing.T) {
pageJSON, _ := pageTestSite.NewPage("test/fileJSON.md") t.Parallel()
s := newTestSite(t)
pageJSON, _ := s.NewPage("test/fileJSON.md")
_, _ = pageJSON.ReadFrom(strings.NewReader(pageWithCalendarJSONFrontmatter)) _, _ = pageJSON.ReadFrom(strings.NewReader(pageWithCalendarJSONFrontmatter))
pageYAML, _ := pageTestSite.NewPage("test/fileYAML.md") pageYAML, _ := s.NewPage("test/fileYAML.md")
_, _ = pageYAML.ReadFrom(strings.NewReader(pageWithCalendarYAMLFrontmatter)) _, _ = pageYAML.ReadFrom(strings.NewReader(pageWithCalendarYAMLFrontmatter))
pageTOML, _ := pageTestSite.NewPage("test/fileTOML.md") pageTOML, _ := s.NewPage("test/fileTOML.md")
_, _ = pageTOML.ReadFrom(strings.NewReader(pageWithCalendarTOMLFrontmatter)) _, _ = pageTOML.ReadFrom(strings.NewReader(pageWithCalendarTOMLFrontmatter))
assert.True(t, compareObjects(pageJSON.Params, pageYAML.Params)) assert.True(t, compareObjects(pageJSON.Params, pageYAML.Params))
@ -1037,7 +1036,9 @@ func TestCalendarParamsVariants(t *testing.T) {
} }
func TestDifferentFrontMatterVarTypes(t *testing.T) { func TestDifferentFrontMatterVarTypes(t *testing.T) {
page, _ := pageTestSite.NewPage("test/file1.md") t.Parallel()
s := newTestSite(t)
page, _ := s.NewPage("test/file1.md")
_, _ = page.ReadFrom(strings.NewReader(pageWithVariousFrontmatterTypes)) _, _ = page.ReadFrom(strings.NewReader(pageWithVariousFrontmatterTypes))
dateval, _ := time.Parse(time.RFC3339, "1979-05-27T07:32:00Z") dateval, _ := time.Parse(time.RFC3339, "1979-05-27T07:32:00Z")
@ -1066,7 +1067,9 @@ func TestDifferentFrontMatterVarTypes(t *testing.T) {
} }
func TestDegenerateInvalidFrontMatterLeadingWhitespace(t *testing.T) { func TestDegenerateInvalidFrontMatterLeadingWhitespace(t *testing.T) {
p, _ := pageTestSite.NewPage("invalid/front/matter/leading/ws") t.Parallel()
s := newTestSite(t)
p, _ := s.NewPage("invalid/front/matter/leading/ws")
_, err := p.ReadFrom(strings.NewReader(invalidFrontmatterLadingWs)) _, err := p.ReadFrom(strings.NewReader(invalidFrontmatterLadingWs))
if err != nil { if err != nil {
t.Fatalf("Unable to parse front matter given leading whitespace: %s", err) t.Fatalf("Unable to parse front matter given leading whitespace: %s", err)
@ -1074,7 +1077,9 @@ func TestDegenerateInvalidFrontMatterLeadingWhitespace(t *testing.T) {
} }
func TestSectionEvaluation(t *testing.T) { func TestSectionEvaluation(t *testing.T) {
page, _ := pageTestSite.NewPage(filepath.FromSlash("blue/file1.md")) t.Parallel()
s := newTestSite(t)
page, _ := s.NewPage(filepath.FromSlash("blue/file1.md"))
page.ReadFrom(strings.NewReader(simplePage)) page.ReadFrom(strings.NewReader(simplePage))
if page.Section() != "blue" { if page.Section() != "blue" {
t.Errorf("Section should be %s, got: %s", "blue", page.Section()) t.Errorf("Section should be %s, got: %s", "blue", page.Section())
@ -1086,6 +1091,7 @@ func L(s ...string) []string {
} }
func TestLayoutOverride(t *testing.T) { func TestLayoutOverride(t *testing.T) {
t.Parallel()
var ( var (
pathContentTwoDir = filepath.Join("content", "dub", "sub", "file1.md") pathContentTwoDir = filepath.Join("content", "dub", "sub", "file1.md")
pathContentOneDir = filepath.Join("content", "gub", "file1.md") pathContentOneDir = filepath.Join("content", "gub", "file1.md")
@ -1119,7 +1125,8 @@ func TestLayoutOverride(t *testing.T) {
{simplePageTypeLayout, pathNoDirectory, L("barfoo/buzfoo.html", "_default/buzfoo.html")}, {simplePageTypeLayout, pathNoDirectory, L("barfoo/buzfoo.html", "_default/buzfoo.html")},
} }
for _, test := range tests { for _, test := range tests {
p, _ := pageTestSite.NewPage(test.path) s := newTestSite(t)
p, _ := s.NewPage(test.path)
_, err := p.ReadFrom(strings.NewReader(test.content)) _, err := p.ReadFrom(strings.NewReader(test.content))
if err != nil { if err != nil {
t.Fatalf("Unable to parse content:\n%s\n", test.content) t.Fatalf("Unable to parse content:\n%s\n", test.content)
@ -1135,6 +1142,7 @@ func TestLayoutOverride(t *testing.T) {
} }
func TestSliceToLower(t *testing.T) { func TestSliceToLower(t *testing.T) {
t.Parallel()
tests := []struct { tests := []struct {
value []string value []string
expected []string expected []string
@ -1155,10 +1163,9 @@ func TestSliceToLower(t *testing.T) {
} }
func TestPagePaths(t *testing.T) { func TestPagePaths(t *testing.T) {
testCommonResetState() t.Parallel()
viper.Set("defaultExtension", "html") siteParmalinksSetting := map[string]string{
siteParmalinksSetting := PermalinkOverrides{
"post": ":year/:month/:day/:title/", "post": ":year/:month/:day/:title/",
} }
@ -1168,34 +1175,41 @@ func TestPagePaths(t *testing.T) {
hasPermalink bool hasPermalink bool
expected string expected string
}{ }{
{simplePage, "content/post/x.md", false, "content/post/x.html"}, {simplePage, "post/x.md", false, "post/x.html"},
{simplePageWithURL, "content/post/x.md", false, "simple/url/index.html"}, {simplePageWithURL, "post/x.md", false, "simple/url/index.html"},
{simplePageWithSlug, "content/post/x.md", false, "content/post/simple-slug.html"}, {simplePageWithSlug, "post/x.md", false, "post/simple-slug.html"},
{simplePageWithDate, "content/post/x.md", true, "2013/10/15/simple/index.html"}, {simplePageWithDate, "post/x.md", true, "2013/10/15/simple/index.html"},
{UTF8Page, "content/post/x.md", false, "content/post/x.html"}, {UTF8Page, "post/x.md", false, "post/x.html"},
{UTF8PageWithURL, "content/post/x.md", false, "ラーメン/url/index.html"}, {UTF8PageWithURL, "post/x.md", false, "ラーメン/url/index.html"},
{UTF8PageWithSlug, "content/post/x.md", false, "content/post/ラーメン-slug.html"}, {UTF8PageWithSlug, "post/x.md", false, "post/ラーメン-slug.html"},
{UTF8PageWithDate, "content/post/x.md", true, "2013/10/15/ラーメン/index.html"}, {UTF8PageWithDate, "post/x.md", true, "2013/10/15/ラーメン/index.html"},
} }
for _, test := range tests { for i, test := range tests {
p, _ := pageTestSite.NewPageFrom(strings.NewReader(test.content), filepath.FromSlash(test.path)) cfg, fs := newTestCfg()
info := newSiteInfo(siteBuilderCfg{language: helpers.NewDefaultLanguage()})
p.Site = &info cfg.Set("defaultExtension", "html")
if test.hasPermalink { if test.hasPermalink {
p.Site.Permalinks = siteParmalinksSetting cfg.Set("permalinks", siteParmalinksSetting)
} }
writeSource(t, fs, filepath.Join("content", filepath.FromSlash(test.path)), test.content)
s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
require.Len(t, s.RegularPages, 1)
p := s.RegularPages[0]
expectedTargetPath := filepath.FromSlash(test.expected) expectedTargetPath := filepath.FromSlash(test.expected)
expectedFullFilePath := filepath.FromSlash(test.path) expectedFullFilePath := filepath.FromSlash(test.path)
if p.TargetPath() != expectedTargetPath { if p.TargetPath() != expectedTargetPath {
t.Errorf("%s => TargetPath expected: '%s', got: '%s'", test.content, expectedTargetPath, p.TargetPath()) t.Fatalf("[%d] %s => TargetPath expected: '%s', got: '%s'", i, test.content, expectedTargetPath, p.TargetPath())
} }
if p.FullFilePath() != expectedFullFilePath { if p.FullFilePath() != expectedFullFilePath {
t.Errorf("%s => FullFilePath expected: '%s', got: '%s'", test.content, expectedFullFilePath, p.FullFilePath()) t.Fatalf("[%d] %s => FullFilePath expected: '%s', got: '%s'", i, test.content, expectedFullFilePath, p.FullFilePath())
} }
} }
} }
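TestPagePaths no longer patches a hand-built SiteInfo onto each page; it writes the page to an in-memory filesystem and builds a real, render-skipped site from its own configuration. Stripped of the test table, the pattern is roughly the following sketch (paths and the permalink value are illustrative, taken from the table above):

func TestSinglePageTargetPathSketch(t *testing.T) {
	t.Parallel()

	cfg, fs := newTestCfg()
	cfg.Set("defaultExtension", "html")
	cfg.Set("permalinks", map[string]string{"post": ":year/:month/:day/:title/"})

	writeSource(t, fs, filepath.Join("content", "post", "x.md"), simplePageWithDate)

	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
	require.Len(t, s.RegularPages, 1)

	p := s.RegularPages[0]
	// Per the table above this should resolve to 2013/10/15/simple/index.html.
	t.Logf("target path: %s", p.TargetPath())
}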
@ -1209,7 +1223,9 @@ some content
` `
func TestDraftAndPublishedFrontMatterError(t *testing.T) { func TestDraftAndPublishedFrontMatterError(t *testing.T) {
_, err := pageTestSite.NewPageFrom(strings.NewReader(pageWithDraftAndPublished), "content/post/broken.md") t.Parallel()
s := newTestSite(t)
_, err := s.NewPageFrom(strings.NewReader(pageWithDraftAndPublished), "content/post/broken.md")
if err != ErrHasDraftAndPublished { if err != ErrHasDraftAndPublished {
t.Errorf("expected ErrHasDraftAndPublished, was %#v", err) t.Errorf("expected ErrHasDraftAndPublished, was %#v", err)
} }
@ -1229,14 +1245,16 @@ some content
` `
func TestPublishedFrontMatter(t *testing.T) { func TestPublishedFrontMatter(t *testing.T) {
p, err := pageTestSite.NewPageFrom(strings.NewReader(pagesWithPublishedFalse), "content/post/broken.md") t.Parallel()
s := newTestSite(t)
p, err := s.NewPageFrom(strings.NewReader(pagesWithPublishedFalse), "content/post/broken.md")
if err != nil { if err != nil {
t.Fatalf("err during parse: %s", err) t.Fatalf("err during parse: %s", err)
} }
if !p.Draft { if !p.Draft {
t.Errorf("expected true, got %t", p.Draft) t.Errorf("expected true, got %t", p.Draft)
} }
p, err = pageTestSite.NewPageFrom(strings.NewReader(pageWithPublishedTrue), "content/post/broken.md") p, err = s.NewPageFrom(strings.NewReader(pageWithPublishedTrue), "content/post/broken.md")
if err != nil { if err != nil {
t.Fatalf("err during parse: %s", err) t.Fatalf("err during parse: %s", err)
} }
@ -1261,10 +1279,12 @@ some content
} }
func TestDraft(t *testing.T) { func TestDraft(t *testing.T) {
t.Parallel()
s := newTestSite(t)
for _, draft := range []bool{true, false} { for _, draft := range []bool{true, false} {
for i, templ := range pagesDraftTemplate { for i, templ := range pagesDraftTemplate {
pageContent := fmt.Sprintf(templ, draft) pageContent := fmt.Sprintf(templ, draft)
p, err := pageTestSite.NewPageFrom(strings.NewReader(pageContent), "content/post/broken.md") p, err := s.NewPageFrom(strings.NewReader(pageContent), "content/post/broken.md")
if err != nil { if err != nil {
t.Fatalf("err during parse: %s", err) t.Fatalf("err during parse: %s", err)
} }
@ -1314,6 +1334,8 @@ some content
} }
func TestPageParams(t *testing.T) { func TestPageParams(t *testing.T) {
t.Parallel()
s := newTestSite(t)
want := map[string]interface{}{ want := map[string]interface{}{
"tags": []string{"hugo", "web"}, "tags": []string{"hugo", "web"},
// Issue #2752 // Issue #2752
@ -1324,13 +1346,15 @@ func TestPageParams(t *testing.T) {
} }
for i, c := range pagesParamsTemplate { for i, c := range pagesParamsTemplate {
p, err := pageTestSite.NewPageFrom(strings.NewReader(c), "content/post/params.md") p, err := s.NewPageFrom(strings.NewReader(c), "content/post/params.md")
require.NoError(t, err, "err during parse", "#%d", i) require.NoError(t, err, "err during parse", "#%d", i)
assert.Equal(t, want, p.Params, "#%d", i) assert.Equal(t, want, p.Params, "#%d", i)
} }
} }
func TestPageSimpleMethods(t *testing.T) { func TestPageSimpleMethods(t *testing.T) {
t.Parallel()
s := newTestSite(t)
for i, this := range []struct { for i, this := range []struct {
assertFunc func(p *Page) bool assertFunc func(p *Page) bool
}{ }{
@ -1340,7 +1364,7 @@ func TestPageSimpleMethods(t *testing.T) {
{func(p *Page) bool { return strings.Join(p.PlainWords(), " ") == "Do Be Do Be Do" }}, {func(p *Page) bool { return strings.Join(p.PlainWords(), " ") == "Do Be Do Be Do" }},
} { } {
p, _ := pageTestSite.NewPage("Test") p, _ := s.NewPage("Test")
p.Content = "<h1>Do Be Do Be Do</h1>" p.Content = "<h1>Do Be Do Be Do</h1>"
if !this.assertFunc(p) { if !this.assertFunc(p) {
t.Errorf("[%d] Page method error", i) t.Errorf("[%d] Page method error", i)
@ -1349,6 +1373,7 @@ func TestPageSimpleMethods(t *testing.T) {
} }
func TestIndexPageSimpleMethods(t *testing.T) { func TestIndexPageSimpleMethods(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
assertFunc func(n *Page) bool assertFunc func(n *Page) bool
}{ }{
@ -1371,7 +1396,7 @@ func TestIndexPageSimpleMethods(t *testing.T) {
} }
func TestKind(t *testing.T) { func TestKind(t *testing.T) {
t.Parallel()
// Add tests for these constants to make sure they don't change // Add tests for these constants to make sure they don't change
require.Equal(t, "page", KindPage) require.Equal(t, "page", KindPage)
require.Equal(t, "home", KindHome) require.Equal(t, "home", KindHome)
@ -1382,13 +1407,14 @@ func TestKind(t *testing.T) {
} }
func TestChompBOM(t *testing.T) { func TestChompBOM(t *testing.T) {
t.Parallel()
const utf8BOM = "\xef\xbb\xbf" const utf8BOM = "\xef\xbb\xbf"
fs := hugofs.NewMem() cfg, fs := newTestCfg()
writeSource(t, fs, filepath.Join("content", "simple.md"), utf8BOM+simplePage) writeSource(t, fs, filepath.Join("content", "simple.md"), utf8BOM+simplePage)
s := buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{SkipRender: true}) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
require.Len(t, s.RegularPages, 1) require.Len(t, s.RegularPages, 1)
@ -1423,6 +1449,7 @@ func compareObjects(a interface{}, b interface{}) bool {
} }
func TestShouldBuild(t *testing.T) { func TestShouldBuild(t *testing.T) {
t.Parallel()
var past = time.Date(2009, 11, 17, 20, 34, 58, 651387237, time.UTC) var past = time.Date(2009, 11, 17, 20, 34, 58, 651387237, time.UTC)
var future = time.Date(2037, 11, 17, 20, 34, 58, 651387237, time.UTC) var future = time.Date(2037, 11, 17, 20, 34, 58, 651387237, time.UTC)
var zero = time.Time{} var zero = time.Time{}
@ -1469,12 +1496,13 @@ func TestShouldBuild(t *testing.T) {
} }
func BenchmarkParsePage(b *testing.B) { func BenchmarkParsePage(b *testing.B) {
s := newTestSite(b)
f, _ := os.Open("testdata/redis.cn.md") f, _ := os.Open("testdata/redis.cn.md")
var buf bytes.Buffer var buf bytes.Buffer
buf.ReadFrom(f) buf.ReadFrom(f)
b.ResetTimer() b.ResetTimer()
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
page, _ := pageTestSite.NewPage("bench") page, _ := s.NewPage("bench")
page.ReadFrom(bytes.NewReader(buf.Bytes())) page.ReadFrom(bytes.NewReader(buf.Bytes()))
} }
} }



@ -89,13 +89,17 @@ Page With Date HugoLong`
) )
func TestDegenerateDateFrontMatter(t *testing.T) { func TestDegenerateDateFrontMatter(t *testing.T) {
p, _ := pageTestSite.NewPageFrom(strings.NewReader(pageWithInvalidDate), "page/with/invalid/date") t.Parallel()
s := newTestSite(t)
p, _ := s.NewPageFrom(strings.NewReader(pageWithInvalidDate), "page/with/invalid/date")
if p.Date != *new(time.Time) { if p.Date != *new(time.Time) {
t.Fatalf("Date should be set to time.Time zero value. Got: %s", p.Date) t.Fatalf("Date should be set to time.Time zero value. Got: %s", p.Date)
} }
} }
func TestParsingDateInFrontMatter(t *testing.T) { func TestParsingDateInFrontMatter(t *testing.T) {
t.Parallel()
s := newTestSite(t)
tests := []struct { tests := []struct {
buf string buf string
dt string dt string
@ -131,7 +135,7 @@ func TestParsingDateInFrontMatter(t *testing.T) {
if e != nil { if e != nil {
t.Fatalf("Unable to parse date time (RFC3339) for running the test: %s", e) t.Fatalf("Unable to parse date time (RFC3339) for running the test: %s", e)
} }
p, err := pageTestSite.NewPageFrom(strings.NewReader(test.buf), "page/with/date") p, err := s.NewPageFrom(strings.NewReader(test.buf), "page/with/date")
if err != nil { if err != nil {
t.Fatalf("Expected to be able to parse page.") t.Fatalf("Expected to be able to parse page.")
} }


@ -35,6 +35,7 @@ var pagePNTestSources = []pagePNTestObject{
} }
func TestPrev(t *testing.T) { func TestPrev(t *testing.T) {
t.Parallel()
pages := preparePageGroupTestPages(t) pages := preparePageGroupTestPages(t)
assert.Equal(t, pages.Prev(pages[0]), pages[4]) assert.Equal(t, pages.Prev(pages[0]), pages[4])
assert.Equal(t, pages.Prev(pages[1]), pages[0]) assert.Equal(t, pages.Prev(pages[1]), pages[0])
@ -42,6 +43,7 @@ func TestPrev(t *testing.T) {
} }
func TestNext(t *testing.T) { func TestNext(t *testing.T) {
t.Parallel()
pages := preparePageGroupTestPages(t) pages := preparePageGroupTestPages(t)
assert.Equal(t, pages.Next(pages[0]), pages[1]) assert.Equal(t, pages.Next(pages[0]), pages[1])
assert.Equal(t, pages.Next(pages[1]), pages[2]) assert.Equal(t, pages.Next(pages[1]), pages[2])
@ -49,16 +51,17 @@ func TestNext(t *testing.T) {
} }
func prepareWeightedPagesPrevNext(t *testing.T) WeightedPages { func prepareWeightedPagesPrevNext(t *testing.T) WeightedPages {
s := newTestSite(t)
w := WeightedPages{} w := WeightedPages{}
for _, s := range pagePNTestSources { for _, src := range pagePNTestSources {
p, err := pageTestSite.NewPage(s.path) p, err := s.NewPage(src.path)
if err != nil { if err != nil {
t.Fatalf("failed to prepare test page %s", s.path) t.Fatalf("failed to prepare test page %s", src.path)
} }
p.Weight = s.weight p.Weight = src.weight
p.Date = cast.ToTime(s.date) p.Date = cast.ToTime(src.date)
p.PublishDate = cast.ToTime(s.date) p.PublishDate = cast.ToTime(src.date)
w = append(w, WeightedPage{p.Weight, p}) w = append(w, WeightedPage{p.Weight, p})
} }
@ -67,6 +70,7 @@ func prepareWeightedPagesPrevNext(t *testing.T) WeightedPages {
} }
func TestWeightedPagesPrev(t *testing.T) { func TestWeightedPagesPrev(t *testing.T) {
t.Parallel()
w := prepareWeightedPagesPrevNext(t) w := prepareWeightedPagesPrevNext(t)
assert.Equal(t, w.Prev(w[0].Page), w[4].Page) assert.Equal(t, w.Prev(w[0].Page), w[4].Page)
assert.Equal(t, w.Prev(w[1].Page), w[0].Page) assert.Equal(t, w.Prev(w[1].Page), w[0].Page)
@ -74,6 +78,7 @@ func TestWeightedPagesPrev(t *testing.T) {
} }
func TestWeightedPagesNext(t *testing.T) { func TestWeightedPagesNext(t *testing.T) {
t.Parallel()
w := prepareWeightedPagesPrevNext(t) w := prepareWeightedPagesPrevNext(t)
assert.Equal(t, w.Next(w[0].Page), w[1].Page) assert.Equal(t, w.Next(w[0].Page), w[1].Page)
assert.Equal(t, w.Next(w[1].Page), w[2].Page) assert.Equal(t, w.Next(w[1].Page), w[2].Page)


@ -21,6 +21,8 @@ import (
"path" "path"
"reflect" "reflect"
"github.com/spf13/hugo/config"
"github.com/spf13/cast" "github.com/spf13/cast"
"github.com/spf13/hugo/helpers" "github.com/spf13/hugo/helpers"
) )
@ -266,7 +268,7 @@ func (p *Page) Paginator(options ...interface{}) (*Pager, error) {
if !p.IsNode() { if !p.IsNode() {
return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", p.Kind, p.Title) return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", p.Kind, p.Title)
} }
pagerSize, err := resolvePagerSize(options...) pagerSize, err := resolvePagerSize(p.s.Cfg, options...)
if err != nil { if err != nil {
return nil, err return nil, err
@ -310,7 +312,7 @@ func (p *Page) Paginate(seq interface{}, options ...interface{}) (*Pager, error)
return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", p.Kind, p.Title) return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", p.Kind, p.Title)
} }
pagerSize, err := resolvePagerSize(options...) pagerSize, err := resolvePagerSize(p.s.Cfg, options...)
if err != nil { if err != nil {
return nil, err return nil, err
@ -353,9 +355,9 @@ func (p *Page) Paginate(seq interface{}, options ...interface{}) (*Pager, error)
return p.paginator, nil return p.paginator, nil
} }
func resolvePagerSize(options ...interface{}) (int, error) { func resolvePagerSize(cfg config.Provider, options ...interface{}) (int, error) {
if len(options) == 0 { if len(options) == 0 {
return helpers.Config().GetInt("paginate"), nil return cfg.GetInt("paginate"), nil
} }
if len(options) > 1 { if len(options) > 1 {
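(The hunk above is cut off here.) The signature change is the core of this refactor: resolvePagerSize now reads "paginate" from whatever config.Provider the caller hands it (the page's p.s.Cfg in the hunks above) rather than from the global helpers.Config(). A hedged sketch of exercising it directly, assuming the newTestCfg helper from the test files in this commit and assuming that a single positive option overrides the configured default, as the Paginator(pagerSize) calls in the tests rely on:

func TestResolvePagerSizeSketch(t *testing.T) {
	t.Parallel()

	cfg, _ := newTestCfg()
	cfg.Set("paginate", 5)

	// With no explicit option the value comes from the injected provider.
	size, err := resolvePagerSize(cfg)
	if err != nil || size != 5 {
		t.Fatalf("got %d, %v; want 5, nil", size, err)
	}

	// Assumed behavior: an explicit option takes precedence over the configured default.
	size, err = resolvePagerSize(cfg, 10)
	if err != nil || size != 10 {
		t.Fatalf("got %d, %v; want 10, nil", size, err)
	}
}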


@ -20,46 +20,44 @@ import (
"testing" "testing"
"github.com/spf13/hugo/deps" "github.com/spf13/hugo/deps"
"github.com/spf13/hugo/helpers"
"github.com/spf13/hugo/hugofs"
"github.com/spf13/hugo/source"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
func TestSplitPages(t *testing.T) { func TestSplitPages(t *testing.T) {
t.Parallel()
s := newTestSite(t)
pages := createTestPages(21) pages := createTestPages(s, 21)
chunks := splitPages(pages, 5) chunks := splitPages(pages, 5)
assert.Equal(t, 5, len(chunks)) require.Equal(t, 5, len(chunks))
for i := 0; i < 4; i++ { for i := 0; i < 4; i++ {
assert.Equal(t, 5, chunks[i].Len()) require.Equal(t, 5, chunks[i].Len())
} }
lastChunk := chunks[4] lastChunk := chunks[4]
assert.Equal(t, 1, lastChunk.Len()) require.Equal(t, 1, lastChunk.Len())
} }
func TestSplitPageGroups(t *testing.T) { func TestSplitPageGroups(t *testing.T) {
t.Parallel()
pages := createTestPages(21) s := newTestSite(t)
pages := createTestPages(s, 21)
groups, _ := pages.GroupBy("Weight", "desc") groups, _ := pages.GroupBy("Weight", "desc")
chunks := splitPageGroups(groups, 5) chunks := splitPageGroups(groups, 5)
assert.Equal(t, 5, len(chunks)) require.Equal(t, 5, len(chunks))
firstChunk := chunks[0] firstChunk := chunks[0]
// alternate weight 5 and 10 // alternate weight 5 and 10
if groups, ok := firstChunk.(PagesGroup); ok { if groups, ok := firstChunk.(PagesGroup); ok {
assert.Equal(t, 5, groups.Len()) require.Equal(t, 5, groups.Len())
for _, pg := range groups { for _, pg := range groups {
// first group 10 in weight // first group 10 in weight
assert.Equal(t, 10, pg.Key) require.Equal(t, 10, pg.Key)
for _, p := range pg.Pages { for _, p := range pg.Pages {
assert.True(t, p.fuzzyWordCount%2 == 0) // magic test require.True(t, p.fuzzyWordCount%2 == 0) // magic test
} }
} }
} else { } else {
@ -69,12 +67,12 @@ func TestSplitPageGroups(t *testing.T) {
lastChunk := chunks[4] lastChunk := chunks[4]
if groups, ok := lastChunk.(PagesGroup); ok { if groups, ok := lastChunk.(PagesGroup); ok {
assert.Equal(t, 1, groups.Len()) require.Equal(t, 1, groups.Len())
for _, pg := range groups { for _, pg := range groups {
// last should have 5 in weight // last should have 5 in weight
assert.Equal(t, 5, pg.Key) require.Equal(t, 5, pg.Key)
for _, p := range pg.Pages { for _, p := range pg.Pages {
assert.True(t, p.fuzzyWordCount%2 != 0) // magic test require.True(t, p.fuzzyWordCount%2 != 0) // magic test
} }
} }
} else { } else {
@ -84,7 +82,9 @@ func TestSplitPageGroups(t *testing.T) {
} }
func TestPager(t *testing.T) { func TestPager(t *testing.T) {
pages := createTestPages(21) t.Parallel()
s := newTestSite(t)
pages := createTestPages(s, 21)
groups, _ := pages.GroupBy("Weight", "desc") groups, _ := pages.GroupBy("Weight", "desc")
urlFactory := func(page int) string { urlFactory := func(page int) string {
@ -92,25 +92,25 @@ func TestPager(t *testing.T) {
} }
_, err := newPaginatorFromPages(pages, -1, urlFactory) _, err := newPaginatorFromPages(pages, -1, urlFactory)
assert.NotNil(t, err) require.NotNil(t, err)
_, err = newPaginatorFromPageGroups(groups, -1, urlFactory) _, err = newPaginatorFromPageGroups(groups, -1, urlFactory)
assert.NotNil(t, err) require.NotNil(t, err)
pag, err := newPaginatorFromPages(pages, 5, urlFactory) pag, err := newPaginatorFromPages(pages, 5, urlFactory)
assert.Nil(t, err) require.Nil(t, err)
doTestPages(t, pag) doTestPages(t, pag)
first := pag.Pagers()[0].First() first := pag.Pagers()[0].First()
assert.Equal(t, "Pager 1", first.String()) require.Equal(t, "Pager 1", first.String())
assert.NotEmpty(t, first.Pages()) require.NotEmpty(t, first.Pages())
assert.Empty(t, first.PageGroups()) require.Empty(t, first.PageGroups())
pag, err = newPaginatorFromPageGroups(groups, 5, urlFactory) pag, err = newPaginatorFromPageGroups(groups, 5, urlFactory)
assert.Nil(t, err) require.Nil(t, err)
doTestPages(t, pag) doTestPages(t, pag)
first = pag.Pagers()[0].First() first = pag.Pagers()[0].First()
assert.NotEmpty(t, first.PageGroups()) require.NotEmpty(t, first.PageGroups())
assert.Empty(t, first.Pages()) require.Empty(t, first.Pages())
} }
@ -118,38 +118,40 @@ func doTestPages(t *testing.T, paginator *paginator) {
paginatorPages := paginator.Pagers() paginatorPages := paginator.Pagers()
assert.Equal(t, 5, len(paginatorPages)) require.Equal(t, 5, len(paginatorPages))
assert.Equal(t, 21, paginator.TotalNumberOfElements()) require.Equal(t, 21, paginator.TotalNumberOfElements())
assert.Equal(t, 5, paginator.PageSize()) require.Equal(t, 5, paginator.PageSize())
assert.Equal(t, 5, paginator.TotalPages()) require.Equal(t, 5, paginator.TotalPages())
first := paginatorPages[0] first := paginatorPages[0]
assert.Equal(t, template.HTML("page/1/"), first.URL()) require.Equal(t, template.HTML("page/1/"), first.URL())
assert.Equal(t, first, first.First()) require.Equal(t, first, first.First())
assert.True(t, first.HasNext()) require.True(t, first.HasNext())
assert.Equal(t, paginatorPages[1], first.Next()) require.Equal(t, paginatorPages[1], first.Next())
assert.False(t, first.HasPrev()) require.False(t, first.HasPrev())
assert.Nil(t, first.Prev()) require.Nil(t, first.Prev())
assert.Equal(t, 5, first.NumberOfElements()) require.Equal(t, 5, first.NumberOfElements())
assert.Equal(t, 1, first.PageNumber()) require.Equal(t, 1, first.PageNumber())
third := paginatorPages[2] third := paginatorPages[2]
assert.True(t, third.HasNext()) require.True(t, third.HasNext())
assert.True(t, third.HasPrev()) require.True(t, third.HasPrev())
assert.Equal(t, paginatorPages[1], third.Prev()) require.Equal(t, paginatorPages[1], third.Prev())
last := paginatorPages[4] last := paginatorPages[4]
assert.Equal(t, template.HTML("page/5/"), last.URL()) require.Equal(t, template.HTML("page/5/"), last.URL())
assert.Equal(t, last, last.Last()) require.Equal(t, last, last.Last())
assert.False(t, last.HasNext()) require.False(t, last.HasNext())
assert.Nil(t, last.Next()) require.Nil(t, last.Next())
assert.True(t, last.HasPrev()) require.True(t, last.HasPrev())
assert.Equal(t, 1, last.NumberOfElements()) require.Equal(t, 1, last.NumberOfElements())
assert.Equal(t, 5, last.PageNumber()) require.Equal(t, 5, last.PageNumber())
} }
func TestPagerNoPages(t *testing.T) { func TestPagerNoPages(t *testing.T) {
pages := createTestPages(0) t.Parallel()
s := newTestSite(t)
pages := createTestPages(s, 0)
groups, _ := pages.GroupBy("Weight", "desc") groups, _ := pages.GroupBy("Weight", "desc")
urlFactory := func(page int) string { urlFactory := func(page int) string {
@ -160,54 +162,55 @@ func TestPagerNoPages(t *testing.T) {
doTestPagerNoPages(t, paginator) doTestPagerNoPages(t, paginator)
first := paginator.Pagers()[0].First() first := paginator.Pagers()[0].First()
assert.Empty(t, first.PageGroups()) require.Empty(t, first.PageGroups())
assert.Empty(t, first.Pages()) require.Empty(t, first.Pages())
paginator, _ = newPaginatorFromPageGroups(groups, 5, urlFactory) paginator, _ = newPaginatorFromPageGroups(groups, 5, urlFactory)
doTestPagerNoPages(t, paginator) doTestPagerNoPages(t, paginator)
first = paginator.Pagers()[0].First() first = paginator.Pagers()[0].First()
assert.Empty(t, first.PageGroups()) require.Empty(t, first.PageGroups())
assert.Empty(t, first.Pages()) require.Empty(t, first.Pages())
} }
func doTestPagerNoPages(t *testing.T, paginator *paginator) { func doTestPagerNoPages(t *testing.T, paginator *paginator) {
paginatorPages := paginator.Pagers() paginatorPages := paginator.Pagers()
assert.Equal(t, 1, len(paginatorPages)) require.Equal(t, 1, len(paginatorPages))
assert.Equal(t, 0, paginator.TotalNumberOfElements()) require.Equal(t, 0, paginator.TotalNumberOfElements())
assert.Equal(t, 5, paginator.PageSize()) require.Equal(t, 5, paginator.PageSize())
assert.Equal(t, 0, paginator.TotalPages()) require.Equal(t, 0, paginator.TotalPages())
// pageOne should be nothing but the first // pageOne should be nothing but the first
pageOne := paginatorPages[0] pageOne := paginatorPages[0]
assert.NotNil(t, pageOne.First()) require.NotNil(t, pageOne.First())
assert.False(t, pageOne.HasNext()) require.False(t, pageOne.HasNext())
assert.False(t, pageOne.HasPrev()) require.False(t, pageOne.HasPrev())
assert.Nil(t, pageOne.Next()) require.Nil(t, pageOne.Next())
assert.Equal(t, 1, len(pageOne.Pagers())) require.Equal(t, 1, len(pageOne.Pagers()))
assert.Equal(t, 0, pageOne.Pages().Len()) require.Equal(t, 0, pageOne.Pages().Len())
assert.Equal(t, 0, pageOne.NumberOfElements()) require.Equal(t, 0, pageOne.NumberOfElements())
assert.Equal(t, 0, pageOne.TotalNumberOfElements()) require.Equal(t, 0, pageOne.TotalNumberOfElements())
assert.Equal(t, 0, pageOne.TotalPages()) require.Equal(t, 0, pageOne.TotalPages())
assert.Equal(t, 1, pageOne.PageNumber()) require.Equal(t, 1, pageOne.PageNumber())
assert.Equal(t, 5, pageOne.PageSize()) require.Equal(t, 5, pageOne.PageSize())
} }
func TestPaginationURLFactory(t *testing.T) { func TestPaginationURLFactory(t *testing.T) {
testCommonResetState() t.Parallel()
cfg, fs := newTestCfg()
viper.Set("paginatePath", "zoo") cfg.Set("paginatePath", "zoo")
pathSpec := newTestPathSpec() pathSpec := newTestPathSpec(fs, cfg)
unicode := newPaginationURLFactory(pathSpec, "новости проекта") unicode := newPaginationURLFactory(pathSpec, "новости проекта")
fooBar := newPaginationURLFactory(pathSpec, "foo", "bar") fooBar := newPaginationURLFactory(pathSpec, "foo", "bar")
assert.Equal(t, "/foo/bar/", fooBar(1)) require.Equal(t, "/foo/bar/", fooBar(1))
assert.Equal(t, "/%D0%BD%D0%BE%D0%B2%D0%BE%D1%81%D1%82%D0%B8-%D0%BF%D1%80%D0%BE%D0%B5%D0%BA%D1%82%D0%B0/zoo/4/", unicode(4)) require.Equal(t, "/%D0%BD%D0%BE%D0%B2%D0%BE%D1%81%D1%82%D0%B8-%D0%BF%D1%80%D0%BE%D0%B5%D0%BA%D1%82%D0%B0/zoo/4/", unicode(4))
unicoded := unicode(4) unicoded := unicode(4)
unicodedExpected := "/%D0%BD%D0%BE%D0%B2%D0%BE%D1%81%D1%82%D0%B8-%D0%BF%D1%80%D0%BE%D0%B5%D0%BA%D1%82%D0%B0/zoo/4/" unicodedExpected := "/%D0%BD%D0%BE%D0%B2%D0%BE%D1%81%D1%82%D0%B8-%D0%BF%D1%80%D0%BE%D0%B5%D0%BA%D1%82%D0%B0/zoo/4/"
@ -216,30 +219,32 @@ func TestPaginationURLFactory(t *testing.T) {
t.Fatal("Expected\n", unicodedExpected, "\nGot\n", unicoded) t.Fatal("Expected\n", unicodedExpected, "\nGot\n", unicoded)
} }
assert.Equal(t, "/foo/bar/zoo/12345/", fooBar(12345)) require.Equal(t, "/foo/bar/zoo/12345/", fooBar(12345))
} }
func TestPaginator(t *testing.T) { func TestPaginator(t *testing.T) {
testCommonResetState() t.Parallel()
for _, useViper := range []bool{false, true} { for _, useViper := range []bool{false, true} {
doTestPaginator(t, useViper) doTestPaginator(t, useViper)
} }
} }
func doTestPaginator(t *testing.T, useViper bool) { func doTestPaginator(t *testing.T, useViper bool) {
testCommonResetState()
cfg, fs := newTestCfg()
pagerSize := 5 pagerSize := 5
if useViper { if useViper {
viper.Set("paginate", pagerSize) cfg.Set("paginate", pagerSize)
} else { } else {
viper.Set("paginate", -1) cfg.Set("paginate", -1)
} }
pages := createTestPages(12)
s, err := NewSiteDefaultLang() s, err := NewSiteForCfg(deps.DepsCfg{Cfg: cfg, Fs: fs})
require.NoError(t, err) require.NoError(t, err)
pages := createTestPages(s, 12)
n1 := s.newHomePage() n1 := s.newHomePage()
n2 := s.newHomePage() n2 := s.newHomePage()
n1.Data["Pages"] = pages n1.Data["Pages"] = pages
@ -252,50 +257,45 @@ func doTestPaginator(t *testing.T, useViper bool) {
paginator1, err = n1.Paginator(pagerSize) paginator1, err = n1.Paginator(pagerSize)
} }
assert.Nil(t, err) require.Nil(t, err)
assert.NotNil(t, paginator1) require.NotNil(t, paginator1)
assert.Equal(t, 3, paginator1.TotalPages()) require.Equal(t, 3, paginator1.TotalPages())
assert.Equal(t, 12, paginator1.TotalNumberOfElements()) require.Equal(t, 12, paginator1.TotalNumberOfElements())
n2.paginator = paginator1.Next() n2.paginator = paginator1.Next()
paginator2, err := n2.Paginator() paginator2, err := n2.Paginator()
assert.Nil(t, err) require.Nil(t, err)
assert.Equal(t, paginator2, paginator1.Next()) require.Equal(t, paginator2, paginator1.Next())
n1.Data["Pages"] = createTestPages(1) n1.Data["Pages"] = createTestPages(s, 1)
samePaginator, _ := n1.Paginator() samePaginator, _ := n1.Paginator()
assert.Equal(t, paginator1, samePaginator) require.Equal(t, paginator1, samePaginator)
p, _ := pageTestSite.NewPage("test") p, _ := s.NewPage("test")
_, err = p.Paginator() _, err = p.Paginator()
assert.NotNil(t, err) require.NotNil(t, err)
} }
func TestPaginatorWithNegativePaginate(t *testing.T) { func TestPaginatorWithNegativePaginate(t *testing.T) {
testCommonResetState() t.Parallel()
s := newTestSite(t, "paginate", -1)
viper.Set("paginate", -1) _, err := s.newHomePage().Paginator()
s, err := NewSiteDefaultLang()
require.NoError(t, err)
_, err = s.newHomePage().Paginator()
require.Error(t, err) require.Error(t, err)
} }
func TestPaginate(t *testing.T) { func TestPaginate(t *testing.T) {
testCommonResetState() t.Parallel()
for _, useViper := range []bool{false, true} { for _, useViper := range []bool{false, true} {
doTestPaginate(t, useViper) doTestPaginate(t, useViper)
} }
} }
func TestPaginatorURL(t *testing.T) { func TestPaginatorURL(t *testing.T) {
t.Parallel()
cfg, fs := newTestCfg()
testCommonResetState() cfg.Set("paginate", 2)
viper.Set("paginate", 2) cfg.Set("paginatePath", "testing")
viper.Set("paginatePath", "testing")
fs := hugofs.NewMem()
for i := 0; i < 10; i++ { for i := 0; i < 10; i++ {
// Issue #2177, do not double encode URLs // Issue #2177, do not double encode URLs
@ -318,23 +318,29 @@ Pages: {{ .Paginator.TotalPages }}
{{ end }} {{ end }}
</body></html>`) </body></html>`)
buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{}) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
assertFileContent(t, fs, filepath.Join("public", "阅读", "testing", "2", "index.html"), false, "2: /%E9%98%85%E8%AF%BB/testing/2/") th := testHelper{s.Cfg}
th.assertFileContent(t, fs, filepath.Join("public", "阅读", "testing", "2", "index.html"), false, "2: /%E9%98%85%E8%AF%BB/testing/2/")
} }
func doTestPaginate(t *testing.T, useViper bool) { func doTestPaginate(t *testing.T, useViper bool) {
pagerSize := 5 pagerSize := 5
var (
s *Site
err error
)
if useViper { if useViper {
viper.Set("paginate", pagerSize) s = newTestSite(t, "paginate", pagerSize)
} else { } else {
viper.Set("paginate", -1) s = newTestSite(t, "paginate", -1)
} }
pages := createTestPages(6) pages := createTestPages(s, 6)
s, err := NewSiteDefaultLang()
require.NoError(t, err)
n1 := s.newHomePage() n1 := s.newHomePage()
n2 := s.newHomePage() n2 := s.newHomePage()
@ -346,10 +352,10 @@ func doTestPaginate(t *testing.T, useViper bool) {
paginator1, err = n1.Paginate(pages, pagerSize) paginator1, err = n1.Paginate(pages, pagerSize)
} }
assert.Nil(t, err) require.Nil(t, err)
assert.NotNil(t, paginator1) require.NotNil(t, paginator1)
assert.Equal(t, 2, paginator1.TotalPages()) require.Equal(t, 2, paginator1.TotalPages())
assert.Equal(t, 6, paginator1.TotalNumberOfElements()) require.Equal(t, 6, paginator1.TotalNumberOfElements())
n2.paginator = paginator1.Next() n2.paginator = paginator1.Next()
if useViper { if useViper {
@ -357,101 +363,102 @@ func doTestPaginate(t *testing.T, useViper bool) {
} else { } else {
paginator2, err = n2.Paginate(pages, pagerSize) paginator2, err = n2.Paginate(pages, pagerSize)
} }
assert.Nil(t, err) require.Nil(t, err)
assert.Equal(t, paginator2, paginator1.Next()) require.Equal(t, paginator2, paginator1.Next())
p, _ := pageTestSite.NewPage("test") p, _ := s.NewPage("test")
_, err = p.Paginate(pages) _, err = p.Paginate(pages)
assert.NotNil(t, err) require.NotNil(t, err)
} }
func TestInvalidOptions(t *testing.T) { func TestInvalidOptions(t *testing.T) {
s, err := NewSiteDefaultLang() t.Parallel()
require.NoError(t, err) s := newTestSite(t)
n1 := s.newHomePage() n1 := s.newHomePage()
_, err = n1.Paginate(createTestPages(1), 1, 2) _, err := n1.Paginate(createTestPages(s, 1), 1, 2)
assert.NotNil(t, err) require.NotNil(t, err)
_, err = n1.Paginator(1, 2) _, err = n1.Paginator(1, 2)
assert.NotNil(t, err) require.NotNil(t, err)
_, err = n1.Paginator(-1) _, err = n1.Paginator(-1)
assert.NotNil(t, err) require.NotNil(t, err)
} }
func TestPaginateWithNegativePaginate(t *testing.T) { func TestPaginateWithNegativePaginate(t *testing.T) {
testCommonResetState() t.Parallel()
cfg, fs := newTestCfg()
cfg.Set("paginate", -1)
viper.Set("paginate", -1) s, err := NewSiteForCfg(deps.DepsCfg{Cfg: cfg, Fs: fs})
s, err := NewSiteDefaultLang()
require.NoError(t, err) require.NoError(t, err)
_, err = s.newHomePage().Paginate(createTestPages(2))
assert.NotNil(t, err) _, err = s.newHomePage().Paginate(createTestPages(s, 2))
require.NotNil(t, err)
} }
func TestPaginatePages(t *testing.T) { func TestPaginatePages(t *testing.T) {
groups, _ := createTestPages(31).GroupBy("Weight", "desc") t.Parallel()
pathSpec := newTestPathSpec() s := newTestSite(t)
for i, seq := range []interface{}{createTestPages(11), groups, WeightedPages{}, PageGroup{}, &Pages{}} { groups, _ := createTestPages(s, 31).GroupBy("Weight", "desc")
v, err := paginatePages(pathSpec, seq, 11, "t")
assert.NotNil(t, v, "Val %d", i) for i, seq := range []interface{}{createTestPages(s, 11), groups, WeightedPages{}, PageGroup{}, &Pages{}} {
assert.Nil(t, err, "Err %d", i) v, err := paginatePages(s.PathSpec, seq, 11, "t")
require.NotNil(t, v, "Val %d", i)
require.Nil(t, err, "Err %d", i)
} }
_, err := paginatePages(pathSpec, Site{}, 11, "t") _, err := paginatePages(s.PathSpec, Site{}, 11, "t")
assert.NotNil(t, err) require.NotNil(t, err)
} }
// Issue #993 // Issue #993
func TestPaginatorFollowedByPaginateShouldFail(t *testing.T) { func TestPaginatorFollowedByPaginateShouldFail(t *testing.T) {
testCommonResetState() t.Parallel()
s := newTestSite(t, "paginate", 10)
viper.Set("paginate", 10)
s, err := NewSiteDefaultLang()
require.NoError(t, err)
n1 := s.newHomePage() n1 := s.newHomePage()
n2 := s.newHomePage() n2 := s.newHomePage()
_, err = n1.Paginator() _, err := n1.Paginator()
assert.Nil(t, err) require.Nil(t, err)
_, err = n1.Paginate(createTestPages(2)) _, err = n1.Paginate(createTestPages(s, 2))
assert.NotNil(t, err) require.NotNil(t, err)
_, err = n2.Paginate(createTestPages(2)) _, err = n2.Paginate(createTestPages(s, 2))
assert.Nil(t, err) require.Nil(t, err)
} }
func TestPaginateFollowedByDifferentPaginateShouldFail(t *testing.T) { func TestPaginateFollowedByDifferentPaginateShouldFail(t *testing.T) {
testCommonResetState() t.Parallel()
s := newTestSite(t, "paginate", 10)
viper.Set("paginate", 10)
s, err := NewSiteDefaultLang()
require.NoError(t, err)
n1 := s.newHomePage() n1 := s.newHomePage()
n2 := s.newHomePage() n2 := s.newHomePage()
p1 := createTestPages(2) p1 := createTestPages(s, 2)
p2 := createTestPages(10) p2 := createTestPages(s, 10)
_, err := n1.Paginate(p1)
require.Nil(t, err)
_, err = n1.Paginate(p1) _, err = n1.Paginate(p1)
assert.Nil(t, err) require.Nil(t, err)
_, err = n1.Paginate(p1)
assert.Nil(t, err)
_, err = n1.Paginate(p2) _, err = n1.Paginate(p2)
assert.NotNil(t, err) require.NotNil(t, err)
_, err = n2.Paginate(p2) _, err = n2.Paginate(p2)
assert.Nil(t, err) require.Nil(t, err)
} }
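Taken together, the paginator tests now follow one flow: configure pagination on the test site itself, create the pages against that site, and paginate a home page node. A condensed sketch using only helpers shown in this file (the page count is illustrative; 12 pages at size 5 give 3 pagers):

func TestPaginateFlowSketch(t *testing.T) {
	t.Parallel()

	s := newTestSite(t, "paginate", 5) // per-site "paginate" setting, no viper.Set
	pages := createTestPages(s, 12)    // pages are created against a concrete site

	home := s.newHomePage()
	pager, err := home.Paginate(pages)
	require.Nil(t, err)
	require.Equal(t, 3, pager.TotalPages())
	require.Equal(t, 12, pager.TotalNumberOfElements())
}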
func TestProbablyEqualPageLists(t *testing.T) { func TestProbablyEqualPageLists(t *testing.T) {
fivePages := createTestPages(5) t.Parallel()
zeroPages := createTestPages(0) s := newTestSite(t)
zeroPagesByWeight, _ := createTestPages(0).GroupBy("Weight", "asc") fivePages := createTestPages(s, 5)
fivePagesByWeight, _ := createTestPages(5).GroupBy("Weight", "asc") zeroPages := createTestPages(s, 0)
ninePagesByWeight, _ := createTestPages(9).GroupBy("Weight", "asc") zeroPagesByWeight, _ := createTestPages(s, 0).GroupBy("Weight", "asc")
fivePagesByWeight, _ := createTestPages(s, 5).GroupBy("Weight", "asc")
ninePagesByWeight, _ := createTestPages(s, 9).GroupBy("Weight", "asc")
for i, this := range []struct { for i, this := range []struct {
v1 interface{} v1 interface{}
@ -462,7 +469,7 @@ func TestProbablyEqualPageLists(t *testing.T) {
{"a", "b", true}, {"a", "b", true},
{"a", fivePages, false}, {"a", fivePages, false},
{fivePages, "a", false}, {fivePages, "a", false},
{fivePages, createTestPages(2), false}, {fivePages, createTestPages(s, 2), false},
{fivePages, fivePages, true}, {fivePages, fivePages, true},
{zeroPages, zeroPages, true}, {zeroPages, zeroPages, true},
{fivePagesByWeight, fivePagesByWeight, true}, {fivePagesByWeight, fivePagesByWeight, true},
@ -481,12 +488,15 @@ func TestProbablyEqualPageLists(t *testing.T) {
} }
func TestPage(t *testing.T) { func TestPage(t *testing.T) {
t.Parallel()
urlFactory := func(page int) string { urlFactory := func(page int) string {
return fmt.Sprintf("page/%d/", page) return fmt.Sprintf("page/%d/", page)
} }
fivePages := createTestPages(7) s := newTestSite(t)
fivePagesFuzzyWordCount, _ := createTestPages(7).GroupBy("FuzzyWordCount", "asc")
fivePages := createTestPages(s, 7)
fivePagesFuzzyWordCount, _ := createTestPages(s, 7).GroupBy("FuzzyWordCount", "asc")
p1, _ := newPaginatorFromPages(fivePages, 2, urlFactory) p1, _ := newPaginatorFromPages(fivePages, 2, urlFactory)
p2, _ := newPaginatorFromPageGroups(fivePagesFuzzyWordCount, 2, urlFactory) p2, _ := newPaginatorFromPageGroups(fivePagesFuzzyWordCount, 2, urlFactory)
@ -500,33 +510,26 @@ func TestPage(t *testing.T) {
page21, _ := f2.page(1) page21, _ := f2.page(1)
page2Nil, _ := f2.page(3) page2Nil, _ := f2.page(3)
assert.Equal(t, 3, page11.fuzzyWordCount) require.Equal(t, 3, page11.fuzzyWordCount)
assert.Nil(t, page1Nil) require.Nil(t, page1Nil)
assert.Equal(t, 3, page21.fuzzyWordCount) require.Equal(t, 3, page21.fuzzyWordCount)
assert.Nil(t, page2Nil) require.Nil(t, page2Nil)
} }
func createTestPages(num int) Pages { func createTestPages(s *Site, num int) Pages {
pages := make(Pages, num) pages := make(Pages, num)
info := newSiteInfo(siteBuilderCfg{baseURL: "http://base/", language: helpers.NewDefaultLanguage()})
for i := 0; i < num; i++ { for i := 0; i < num; i++ {
pages[i] = &Page{ p := s.newPage(filepath.FromSlash(fmt.Sprintf("/x/y/z/p%d.md", i)))
pageInit: &pageInit{},
URLPath: URLPath{
Section: "z",
URL: fmt.Sprintf("http://base/x/y/p%d.html", i),
},
Site: &info,
Source: Source{File: *source.NewFile(filepath.FromSlash(fmt.Sprintf("/x/y/p%d.md", i)))},
}
w := 5 w := 5
if i%2 == 0 { if i%2 == 0 {
w = 10 w = 10
} }
pages[i].fuzzyWordCount = i + 2 p.fuzzyWordCount = i + 2
pages[i].Weight = w p.Weight = w
pages[i] = p
} }
return pages return pages


@ -26,7 +26,9 @@ Sample Text
` `
func TestDegenerateMissingFolderInPageFilename(t *testing.T) { func TestDegenerateMissingFolderInPageFilename(t *testing.T) {
p, err := pageTestSite.NewPageFrom(strings.NewReader(simplePageYAML), filepath.Join("foobar")) t.Parallel()
s := newTestSite(t)
p, err := s.NewPageFrom(strings.NewReader(simplePageYAML), filepath.Join("foobar"))
if err != nil { if err != nil {
t.Fatalf("Error in NewPageFrom") t.Fatalf("Error in NewPageFrom")
} }
@ -36,6 +38,8 @@ func TestDegenerateMissingFolderInPageFilename(t *testing.T) {
} }
func TestNewPageWithFilePath(t *testing.T) { func TestNewPageWithFilePath(t *testing.T) {
t.Parallel()
s := newTestSite(t)
toCheck := []struct { toCheck := []struct {
input string input string
section string section string
@ -48,7 +52,7 @@ func TestNewPageWithFilePath(t *testing.T) {
} }
for i, el := range toCheck { for i, el := range toCheck {
p, err := pageTestSite.NewPageFrom(strings.NewReader(simplePageYAML), el.input) p, err := s.NewPageFrom(strings.NewReader(simplePageYAML), el.input)
if err != nil { if err != nil {
t.Errorf("[%d] Reading from simplePageYAML resulted in an error: %s", i, err) t.Errorf("[%d] Reading from simplePageYAML resulted in an error: %s", i, err)
} }


@ -14,8 +14,9 @@
package hugolib package hugolib
import ( import (
"github.com/spf13/hugo/tpl"
"testing" "testing"
"github.com/spf13/hugo/tpl"
) )
const ( const (
@ -24,6 +25,7 @@ const (
) )
func TestTemplatePathSeparator(t *testing.T) { func TestTemplatePathSeparator(t *testing.T) {
t.Parallel()
tmpl := new(tpl.GoHTMLTemplate) tmpl := new(tpl.GoHTMLTemplate)
if name := tmpl.GenerateTemplateNameFrom(win_base, win_path); name != "sub1/index.html" { if name := tmpl.GenerateTemplateNameFrom(win_base, win_path); name != "sub1/index.html" {
t.Fatalf("Template name incorrect. got %s but expected %s", name, "sub1/index.html") t.Fatalf("Template name incorrect. got %s but expected %s", name, "sub1/index.html")


@ -16,8 +16,6 @@ package hugolib
import ( import (
"strings" "strings"
"testing" "testing"
"github.com/spf13/hugo/helpers"
) )
// testdataPermalinks is used by a couple of tests; the expandsTo content is // testdataPermalinks is used by a couple of tests; the expandsTo content is
@ -54,6 +52,7 @@ var testdataPermalinks = []struct {
} }
func TestPermalinkValidation(t *testing.T) { func TestPermalinkValidation(t *testing.T) {
t.Parallel()
for _, item := range testdataPermalinks { for _, item := range testdataPermalinks {
pp := pathPattern(item.spec) pp := pathPattern(item.spec)
have := pp.validate() have := pp.validate()
@ -71,9 +70,10 @@ func TestPermalinkValidation(t *testing.T) {
} }
func TestPermalinkExpansion(t *testing.T) { func TestPermalinkExpansion(t *testing.T) {
page, err := pageTestSite.NewPageFrom(strings.NewReader(simplePageJSON), "blue/test-page.md") t.Parallel()
info := newSiteInfo(siteBuilderCfg{language: helpers.NewDefaultLanguage()}) s := newTestSite(t)
page.Site = &info page, err := s.NewPageFrom(strings.NewReader(simplePageJSON), "blue/test-page.md")
if err != nil { if err != nil {
t.Fatalf("failed before we began, could not parse SIMPLE_PAGE_JSON: %s", err) t.Fatalf("failed before we began, could not parse SIMPLE_PAGE_JSON: %s", err)
} }


@ -17,10 +17,7 @@ import (
"path/filepath" "path/filepath"
"testing" "testing"
"github.com/spf13/hugo/hugofs"
"github.com/spf13/hugo/deps" "github.com/spf13/hugo/deps"
"github.com/spf13/viper"
) )
const robotTxtTemplate = `User-agent: Googlebot const robotTxtTemplate = `User-agent: Googlebot
@ -30,18 +27,20 @@ const robotTxtTemplate = `User-agent: Googlebot
` `
func TestRobotsTXTOutput(t *testing.T) { func TestRobotsTXTOutput(t *testing.T) {
testCommonResetState() t.Parallel()
var (
cfg, fs = newTestCfg()
th = testHelper{cfg}
)
viper.Set("baseURL", "http://auth/bub/") cfg.Set("baseURL", "http://auth/bub/")
viper.Set("enableRobotsTXT", true) cfg.Set("enableRobotsTXT", true)
fs := hugofs.NewMem()
writeSource(t, fs, filepath.Join("layouts", "robots.txt"), robotTxtTemplate) writeSource(t, fs, filepath.Join("layouts", "robots.txt"), robotTxtTemplate)
writeSourcesToSource(t, "content", fs, weightedSources...) writeSourcesToSource(t, "content", fs, weightedSources...)
buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{}) buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
assertFileContent(t, fs, "public/robots.txt", true, "User-agent: Googlebot") th.assertFileContent(t, fs, "public/robots.txt", true, "User-agent: Googlebot")
} }


@ -18,31 +18,32 @@ import (
"testing" "testing"
"github.com/spf13/hugo/deps" "github.com/spf13/hugo/deps"
"github.com/spf13/hugo/hugofs"
"github.com/spf13/viper"
) )
func TestRSSOutput(t *testing.T) { func TestRSSOutput(t *testing.T) {
testCommonResetState() t.Parallel()
var (
cfg, fs = newTestCfg()
th = testHelper{cfg}
)
rssURI := "customrss.xml" rssURI := "customrss.xml"
viper.Set("baseURL", "http://auth/bub/")
viper.Set("rssURI", rssURI)
viper.Set("title", "RSSTest")
fs := hugofs.NewMem() cfg.Set("baseURL", "http://auth/bub/")
cfg.Set("rssURI", rssURI)
cfg.Set("title", "RSSTest")
for _, src := range weightedSources { for _, src := range weightedSources {
writeSource(t, fs, filepath.Join("content", "sect", src.Name), string(src.Content)) writeSource(t, fs, filepath.Join("content", "sect", src.Name), string(src.Content))
} }
buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{}) buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
// Home RSS // Home RSS
assertFileContent(t, fs, filepath.Join("public", rssURI), true, "<?xml", "rss version", "RSSTest") th.assertFileContent(t, fs, filepath.Join("public", rssURI), true, "<?xml", "rss version", "RSSTest")
// Section RSS // Section RSS
assertFileContent(t, fs, filepath.Join("public", "sect", rssURI), true, "<?xml", "rss version", "Sects on RSSTest") th.assertFileContent(t, fs, filepath.Join("public", "sect", rssURI), true, "<?xml", "rss version", "Sects on RSSTest")
// Taxonomy RSS // Taxonomy RSS
assertFileContent(t, fs, filepath.Join("public", "categories", "hugo", rssURI), true, "<?xml", "rss version", "Hugo on RSSTest") th.assertFileContent(t, fs, filepath.Join("public", "categories", "hugo", rssURI), true, "<?xml", "rss version", "Hugo on RSSTest")
} }
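Both output tests above share the same post-refactor skeleton: a private cfg/fs pair, a testHelper wrapping that cfg, a single-site build, and content assertions against the in-memory public/ tree via th.assertFileContent. A standalone sketch of that skeleton (the layout and the expected string are placeholders; that the home template picks up the title this way is an assumption, not something this diff shows):

func TestHomeOutputSketch(t *testing.T) {
	t.Parallel()

	var (
		cfg, fs = newTestCfg()
		th      = testHelper{cfg}
	)

	cfg.Set("baseURL", "http://auth/bub/")
	cfg.Set("title", "OutputTest")

	// Hypothetical home template; the real tests in these files ship their own layouts.
	writeSource(t, fs, filepath.Join("layouts", "index.html"), `Title: {{ .Site.Title }}`)

	buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})

	th.assertFileContent(t, fs, filepath.Join("public", "index.html"), true, "Title: OutputTest")
}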


@ -14,13 +14,15 @@
package hugolib package hugolib
import ( import (
"github.com/stretchr/testify/assert"
"reflect" "reflect"
"sync" "sync"
"testing" "testing"
"github.com/stretchr/testify/assert"
) )
func TestScratchAdd(t *testing.T) { func TestScratchAdd(t *testing.T) {
t.Parallel()
scratch := newScratch() scratch := newScratch()
scratch.Add("int1", 10) scratch.Add("int1", 10)
scratch.Add("int1", 20) scratch.Add("int1", 20)
@ -50,6 +52,7 @@ func TestScratchAdd(t *testing.T) {
} }
func TestScratchAddSlice(t *testing.T) { func TestScratchAddSlice(t *testing.T) {
t.Parallel()
scratch := newScratch() scratch := newScratch()
_, err := scratch.Add("intSlice", []int{1, 2}) _, err := scratch.Add("intSlice", []int{1, 2})
@ -78,6 +81,7 @@ func TestScratchAddSlice(t *testing.T) {
} }
func TestScratchSet(t *testing.T) { func TestScratchSet(t *testing.T) {
t.Parallel()
scratch := newScratch() scratch := newScratch()
scratch.Set("key", "val") scratch.Set("key", "val")
assert.Equal(t, "val", scratch.Get("key")) assert.Equal(t, "val", scratch.Get("key"))
@ -119,6 +123,7 @@ func TestScratchInParallel(t *testing.T) {
} }
func TestScratchGet(t *testing.T) { func TestScratchGet(t *testing.T) {
t.Parallel()
scratch := newScratch() scratch := newScratch()
nothing := scratch.Get("nothing") nothing := scratch.Get("nothing")
if nothing != nil { if nothing != nil {
@ -127,6 +132,7 @@ func TestScratchGet(t *testing.T) {
} }
func TestScratchSetInMap(t *testing.T) { func TestScratchSetInMap(t *testing.T) {
t.Parallel()
scratch := newScratch() scratch := newScratch()
scratch.SetInMap("key", "lux", "Lux") scratch.SetInMap("key", "lux", "Lux")
scratch.SetInMap("key", "abc", "Abc") scratch.SetInMap("key", "abc", "Abc")
@ -137,6 +143,7 @@ func TestScratchSetInMap(t *testing.T) {
} }
func TestScratchGetSortedMapValues(t *testing.T) { func TestScratchGetSortedMapValues(t *testing.T) {
t.Parallel()
scratch := newScratch() scratch := newScratch()
nothing := scratch.GetSortedMapValues("nothing") nothing := scratch.GetSortedMapValues("nothing")
if nothing != nil { if nothing != nil {


@ -149,31 +149,6 @@ func (sc shortcode) String() string {
return fmt.Sprintf("%s(%q, %t){%s}", sc.name, params, sc.doMarkup, sc.inner) return fmt.Sprintf("%s(%q, %t){%s}", sc.name, params, sc.doMarkup, sc.inner)
} }
// HandleShortcodes does all in one go: extract, render and replace
// only used for testing
func HandleShortcodes(stringToParse string, page *Page) (string, error) {
tmpContent, tmpShortcodes, err := extractAndRenderShortcodes(stringToParse, page)
if err != nil {
return "", err
}
if len(tmpShortcodes) > 0 {
shortcodes, err := executeShortcodeFuncMap(tmpShortcodes)
if err != nil {
return "", err
}
tmpContentWithTokensReplaced, err := replaceShortcodeTokens([]byte(tmpContent), shortcodePlaceholderPrefix, shortcodes)
if err != nil {
return "", fmt.Errorf("Failed to replace shortcode tokens in %s:\n%s", page.BaseFileName(), err.Error())
}
return string(tmpContentWithTokensReplaced), nil
}
return tmpContent, nil
}
var isInnerShortcodeCache = struct { var isInnerShortcodeCache = struct {
sync.RWMutex sync.RWMutex
m map[string]bool m map[string]bool
@ -239,12 +214,12 @@ func renderShortcode(sc shortcode, parent *ShortcodeWithPage, p *Page) string {
} }
if sc.doMarkup { if sc.doMarkup {
newInner := helpers.RenderBytes(&helpers.RenderingContext{ newInner := p.s.ContentSpec.RenderBytes(&helpers.RenderingContext{
Content: []byte(inner), PageFmt: p.determineMarkupType(), Content: []byte(inner), PageFmt: p.determineMarkupType(),
ConfigProvider: p.Language(), Cfg: p.Language(),
DocumentID: p.UniqueID(), DocumentID: p.UniqueID(),
DocumentName: p.Path(), DocumentName: p.Path(),
Config: p.getRenderingConfig()}) Config: p.getRenderingConfig()})
// If the type is “unknown” or “markdown”, we assume the markdown // If the type is “unknown” or “markdown”, we assume the markdown
// generation has been performed. Given the input: `a line`, markdown // generation has been performed. Given the input: `a line`, markdown


@ -24,20 +24,22 @@ import (
"github.com/spf13/hugo/deps" "github.com/spf13/hugo/deps"
"github.com/spf13/hugo/helpers" "github.com/spf13/hugo/helpers"
"github.com/spf13/hugo/hugofs"
"github.com/spf13/hugo/source" "github.com/spf13/hugo/source"
"github.com/spf13/hugo/tplapi" "github.com/spf13/hugo/tplapi"
"github.com/spf13/viper"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
// TODO(bep) remove // TODO(bep) remove
func pageFromString(in, filename string, withTemplate ...func(templ tplapi.Template) error) (*Page, error) { func pageFromString(in, filename string, withTemplate ...func(templ tplapi.Template) error) (*Page, error) {
s := pageTestSite s := newTestSite(nil)
if len(withTemplate) > 0 { if len(withTemplate) > 0 {
// Have to create a new site // Have to create a new site
var err error var err error
s, err = NewSiteDefaultLang(withTemplate...) cfg, fs := newTestCfg()
d := deps.DepsCfg{Language: helpers.NewLanguage("en", cfg), Fs: fs, WithTemplate: withTemplate[0]}
s, err = NewSiteForCfg(d)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -50,9 +52,8 @@ func CheckShortCodeMatch(t *testing.T, input, expected string, withTemplate func
} }
func CheckShortCodeMatchAndError(t *testing.T, input, expected string, withTemplate func(templ tplapi.Template) error, expectError bool) { func CheckShortCodeMatchAndError(t *testing.T, input, expected string, withTemplate func(templ tplapi.Template) error, expectError bool) {
testCommonResetState()
fs := hugofs.NewMem() cfg, fs := newTestCfg()
// Need some front matter, see https://github.com/spf13/hugo/issues/2337 // Need some front matter, see https://github.com/spf13/hugo/issues/2337
contentFile := `--- contentFile := `---
@ -62,7 +63,7 @@ title: "Title"
writeSource(t, fs, "content/simple.md", contentFile) writeSource(t, fs, "content/simple.md", contentFile)
h, err := NewHugoSitesFromConfiguration(deps.DepsCfg{Fs: fs, WithTemplate: withTemplate}) h, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg, WithTemplate: withTemplate})
require.NoError(t, err) require.NoError(t, err)
require.Len(t, h.Sites, 1) require.Len(t, h.Sites, 1)
@ -90,43 +91,15 @@ title: "Title"
} }
} }
func TestShortcodeGoFuzzReports(t *testing.T) {
p, _ := pageFromString(simplePage, "simple.md", func(templ tplapi.Template) error {
return templ.AddInternalShortcode("sc.html", `foo`)
})
for i, this := range []struct {
data string
expectErr bool
}{
{"{{</*/", true},
} {
output, err := HandleShortcodes(this.data, p)
if this.expectErr && err == nil {
t.Errorf("[%d] should have errored", i)
}
if !this.expectErr && err != nil {
t.Errorf("[%d] should not have errored: %s", i, err)
}
if !this.expectErr && err == nil && len(output) == 0 {
t.Errorf("[%d] empty result", i)
}
}
}
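The rewritten CheckShortCodeMatchAndError above shows the new wiring for shortcode tests: the shortcode template is registered through DepsCfg.WithTemplate and the sites are constructed with NewHugoSites over a test cfg/fs pair, instead of the removed global-viper path. A reduced sketch of that wiring (the shortcode name, template and content are illustrative; the real helper goes on to build the sites and compare rendered output):

func TestShortcodeWiringSketch(t *testing.T) {
	t.Parallel()

	// Register an illustrative shortcode the same way the helpers above do.
	wt := func(templ tplapi.Template) error {
		return templ.AddInternalShortcode("greet.html", `Hello {{ .Get 0 }}`)
	}

	cfg, fs := newTestCfg()
	// Front matter is needed, see https://github.com/spf13/hugo/issues/2337.
	writeSource(t, fs, "content/simple.md", "---\ntitle: \"Title\"\n---\n{{< greet \"world\" >}}\n")

	h, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg, WithTemplate: wt})
	require.NoError(t, err)
	require.Len(t, h.Sites, 1)
}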
func TestNonSC(t *testing.T) { func TestNonSC(t *testing.T) {
t.Parallel()
// notice the syntax diff from 0.12, now comment delims must be added // notice the syntax diff from 0.12, now comment delims must be added
CheckShortCodeMatch(t, "{{%/* movie 47238zzb */%}}", "{{% movie 47238zzb %}}", nil) CheckShortCodeMatch(t, "{{%/* movie 47238zzb */%}}", "{{% movie 47238zzb %}}", nil)
} }
// Issue #929 // Issue #929
func TestHyphenatedSC(t *testing.T) { func TestHyphenatedSC(t *testing.T) {
t.Parallel()
wt := func(tem tplapi.Template) error { wt := func(tem tplapi.Template) error {
tem.AddInternalShortcode("hyphenated-video.html", `Playing Video {{ .Get 0 }}`) tem.AddInternalShortcode("hyphenated-video.html", `Playing Video {{ .Get 0 }}`)
return nil return nil
@ -137,6 +110,7 @@ func TestHyphenatedSC(t *testing.T) {
// Issue #1753 // Issue #1753
func TestNoTrailingNewline(t *testing.T) { func TestNoTrailingNewline(t *testing.T) {
t.Parallel()
wt := func(tem tplapi.Template) error { wt := func(tem tplapi.Template) error {
tem.AddInternalShortcode("a.html", `{{ .Get 0 }}`) tem.AddInternalShortcode("a.html", `{{ .Get 0 }}`)
return nil return nil
@ -146,6 +120,7 @@ func TestNoTrailingNewline(t *testing.T) {
} }
func TestPositionalParamSC(t *testing.T) { func TestPositionalParamSC(t *testing.T) {
t.Parallel()
wt := func(tem tplapi.Template) error { wt := func(tem tplapi.Template) error {
tem.AddInternalShortcode("video.html", `Playing Video {{ .Get 0 }}`) tem.AddInternalShortcode("video.html", `Playing Video {{ .Get 0 }}`)
return nil return nil
@ -159,6 +134,7 @@ func TestPositionalParamSC(t *testing.T) {
} }
func TestPositionalParamIndexOutOfBounds(t *testing.T) { func TestPositionalParamIndexOutOfBounds(t *testing.T) {
t.Parallel()
wt := func(tem tplapi.Template) error { wt := func(tem tplapi.Template) error {
tem.AddInternalShortcode("video.html", `Playing Video {{ .Get 1 }}`) tem.AddInternalShortcode("video.html", `Playing Video {{ .Get 1 }}`)
return nil return nil
@ -169,6 +145,7 @@ func TestPositionalParamIndexOutOfBounds(t *testing.T) {
// some repro issues for panics in Go Fuzz testing // some repro issues for panics in Go Fuzz testing
func TestNamedParamSC(t *testing.T) { func TestNamedParamSC(t *testing.T) {
t.Parallel()
wt := func(tem tplapi.Template) error { wt := func(tem tplapi.Template) error {
tem.AddInternalShortcode("img.html", `<img{{ with .Get "src" }} src="{{.}}"{{end}}{{with .Get "class"}} class="{{.}}"{{end}}>`) tem.AddInternalShortcode("img.html", `<img{{ with .Get "src" }} src="{{.}}"{{end}}{{with .Get "class"}} class="{{.}}"{{end}}>`)
return nil return nil
@ -183,6 +160,7 @@ func TestNamedParamSC(t *testing.T) {
// Issue #2294 // Issue #2294
func TestNestedNamedMissingParam(t *testing.T) { func TestNestedNamedMissingParam(t *testing.T) {
t.Parallel()
wt := func(tem tplapi.Template) error { wt := func(tem tplapi.Template) error {
tem.AddInternalShortcode("acc.html", `<div class="acc">{{ .Inner }}</div>`) tem.AddInternalShortcode("acc.html", `<div class="acc">{{ .Inner }}</div>`)
tem.AddInternalShortcode("div.html", `<div {{with .Get "class"}} class="{{ . }}"{{ end }}>{{ .Inner }}</div>`) tem.AddInternalShortcode("div.html", `<div {{with .Get "class"}} class="{{ . }}"{{ end }}>{{ .Inner }}</div>`)
@ -195,6 +173,7 @@ func TestNestedNamedMissingParam(t *testing.T) {
} }
func TestIsNamedParamsSC(t *testing.T) { func TestIsNamedParamsSC(t *testing.T) {
t.Parallel()
wt := func(tem tplapi.Template) error { wt := func(tem tplapi.Template) error {
tem.AddInternalShortcode("byposition.html", `<div id="{{ .Get 0 }}">`) tem.AddInternalShortcode("byposition.html", `<div id="{{ .Get 0 }}">`)
tem.AddInternalShortcode("byname.html", `<div id="{{ .Get "id" }}">`) tem.AddInternalShortcode("byname.html", `<div id="{{ .Get "id" }}">`)
@ -210,6 +189,7 @@ func TestIsNamedParamsSC(t *testing.T) {
} }
func TestInnerSC(t *testing.T) { func TestInnerSC(t *testing.T) {
t.Parallel()
wt := func(tem tplapi.Template) error { wt := func(tem tplapi.Template) error {
tem.AddInternalShortcode("inside.html", `<div{{with .Get "class"}} class="{{.}}"{{end}}>{{ .Inner }}</div>`) tem.AddInternalShortcode("inside.html", `<div{{with .Get "class"}} class="{{.}}"{{end}}>{{ .Inner }}</div>`)
return nil return nil
@ -220,6 +200,7 @@ func TestInnerSC(t *testing.T) {
} }
func TestInnerSCWithMarkdown(t *testing.T) { func TestInnerSCWithMarkdown(t *testing.T) {
t.Parallel()
wt := func(tem tplapi.Template) error { wt := func(tem tplapi.Template) error {
tem.AddInternalShortcode("inside.html", `<div{{with .Get "class"}} class="{{.}}"{{end}}>{{ .Inner }}</div>`) tem.AddInternalShortcode("inside.html", `<div{{with .Get "class"}} class="{{.}}"{{end}}>{{ .Inner }}</div>`)
return nil return nil
@ -233,6 +214,7 @@ func TestInnerSCWithMarkdown(t *testing.T) {
} }
func TestInnerSCWithAndWithoutMarkdown(t *testing.T) { func TestInnerSCWithAndWithoutMarkdown(t *testing.T) {
t.Parallel()
wt := func(tem tplapi.Template) error { wt := func(tem tplapi.Template) error {
tem.AddInternalShortcode("inside.html", `<div{{with .Get "class"}} class="{{.}}"{{end}}>{{ .Inner }}</div>`) tem.AddInternalShortcode("inside.html", `<div{{with .Get "class"}} class="{{.}}"{{end}}>{{ .Inner }}</div>`)
return nil return nil
@ -256,12 +238,14 @@ This is **plain** text.
} }
func TestEmbeddedSC(t *testing.T) { func TestEmbeddedSC(t *testing.T) {
t.Parallel()
CheckShortCodeMatch(t, "{{% test %}}", "This is a simple Test", nil) CheckShortCodeMatch(t, "{{% test %}}", "This is a simple Test", nil)
CheckShortCodeMatch(t, `{{% figure src="/found/here" class="bananas orange" %}}`, "\n<figure class=\"bananas orange\">\n \n <img src=\"/found/here\" />\n \n \n</figure>\n", nil) CheckShortCodeMatch(t, `{{% figure src="/found/here" class="bananas orange" %}}`, "\n<figure class=\"bananas orange\">\n \n <img src=\"/found/here\" />\n \n \n</figure>\n", nil)
CheckShortCodeMatch(t, `{{% figure src="/found/here" class="bananas orange" caption="This is a caption" %}}`, "\n<figure class=\"bananas orange\">\n \n <img src=\"/found/here\" alt=\"This is a caption\" />\n \n \n <figcaption>\n <p>\n This is a caption\n \n \n \n </p> \n </figcaption>\n \n</figure>\n", nil) CheckShortCodeMatch(t, `{{% figure src="/found/here" class="bananas orange" caption="This is a caption" %}}`, "\n<figure class=\"bananas orange\">\n \n <img src=\"/found/here\" alt=\"This is a caption\" />\n \n \n <figcaption>\n <p>\n This is a caption\n \n \n \n </p> \n </figcaption>\n \n</figure>\n", nil)
} }
func TestNestedSC(t *testing.T) { func TestNestedSC(t *testing.T) {
t.Parallel()
wt := func(tem tplapi.Template) error { wt := func(tem tplapi.Template) error {
tem.AddInternalShortcode("scn1.html", `<div>Outer, inner is {{ .Inner }}</div>`) tem.AddInternalShortcode("scn1.html", `<div>Outer, inner is {{ .Inner }}</div>`)
tem.AddInternalShortcode("scn2.html", `<div>SC2</div>`) tem.AddInternalShortcode("scn2.html", `<div>SC2</div>`)
@ -273,6 +257,7 @@ func TestNestedSC(t *testing.T) {
} }
func TestNestedComplexSC(t *testing.T) { func TestNestedComplexSC(t *testing.T) {
t.Parallel()
wt := func(tem tplapi.Template) error { wt := func(tem tplapi.Template) error {
tem.AddInternalShortcode("row.html", `-row-{{ .Inner}}-rowStop-`) tem.AddInternalShortcode("row.html", `-row-{{ .Inner}}-rowStop-`)
tem.AddInternalShortcode("column.html", `-col-{{.Inner }}-colStop-`) tem.AddInternalShortcode("column.html", `-col-{{.Inner }}-colStop-`)
@ -288,6 +273,7 @@ func TestNestedComplexSC(t *testing.T) {
} }
func TestParentShortcode(t *testing.T) { func TestParentShortcode(t *testing.T) {
t.Parallel()
wt := func(tem tplapi.Template) error { wt := func(tem tplapi.Template) error {
tem.AddInternalShortcode("r1.html", `1: {{ .Get "pr1" }} {{ .Inner }}`) tem.AddInternalShortcode("r1.html", `1: {{ .Get "pr1" }} {{ .Inner }}`)
tem.AddInternalShortcode("r2.html", `2: {{ .Parent.Get "pr1" }}{{ .Get "pr2" }} {{ .Inner }}`) tem.AddInternalShortcode("r2.html", `2: {{ .Parent.Get "pr1" }}{{ .Get "pr2" }} {{ .Inner }}`)
@ -300,43 +286,14 @@ func TestParentShortcode(t *testing.T) {
} }
func TestFigureImgWidth(t *testing.T) { func TestFigureImgWidth(t *testing.T) {
t.Parallel()
CheckShortCodeMatch(t, `{{% figure src="/found/here" class="bananas orange" alt="apple" width="100px" %}}`, "\n<figure class=\"bananas orange\">\n \n <img src=\"/found/here\" alt=\"apple\" width=\"100px\" />\n \n \n</figure>\n", nil) CheckShortCodeMatch(t, `{{% figure src="/found/here" class="bananas orange" alt="apple" width="100px" %}}`, "\n<figure class=\"bananas orange\">\n \n <img src=\"/found/here\" alt=\"apple\" width=\"100px\" />\n \n \n</figure>\n", nil)
} }
func TestHighlight(t *testing.T) {
testCommonResetState()
if !helpers.HasPygments() {
t.Skip("Skip test as Pygments is not installed")
}
viper.Set("pygmentsStyle", "bw")
viper.Set("pygmentsUseClasses", false)
code := `
{{< highlight java >}}
void do();
{{< /highlight >}}`
p, _ := pageFromString(simplePage, "simple.md")
output, err := HandleShortcodes(code, p)
if err != nil {
t.Fatal("Handle shortcode error", err)
}
matched, err := regexp.MatchString("(?s)^\n<div class=\"highlight\" style=\"background: #ffffff\"><pre style=\"line-height: 125%\">.*?void</span> do().*?</pre></div>\n$", output)
if err != nil {
t.Fatal("Regexp error", err)
}
if !matched {
t.Errorf("Hightlight mismatch, got (escaped to see invisible chars)\n%+q", output)
}
}
const testScPlaceholderRegexp = "HAHAHUGOSHORTCODE-\\d+HBHB" const testScPlaceholderRegexp = "HAHAHUGOSHORTCODE-\\d+HBHB"
func TestExtractShortcodes(t *testing.T) { func TestExtractShortcodes(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
name string name string
input string input string
@ -455,17 +412,8 @@ func TestExtractShortcodes(t *testing.T) {
} }
func TestShortcodesInSite(t *testing.T) { func TestShortcodesInSite(t *testing.T) {
testCommonResetState() t.Parallel()
baseURL := "http://foo/bar" baseURL := "http://foo/bar"
viper.Set("defaultExtension", "html")
viper.Set("defaultContentLanguage", "en")
viper.Set("baseURL", baseURL)
viper.Set("uglyURLs", false)
viper.Set("verbose", true)
viper.Set("pygmentsUseClasses", true)
viper.Set("pygmentsCodefences", true)
tests := []struct { tests := []struct {
contentPath string contentPath string
@ -579,11 +527,21 @@ tags:
} }
fs := hugofs.NewMem() cfg, fs := newTestCfg()
cfg.Set("defaultExtension", "html")
cfg.Set("defaultContentLanguage", "en")
cfg.Set("baseURL", baseURL)
cfg.Set("uglyURLs", false)
cfg.Set("verbose", true)
cfg.Set("pygmentsUseClasses", true)
cfg.Set("pygmentsCodefences", true)
writeSourcesToSource(t, "content", fs, sources...) writeSourcesToSource(t, "content", fs, sources...)
buildSingleSite(t, deps.DepsCfg{WithTemplate: addTemplates, Fs: fs}, BuildCfg{}) s := buildSingleSite(t, deps.DepsCfg{WithTemplate: addTemplates, Fs: fs, Cfg: cfg}, BuildCfg{})
th := testHelper{s.Cfg}
for _, test := range tests { for _, test := range tests {
if strings.HasSuffix(test.contentPath, ".ad") && !helpers.HasAsciidoc() { if strings.HasSuffix(test.contentPath, ".ad") && !helpers.HasAsciidoc() {
@ -597,7 +555,7 @@ tags:
continue continue
} }
assertFileContent(t, fs, test.outFile, true, test.expected) th.assertFileContent(t, fs, test.outFile, true, test.expected)
} }
} }
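The shortcode tests above all share the same per-test wiring. A condensed sketch of that shape, assuming the newTestCfg, writeSource, buildSingleSite and testHelper helpers that appear elsewhere in this diff; the page content, paths and expected output are illustrative only:

func TestShortcodeSiteSketch(t *testing.T) {
	t.Parallel() // safe: every test owns its own config and filesystem

	cfg, fs := newTestCfg()
	cfg.Set("baseURL", "http://foo/bar")
	cfg.Set("uglyURLs", false)

	// Illustrative content: one page using a project shortcode, plus the templates it needs.
	writeSource(t, fs, "content/sect/doc.md", "---\ntitle: Doc\n---\n{{< sc >}}")
	writeSource(t, fs, "layouts/shortcodes/sc.html", "SHORT")
	writeSource(t, fs, "layouts/_default/single.html", "{{ .Content }}")

	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})

	// Assertions read the same per-test config through testHelper; no globals involved.
	th := testHelper{s.Cfg}
	th.assertFileContent(t, fs, "public/sect/doc/index.html", true, "SHORT")
}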
@ -665,6 +623,7 @@ func BenchmarkReplaceShortcodeTokens(b *testing.B) {
} }
func TestReplaceShortcodeTokens(t *testing.T) { func TestReplaceShortcodeTokens(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
input string input string
prefix string prefix string

View file

@ -152,6 +152,7 @@ var shortCodeLexerTests = []shortCodeLexerTest{
} }
func TestShortcodeLexer(t *testing.T) { func TestShortcodeLexer(t *testing.T) {
t.Parallel()
for i, test := range shortCodeLexerTests { for i, test := range shortCodeLexerTests {
items := collect(&test) items := collect(&test)
if !equal(items, test.items) { if !equal(items, test.items) {

View file

@ -37,11 +37,9 @@ import (
bp "github.com/spf13/hugo/bufferpool" bp "github.com/spf13/hugo/bufferpool"
"github.com/spf13/hugo/deps" "github.com/spf13/hugo/deps"
"github.com/spf13/hugo/helpers" "github.com/spf13/hugo/helpers"
"github.com/spf13/hugo/hugofs"
"github.com/spf13/hugo/parser" "github.com/spf13/hugo/parser"
"github.com/spf13/hugo/source" "github.com/spf13/hugo/source"
"github.com/spf13/hugo/target" "github.com/spf13/hugo/target"
"github.com/spf13/hugo/tpl"
"github.com/spf13/hugo/tplapi" "github.com/spf13/hugo/tplapi"
"github.com/spf13/hugo/transform" "github.com/spf13/hugo/transform"
"github.com/spf13/nitro" "github.com/spf13/nitro"
@ -113,7 +111,7 @@ type Site struct {
// reset returns a new Site prepared for rebuild. // reset returns a new Site prepared for rebuild.
func (s *Site) reset() *Site { func (s *Site) reset() *Site {
return &Site{Deps: s.Deps, owner: s.owner, PageCollections: newPageCollections()} return &Site{Deps: s.Deps, Language: s.Language, owner: s.owner, PageCollections: newPageCollections()}
} }
// newSite creates a new site with the given configuration. // newSite creates a new site with the given configuration.
@ -121,7 +119,7 @@ func newSite(cfg deps.DepsCfg) (*Site, error) {
c := newPageCollections() c := newPageCollections()
if cfg.Language == nil { if cfg.Language == nil {
cfg.Language = helpers.NewDefaultLanguage() cfg.Language = helpers.NewDefaultLanguage(cfg.Cfg)
} }
s := &Site{PageCollections: c, Language: cfg.Language} s := &Site{PageCollections: c, Language: cfg.Language}
@ -148,37 +146,22 @@ func NewSite(cfg deps.DepsCfg) (*Site, error) {
return s, nil return s, nil
} }
// TODO(bep) globals clean below...
// NewSiteDefaultLang creates a new site in the default language. // NewSiteDefaultLang creates a new site in the default language.
// The site will have a template system loaded and ready to use. // The site will have a template system loaded and ready to use.
// Note: This is mainly used in single site tests. // Note: This is mainly used in single site tests.
func NewSiteDefaultLang(withTemplate ...func(templ tplapi.Template) error) (*Site, error) { func NewSiteDefaultLang(withTemplate ...func(templ tplapi.Template) error) (*Site, error) {
return newSiteForLang(helpers.NewDefaultLanguage(), withTemplate...) v := viper.New()
loadDefaultSettingsFor(v)
return newSiteForLang(helpers.NewDefaultLanguage(v), withTemplate...)
} }
// NewEnglishSite creates a new site in English language. // NewEnglishSite creates a new site in English language.
// The site will have a template system loaded and ready to use. // The site will have a template system loaded and ready to use.
// Note: This is mainly used in single site tests. // Note: This is mainly used in single site tests.
func NewEnglishSite(withTemplate ...func(templ tplapi.Template) error) (*Site, error) { func NewEnglishSite(withTemplate ...func(templ tplapi.Template) error) (*Site, error) {
return newSiteForLang(helpers.NewLanguage("en"), withTemplate...) v := viper.New()
} loadDefaultSettingsFor(v)
return newSiteForLang(helpers.NewLanguage("en", v), withTemplate...)
// NewEnglishSite creates a new site in the English language with in-memory Fs.
// The site will have a template system loaded and ready to use.
// Note: This is mainly used in single site tests.
func NewEnglishSiteMem(withTemplate ...func(templ tplapi.Template) error) (*Site, error) {
withTemplates := func(templ tplapi.Template) error {
for _, wt := range withTemplate {
if err := wt(templ); err != nil {
return err
}
}
return nil
}
cfg := deps.DepsCfg{WithTemplate: withTemplates, Language: helpers.NewLanguage("en"), Fs: hugofs.NewMem()}
return newSiteForCfg(cfg)
} }
// newSiteForLang creates a new site in the given language. // newSiteForLang creates a new site in the given language.
@ -191,13 +174,17 @@ func newSiteForLang(lang *helpers.Language, withTemplate ...func(templ tplapi.Te
} }
return nil return nil
} }
cfg := deps.DepsCfg{WithTemplate: withTemplates, Language: lang} cfg := deps.DepsCfg{WithTemplate: withTemplates, Language: lang}
return newSiteForCfg(cfg) return NewSiteForCfg(cfg)
} }
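For reference, the construction flow that NewSiteDefaultLang and NewEnglishSite above now share can be written out in one place. This is a sketch only; it assumes, as newSiteForLang does, that NewSiteForCfg can derive its remaining dependencies from the Language value:

func newEnglishSiteSketch() (*Site, error) {
	// One viper instance per site; nothing is read from a package global.
	v := viper.New()
	loadDefaultSettingsFor(v)

	lang := helpers.NewLanguage("en", v)
	return NewSiteForCfg(deps.DepsCfg{Language: lang})
}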
func newSiteForCfg(cfg deps.DepsCfg) (*Site, error) { // NewSiteForCfg creates a new site for the given configuration.
// The site will have a template system loaded and ready to use.
// Note: This is mainly used in single site tests.
func NewSiteForCfg(cfg deps.DepsCfg) (*Site, error) {
s, err := newSite(cfg) s, err := newSite(cfg)
if err != nil { if err != nil {
@ -269,21 +256,20 @@ func (s *SiteInfo) String() string {
// Used in tests. // Used in tests.
type siteBuilderCfg struct { type siteBuilderCfg struct {
language *helpers.Language language *helpers.Language
// TOD(bep) globals fs
s *Site s *Site
fs *hugofs.Fs
pageCollections *PageCollections pageCollections *PageCollections
baseURL string baseURL string
} }
// TODO(bep) globals get rid of this // TODO(bep) get rid of this
func newSiteInfo(cfg siteBuilderCfg) SiteInfo { func newSiteInfo(cfg siteBuilderCfg) SiteInfo {
return SiteInfo{ return SiteInfo{
s: cfg.s, s: cfg.s,
BaseURL: template.URL(cfg.baseURL), BaseURL: template.URL(cfg.baseURL),
multilingual: newMultiLingualForLanguage(cfg.language), multilingual: newMultiLingualForLanguage(cfg.language),
PageCollections: cfg.pageCollections, PageCollections: cfg.pageCollections,
Params: make(map[string]interface{}),
} }
} }
@ -586,12 +572,12 @@ func (s *Site) reProcess(events []fsnotify.Event) (whatChanged, error) {
} }
} }
if len(tmplChanged) > 0 { if len(tmplChanged) > 0 || len(i18nChanged) > 0 {
sites := s.owner.Sites sites := s.owner.Sites
first := sites[0] first := sites[0]
// TODO(bep) globals clean // TODO(bep) globals clean
if err := first.Deps.LoadTemplates(); err != nil { if err := first.Deps.LoadResources(); err != nil {
s.Log.ERROR.Println(err) s.Log.ERROR.Println(err)
} }
@ -613,12 +599,6 @@ func (s *Site) reProcess(events []fsnotify.Event) (whatChanged, error) {
s.readDataFromSourceFS() s.readDataFromSourceFS()
} }
if len(i18nChanged) > 0 {
if err := s.readI18nSources(); err != nil {
s.Log.ERROR.Println(err)
}
}
// If a content file changes, we need to reload only it and re-render the entire site. // If a content file changes, we need to reload only it and re-render the entire site.
// First step is to read the changed files and (re)place them in site.AllPages // First step is to read the changed files and (re)place them in site.AllPages
@ -648,7 +628,7 @@ func (s *Site) reProcess(events []fsnotify.Event) (whatChanged, error) {
wg2.Add(4) wg2.Add(4)
for i := 0; i < 2; i++ { for i := 0; i < 2; i++ {
go fileConverter(s, fileConvChan, convertResults, wg2) go fileConverter(s, fileConvChan, convertResults, wg2)
go pageConverter(s, pageChan, convertResults, wg2) go pageConverter(pageChan, convertResults, wg2)
} }
for _, ev := range sourceChanged { for _, ev := range sourceChanged {
@ -732,7 +712,7 @@ func (s *Site) reProcess(events []fsnotify.Event) (whatChanged, error) {
} }
func (s *Site) loadData(sources []source.Input) (err error) { func (s *Site) loadData(sources []source.Input) (err error) {
s.Log.DEBUG.Printf("Load Data from %q", sources) s.Log.DEBUG.Printf("Load Data from %d source(s)", len(sources))
s.Data = make(map[string]interface{}) s.Data = make(map[string]interface{})
var current map[string]interface{} var current map[string]interface{}
for _, currentSource := range sources { for _, currentSource := range sources {
@ -790,35 +770,19 @@ func (s *Site) readData(f *source.File) (interface{}, error) {
case "toml": case "toml":
return parser.HandleTOMLMetaData(f.Bytes()) return parser.HandleTOMLMetaData(f.Bytes())
default: default:
s.Log.WARN.Printf("Data not supported for extension '%s'", f.Extension()) return nil, fmt.Errorf("Data not supported for extension '%s'", f.Extension())
return nil, nil
} }
} }
func (s *Site) readI18nSources() error {
i18nSources := []source.Input{source.NewFilesystem(s.Fs, s.absI18nDir())}
themeI18nDir, err := s.PathSpec.GetThemeI18nDirPath()
if err == nil {
i18nSources = []source.Input{source.NewFilesystem(s.Fs, themeI18nDir), i18nSources[0]}
}
if err = s.loadI18n(i18nSources); err != nil {
return err
}
return nil
}
func (s *Site) readDataFromSourceFS() error { func (s *Site) readDataFromSourceFS() error {
sp := source.NewSourceSpec(s.Cfg, s.Fs)
dataSources := make([]source.Input, 0, 2) dataSources := make([]source.Input, 0, 2)
dataSources = append(dataSources, source.NewFilesystem(s.Fs, s.absDataDir())) dataSources = append(dataSources, sp.NewFilesystem(s.absDataDir()))
// have to be last - duplicate keys in earlier entries will win // have to be last - duplicate keys in earlier entries will win
themeDataDir, err := s.PathSpec.GetThemeDataDirPath() themeDataDir, err := s.PathSpec.GetThemeDataDirPath()
if err == nil { if err == nil {
dataSources = append(dataSources, source.NewFilesystem(s.Fs, themeDataDir)) dataSources = append(dataSources, sp.NewFilesystem(themeDataDir))
} }
err = s.loadData(dataSources) err = s.loadData(dataSources)
@ -837,10 +801,6 @@ func (s *Site) process(config BuildCfg) (err error) {
return return
} }
if err = s.readI18nSources(); err != nil {
return
}
s.timerStep("load i18n") s.timerStep("load i18n")
return s.createPages() return s.createPages()
@ -858,20 +818,7 @@ func (s *Site) setupPrevNext() {
} }
} }
func (s *Site) setCurrentLanguageConfig() error {
// There are sadly some global template funcs etc. that need the language information.
viper.Set("multilingual", s.multilingualEnabled())
viper.Set("currentContentLanguage", s.Language)
// Cache the current config.
helpers.InitConfigProviderForCurrentContentLanguage()
return tpl.SetTranslateLang(s.Language)
}
func (s *Site) render() (err error) { func (s *Site) render() (err error) {
if err = s.setCurrentLanguageConfig(); err != nil {
return
}
if err = s.preparePages(); err != nil { if err = s.preparePages(); err != nil {
return return
} }
@ -927,9 +874,10 @@ func (s *Site) initialize() (err error) {
return err return err
} }
staticDir := helpers.AbsPathify(viper.GetString("staticDir") + "/") staticDir := s.PathSpec.AbsPathify(s.Cfg.GetString("staticDir") + "/")
s.Source = source.NewFilesystem(s.Fs, s.absContentDir(), staticDir) sp := source.NewSourceSpec(s.Cfg, s.Fs)
s.Source = sp.NewFilesystem(s.absContentDir(), staticDir)
return return
} }
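The initialize change above is the pattern the rest of this file converts to: config lookups go through the injected s.Cfg, absolute paths through s.PathSpec, and filesystem sources are built from a SourceSpec bound to both. A minimal sketch; the staticSource method name is made up for illustration:

func (s *Site) staticSource() source.Input {
	// Configuration and path handling come from the site's own dependencies,
	// not from the global viper/helpers functions this commit removes.
	staticDir := s.PathSpec.AbsPathify(s.Cfg.GetString("staticDir") + "/")
	sp := source.NewSourceSpec(s.Cfg, s.Fs)
	return sp.NewFilesystem(staticDir)
}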
@ -945,7 +893,7 @@ func (s *SiteInfo) HomeAbsURL() string {
// SitemapAbsURL is a convenience method giving the absolute URL to the sitemap. // SitemapAbsURL is a convenience method giving the absolute URL to the sitemap.
func (s *SiteInfo) SitemapAbsURL() string { func (s *SiteInfo) SitemapAbsURL() string {
sitemapDefault := parseSitemap(viper.GetStringMap("sitemap")) sitemapDefault := parseSitemap(s.s.Cfg.GetStringMap("sitemap"))
p := s.HomeAbsURL() p := s.HomeAbsURL()
if !strings.HasSuffix(p, "/") { if !strings.HasSuffix(p, "/") {
p += "/" p += "/"
@ -967,12 +915,12 @@ func (s *Site) initializeSiteInfo() {
params := lang.Params() params := lang.Params()
permalinks := make(PermalinkOverrides) permalinks := make(PermalinkOverrides)
for k, v := range viper.GetStringMapString("permalinks") { for k, v := range s.Cfg.GetStringMapString("permalinks") {
permalinks[k] = pathPattern(v) permalinks[k] = pathPattern(v)
} }
defaultContentInSubDir := viper.GetBool("defaultContentLanguageInSubdir") defaultContentInSubDir := s.Cfg.GetBool("defaultContentLanguageInSubdir")
defaultContentLanguage := viper.GetString("defaultContentLanguage") defaultContentLanguage := s.Cfg.GetString("defaultContentLanguage")
languagePrefix := "" languagePrefix := ""
if s.multilingualEnabled() && (defaultContentInSubDir || lang.Lang != defaultContentLanguage) { if s.multilingualEnabled() && (defaultContentInSubDir || lang.Lang != defaultContentLanguage) {
@ -985,7 +933,7 @@ func (s *Site) initializeSiteInfo() {
} }
s.Info = SiteInfo{ s.Info = SiteInfo{
BaseURL: template.URL(helpers.SanitizeURLKeepTrailingSlash(viper.GetString("baseURL"))), BaseURL: template.URL(helpers.SanitizeURLKeepTrailingSlash(s.Cfg.GetString("baseURL"))),
Title: lang.GetString("title"), Title: lang.GetString("title"),
Author: lang.GetStringMap("author"), Author: lang.GetStringMap("author"),
Social: lang.GetStringMapString("social"), Social: lang.GetStringMapString("social"),
@ -999,8 +947,8 @@ func (s *Site) initializeSiteInfo() {
defaultContentLanguageInSubdir: defaultContentInSubDir, defaultContentLanguageInSubdir: defaultContentInSubDir,
sectionPagesMenu: lang.GetString("sectionPagesMenu"), sectionPagesMenu: lang.GetString("sectionPagesMenu"),
GoogleAnalytics: lang.GetString("googleAnalytics"), GoogleAnalytics: lang.GetString("googleAnalytics"),
BuildDrafts: viper.GetBool("buildDrafts"), BuildDrafts: s.Cfg.GetBool("buildDrafts"),
canonifyURLs: viper.GetBool("canonifyURLs"), canonifyURLs: s.Cfg.GetBool("canonifyURLs"),
preserveTaxonomyNames: lang.GetBool("preserveTaxonomyNames"), preserveTaxonomyNames: lang.GetBool("preserveTaxonomyNames"),
PageCollections: s.PageCollections, PageCollections: s.PageCollections,
Files: &s.Files, Files: &s.Files,
@ -1016,22 +964,22 @@ func (s *Site) initializeSiteInfo() {
} }
func (s *Site) hasTheme() bool { func (s *Site) hasTheme() bool {
return viper.GetString("theme") != "" return s.Cfg.GetString("theme") != ""
} }
func (s *Site) dataDir() string { func (s *Site) dataDir() string {
return viper.GetString("dataDir") return s.Cfg.GetString("dataDir")
} }
func (s *Site) absDataDir() string { func (s *Site) absDataDir() string {
return helpers.AbsPathify(s.dataDir()) return s.PathSpec.AbsPathify(s.dataDir())
} }
func (s *Site) i18nDir() string { func (s *Site) i18nDir() string {
return viper.GetString("i18nDir") return s.Cfg.GetString("i18nDir")
} }
func (s *Site) absI18nDir() string { func (s *Site) absI18nDir() string {
return helpers.AbsPathify(s.i18nDir()) return s.PathSpec.AbsPathify(s.i18nDir())
} }
func (s *Site) isI18nEvent(e fsnotify.Event) bool { func (s *Site) isI18nEvent(e fsnotify.Event) bool {
@ -1049,7 +997,7 @@ func (s *Site) getThemeI18nDir(path string) string {
if !s.hasTheme() { if !s.hasTheme() {
return "" return ""
} }
return s.getRealDir(helpers.AbsPathify(filepath.Join(s.themeDir(), s.i18nDir())), path) return s.getRealDir(s.PathSpec.AbsPathify(filepath.Join(s.themeDir(), s.i18nDir())), path)
} }
func (s *Site) isDataDirEvent(e fsnotify.Event) bool { func (s *Site) isDataDirEvent(e fsnotify.Event) bool {
@ -1067,23 +1015,23 @@ func (s *Site) getThemeDataDir(path string) string {
if !s.hasTheme() { if !s.hasTheme() {
return "" return ""
} }
return s.getRealDir(helpers.AbsPathify(filepath.Join(s.themeDir(), s.dataDir())), path) return s.getRealDir(s.PathSpec.AbsPathify(filepath.Join(s.themeDir(), s.dataDir())), path)
} }
func (s *Site) themeDir() string { func (s *Site) themeDir() string {
return viper.GetString("themesDir") + "/" + viper.GetString("theme") return s.Cfg.GetString("themesDir") + "/" + s.Cfg.GetString("theme")
} }
func (s *Site) absThemeDir() string { func (s *Site) absThemeDir() string {
return helpers.AbsPathify(s.themeDir()) return s.PathSpec.AbsPathify(s.themeDir())
} }
func (s *Site) layoutDir() string { func (s *Site) layoutDir() string {
return viper.GetString("layoutDir") return s.Cfg.GetString("layoutDir")
} }
func (s *Site) absLayoutDir() string { func (s *Site) absLayoutDir() string {
return helpers.AbsPathify(s.layoutDir()) return s.PathSpec.AbsPathify(s.layoutDir())
} }
func (s *Site) isLayoutDirEvent(e fsnotify.Event) bool { func (s *Site) isLayoutDirEvent(e fsnotify.Event) bool {
@ -1101,11 +1049,11 @@ func (s *Site) getThemeLayoutDir(path string) string {
if !s.hasTheme() { if !s.hasTheme() {
return "" return ""
} }
return s.getRealDir(helpers.AbsPathify(filepath.Join(s.themeDir(), s.layoutDir())), path) return s.getRealDir(s.PathSpec.AbsPathify(filepath.Join(s.themeDir(), s.layoutDir())), path)
} }
func (s *Site) absContentDir() string { func (s *Site) absContentDir() string {
return helpers.AbsPathify(viper.GetString("contentDir")) return s.PathSpec.AbsPathify(s.Cfg.GetString("contentDir"))
} }
func (s *Site) isContentDirEvent(e fsnotify.Event) bool { func (s *Site) isContentDirEvent(e fsnotify.Event) bool {
@ -1141,7 +1089,7 @@ func (s *Site) getRealDir(base, path string) string {
} }
func (s *Site) absPublishDir() string { func (s *Site) absPublishDir() string {
return helpers.AbsPathify(viper.GetString("publishDir")) return s.PathSpec.AbsPathify(s.Cfg.GetString("publishDir"))
} }
func (s *Site) checkDirectories() (err error) { func (s *Site) checkDirectories() (err error) {
@ -1160,7 +1108,9 @@ func (s *Site) reReadFile(absFilePath string) (*source.File, error) {
if err != nil { if err != nil {
return nil, err return nil, err
} }
file, err = source.NewFileFromAbs(s.getContentDir(absFilePath), absFilePath, reader)
sp := source.NewSourceSpec(s.Cfg, s.Fs)
file, err = sp.NewFileFromAbs(s.getContentDir(absFilePath), absFilePath, reader)
if err != nil { if err != nil {
return nil, err return nil, err
@ -1219,7 +1169,7 @@ func (s *Site) convertSource() chan error {
wg.Add(2 * procs * 4) wg.Add(2 * procs * 4)
for i := 0; i < procs*4; i++ { for i := 0; i < procs*4; i++ {
go fileConverter(s, fileConvChan, results, wg) go fileConverter(s, fileConvChan, results, wg)
go pageConverter(s, pageChan, results, wg) go pageConverter(pageChan, results, wg)
} }
go converterCollator(s, results, errs) go converterCollator(s, results, errs)
@ -1278,7 +1228,7 @@ func readSourceFile(s *Site, file *source.File, results chan<- HandledResult) {
} }
} }
func pageConverter(s *Site, pages <-chan *Page, results HandleResults, wg *sync.WaitGroup) { func pageConverter(pages <-chan *Page, results HandleResults, wg *sync.WaitGroup) {
defer wg.Done() defer wg.Done()
for page := range pages { for page := range pages {
var h *MetaHandle var h *MetaHandle
@ -1288,7 +1238,10 @@ func pageConverter(s *Site, pages <-chan *Page, results HandleResults, wg *sync.
h = NewMetaHandler(page.File.Extension()) h = NewMetaHandler(page.File.Extension())
} }
if h != nil { if h != nil {
h.Convert(page, s, results) // Note that we convert pages from the site's rawAllPages collection
// which may contain pages from multiple sites, so we use the Page's site
// for the conversion.
h.Convert(page, page.s, results)
} }
} }
} }
@ -1478,7 +1431,6 @@ func (s *Site) assembleMenus() {
//creating flat hash //creating flat hash
pages := s.Pages pages := s.Pages
for _, p := range pages { for _, p := range pages {
if sectionPagesMenu != "" { if sectionPagesMenu != "" {
if _, ok := sectionPagesMenus[p.Section()]; !ok { if _, ok := sectionPagesMenus[p.Section()]; !ok {
if p.Section() != "" { if p.Section() != "" {
@ -1750,7 +1702,7 @@ func (s *SiteInfo) permalink(plink string) string {
func (s *SiteInfo) permalinkStr(plink string) string { func (s *SiteInfo) permalinkStr(plink string) string {
return helpers.MakePermalink( return helpers.MakePermalink(
viper.GetString("baseURL"), s.s.Cfg.GetString("baseURL"),
s.s.PathSpec.URLizeAndPrep(plink)).String() s.s.PathSpec.URLizeAndPrep(plink)).String()
} }
@ -1770,10 +1722,10 @@ func (s *Site) renderAndWriteXML(name string, dest string, d interface{}, layout
defer bp.PutBuffer(outBuffer) defer bp.PutBuffer(outBuffer)
var path []byte var path []byte
if viper.GetBool("relativeURLs") { if s.Cfg.GetBool("relativeURLs") {
path = []byte(helpers.GetDottedRelativePath(dest)) path = []byte(helpers.GetDottedRelativePath(dest))
} else { } else {
s := viper.GetString("baseURL") s := s.Cfg.GetString("baseURL")
if !strings.HasSuffix(s, "/") { if !strings.HasSuffix(s, "/") {
s += "/" s += "/"
} }
@ -1811,31 +1763,31 @@ func (s *Site) renderAndWritePage(name string, dest string, d interface{}, layou
transformLinks := transform.NewEmptyTransforms() transformLinks := transform.NewEmptyTransforms()
if viper.GetBool("relativeURLs") || viper.GetBool("canonifyURLs") { if s.Cfg.GetBool("relativeURLs") || s.Cfg.GetBool("canonifyURLs") {
transformLinks = append(transformLinks, transform.AbsURL) transformLinks = append(transformLinks, transform.AbsURL)
} }
if s.running() && viper.GetBool("watch") && !viper.GetBool("disableLiveReload") { if s.running() && s.Cfg.GetBool("watch") && !s.Cfg.GetBool("disableLiveReload") {
transformLinks = append(transformLinks, transform.LiveReloadInject) transformLinks = append(transformLinks, transform.LiveReloadInject(s.Cfg.GetInt("port")))
} }
// For performance reasons we only inject the Hugo generator tag on the home page. // For performance reasons we only inject the Hugo generator tag on the home page.
if n, ok := d.(*Page); ok && n.IsHome() { if n, ok := d.(*Page); ok && n.IsHome() {
if !viper.GetBool("disableHugoGeneratorInject") { if !s.Cfg.GetBool("disableHugoGeneratorInject") {
transformLinks = append(transformLinks, transform.HugoGeneratorInject) transformLinks = append(transformLinks, transform.HugoGeneratorInject)
} }
} }
var path []byte var path []byte
if viper.GetBool("relativeURLs") { if s.Cfg.GetBool("relativeURLs") {
translated, err := pageTarget.(target.OptionalTranslator).TranslateRelative(dest) translated, err := pageTarget.(target.OptionalTranslator).TranslateRelative(dest)
if err != nil { if err != nil {
return err return err
} }
path = []byte(helpers.GetDottedRelativePath(translated)) path = []byte(helpers.GetDottedRelativePath(translated))
} else if viper.GetBool("canonifyURLs") { } else if s.Cfg.GetBool("canonifyURLs") {
s := viper.GetString("baseURL") s := s.Cfg.GetString("baseURL")
if !strings.HasSuffix(s, "/") { if !strings.HasSuffix(s, "/") {
s += "/" s += "/"
} }
@ -1850,7 +1802,7 @@ func (s *Site) renderAndWritePage(name string, dest string, d interface{}, layou
s.Log.WARN.Printf("%s is rendered empty\n", dest) s.Log.WARN.Printf("%s is rendered empty\n", dest)
if dest == "/" { if dest == "/" {
debugAddend := "" debugAddend := ""
if !viper.GetBool("verbose") { if !s.Cfg.GetBool("verbose") {
debugAddend = "* For more debugging information, run \"hugo -v\"" debugAddend = "* For more debugging information, run \"hugo -v\""
} }
distinctFeedbackLogger.Printf(`============================================================= distinctFeedbackLogger.Printf(`=============================================================
@ -1860,7 +1812,7 @@ Your rendered home page is blank: /index.html is zero-length
%s %s
=============================================================`, =============================================================`,
filepath.Base(viper.ConfigFileUsed()), filepath.Base(viper.ConfigFileUsed()),
viper.GetString("theme"), s.Cfg.GetString("theme"),
debugAddend) debugAddend)
} }
@ -1956,7 +1908,7 @@ func (s *Site) initTargetList() {
s.targets.page = &target.PagePub{ s.targets.page = &target.PagePub{
Fs: s.Fs, Fs: s.Fs,
PublishDir: s.absPublishDir(), PublishDir: s.absPublishDir(),
UglyURLs: viper.GetBool("uglyURLs"), UglyURLs: s.Cfg.GetBool("uglyURLs"),
LangDir: langDir, LangDir: langDir,
} }
} }
@ -2007,9 +1959,9 @@ func (s *Site) writeDestAlias(path, permalink string, p *Page) (err error) {
} }
func (s *Site) publishDestAlias(aliasPublisher target.AliasPublisher, path, permalink string, p *Page) (err error) { func (s *Site) publishDestAlias(aliasPublisher target.AliasPublisher, path, permalink string, p *Page) (err error) {
if viper.GetBool("relativeURLs") { if s.Cfg.GetBool("relativeURLs") {
// convert `permalink` into URI relative to location of `path` // convert `permalink` into URI relative to location of `path`
baseURL := helpers.SanitizeURLKeepTrailingSlash(viper.GetString("baseURL")) baseURL := helpers.SanitizeURLKeepTrailingSlash(s.Cfg.GetString("baseURL"))
if strings.HasPrefix(permalink, baseURL) { if strings.HasPrefix(permalink, baseURL) {
permalink = "/" + strings.TrimPrefix(permalink, baseURL) permalink = "/" + strings.TrimPrefix(permalink, baseURL)
} }
@ -2035,7 +1987,7 @@ func (s *Site) draftStats() string {
msg = fmt.Sprintf("%d drafts rendered", s.draftCount) msg = fmt.Sprintf("%d drafts rendered", s.draftCount)
} }
if viper.GetBool("buildDrafts") { if s.Cfg.GetBool("buildDrafts") {
return fmt.Sprintf("%d of ", s.draftCount) + msg return fmt.Sprintf("%d of ", s.draftCount) + msg
} }
@ -2054,7 +2006,7 @@ func (s *Site) futureStats() string {
msg = fmt.Sprintf("%d futures rendered", s.futureCount) msg = fmt.Sprintf("%d futures rendered", s.futureCount)
} }
if viper.GetBool("buildFuture") { if s.Cfg.GetBool("buildFuture") {
return fmt.Sprintf("%d of ", s.futureCount) + msg return fmt.Sprintf("%d of ", s.futureCount) + msg
} }
@ -2073,7 +2025,7 @@ func (s *Site) expiredStats() string {
msg = fmt.Sprintf("%d expired rendered", s.expiredCount) msg = fmt.Sprintf("%d expired rendered", s.expiredCount)
} }
if viper.GetBool("buildExpired") { if s.Cfg.GetBool("buildExpired") {
return fmt.Sprintf("%d of ", s.expiredCount) + msg return fmt.Sprintf("%d of ", s.expiredCount) + msg
} }
@ -2091,11 +2043,11 @@ func getGoMaxProcs() int {
func (s *Site) newNodePage(typ string) *Page { func (s *Site) newNodePage(typ string) *Page {
return &Page{ return &Page{
language: s.Language,
pageInit: &pageInit{}, pageInit: &pageInit{},
Kind: typ, Kind: typ,
Data: make(map[string]interface{}), Data: make(map[string]interface{}),
Site: &s.Info, Site: &s.Info,
language: s.Language,
s: s} s: s}
} }
@ -2148,7 +2100,7 @@ func (s *Site) newSectionPage(name string, section WeightedPages) *Page {
} }
sectionName = helpers.FirstUpper(sectionName) sectionName = helpers.FirstUpper(sectionName)
if viper.GetBool("pluralizeListTitles") { if s.Cfg.GetBool("pluralizeListTitles") {
p.Title = inflect.Pluralize(sectionName) p.Title = inflect.Pluralize(sectionName)
} else { } else {
p.Title = sectionName p.Title = sectionName

View file

@ -20,15 +20,14 @@ import (
"path/filepath" "path/filepath"
"github.com/spf13/hugo/deps" "github.com/spf13/hugo/deps"
"github.com/spf13/hugo/hugofs"
) )
// Issue #1123 // Issue #1123
// Testing prevention of cyclic refs in JSON encoding // Testing prevention of cyclic refs in JSON encoding
// May be smart to run with: -timeout 4000ms // May be smart to run with: -timeout 4000ms
func TestEncodePage(t *testing.T) { func TestEncodePage(t *testing.T) {
t.Parallel()
fs := hugofs.NewMem() cfg, fs := newTestCfg()
// borrowed from menu_test.go // borrowed from menu_test.go
for _, src := range menuPageSources { for _, src := range menuPageSources {
@ -36,7 +35,7 @@ func TestEncodePage(t *testing.T) {
} }
s := buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{}) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
_, err := json.Marshal(s) _, err := json.Marshal(s)
check(t, err) check(t, err)

View file

@ -21,8 +21,6 @@ import (
"time" "time"
bp "github.com/spf13/hugo/bufferpool" bp "github.com/spf13/hugo/bufferpool"
"github.com/spf13/hugo/helpers"
"github.com/spf13/viper"
) )
// renderPages renders pages each corresponding to a markdown file. // renderPages renders pages each corresponding to a markdown file.
@ -65,6 +63,7 @@ func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.Wa
defer wg.Done() defer wg.Done()
for p := range pages { for p := range pages {
targetPath := p.TargetPath() targetPath := p.TargetPath()
layouts := p.layouts() layouts := p.layouts()
s.Log.DEBUG.Printf("Render %s to %q with layouts %q", p.Kind, targetPath, layouts) s.Log.DEBUG.Printf("Render %s to %q with layouts %q", p.Kind, targetPath, layouts)
@ -89,12 +88,12 @@ func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.Wa
func (s *Site) renderPaginator(p *Page) error { func (s *Site) renderPaginator(p *Page) error {
if p.paginator != nil { if p.paginator != nil {
s.Log.DEBUG.Printf("Render paginator for page %q", p.Path()) s.Log.DEBUG.Printf("Render paginator for page %q", p.Path())
paginatePath := helpers.Config().GetString("paginatePath") paginatePath := s.Cfg.GetString("paginatePath")
// write alias for page 1 // write alias for page 1
// TODO(bep) ml all of these n.addLang ... fix. // TODO(bep) ml all of these n.addLang ... fix.
aliasPath := p.addLangPathPrefix(helpers.PaginateAliasPath(path.Join(p.sections...), 1)) aliasPath := p.addLangPathPrefix(s.PathSpec.PaginateAliasPath(path.Join(p.sections...), 1))
link := p.Permalink() link := p.Permalink()
s.writeDestAlias(aliasPath, link, nil) s.writeDestAlias(aliasPath, link, nil)
@ -131,7 +130,7 @@ func (s *Site) renderPaginator(p *Page) error {
func (s *Site) renderRSS(p *Page) error { func (s *Site) renderRSS(p *Page) error {
if viper.GetBool("disableRSS") { if s.Cfg.GetBool("disableRSS") {
return nil return nil
} }
@ -168,7 +167,7 @@ func (s *Site) renderRSS(p *Page) error {
} }
func (s *Site) render404() error { func (s *Site) render404() error {
if viper.GetBool("disable404") { if s.Cfg.GetBool("disable404") {
return nil return nil
} }
@ -185,11 +184,11 @@ func (s *Site) render404() error {
} }
func (s *Site) renderSitemap() error { func (s *Site) renderSitemap() error {
if viper.GetBool("disableSitemap") { if s.Cfg.GetBool("disableSitemap") {
return nil return nil
} }
sitemapDefault := parseSitemap(viper.GetStringMap("sitemap")) sitemapDefault := parseSitemap(s.Cfg.GetStringMap("sitemap"))
n := s.newNodePage(kindSitemap) n := s.newNodePage(kindSitemap)
@ -228,7 +227,7 @@ func (s *Site) renderSitemap() error {
} }
func (s *Site) renderRobotsTXT() error { func (s *Site) renderRobotsTXT() error {
if !viper.GetBool("enableRobotsTXT") { if !s.Cfg.GetBool("enableRobotsTXT") {
return nil return nil
} }
@ -265,9 +264,9 @@ func (s *Site) renderAliases() error {
} }
if s.owner.multilingual.enabled() { if s.owner.multilingual.enabled() {
mainLang := s.owner.multilingual.DefaultLang.Lang mainLang := s.owner.multilingual.DefaultLang
if s.Info.defaultContentLanguageInSubdir { if s.Info.defaultContentLanguageInSubdir {
mainLangURL := s.PathSpec.AbsURL(mainLang, false) mainLangURL := s.PathSpec.AbsURL(mainLang.Lang, false)
s.Log.DEBUG.Printf("Write redirect to main language %s: %s", mainLang, mainLangURL) s.Log.DEBUG.Printf("Write redirect to main language %s: %s", mainLang, mainLangURL)
if err := s.publishDestAlias(s.languageAliasTarget(), "/", mainLangURL, nil); err != nil { if err := s.publishDestAlias(s.languageAliasTarget(), "/", mainLangURL, nil); err != nil {
return err return err
@ -275,7 +274,7 @@ func (s *Site) renderAliases() error {
} else { } else {
mainLangURL := s.PathSpec.AbsURL("", false) mainLangURL := s.PathSpec.AbsURL("", false)
s.Log.DEBUG.Printf("Write redirect to main language %s: %s", mainLang, mainLangURL) s.Log.DEBUG.Printf("Write redirect to main language %s: %s", mainLang, mainLangURL)
if err := s.publishDestAlias(s.languageAliasTarget(), mainLang, mainLangURL, nil); err != nil { if err := s.publishDestAlias(s.languageAliasTarget(), mainLang.Lang, mainLangURL, nil); err != nil {
return err return err
} }
} }
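The render helpers in this file now follow one toggle pattern: the switch is read from the site's own Cfg, so two sites built in the same process can disagree about it. Distilled into a hypothetical helper (not part of this commit), just to show the shape:

func (s *Site) renderUnlessDisabled(disableKey string, render func() error) error {
	// Per-site feature toggle; replaces the old viper.GetBool checks.
	if s.Cfg.GetBool(disableKey) {
		return nil
	}
	return render()
}

renderRSS, render404 and renderSitemap above are each equivalent to a call of this form, e.g. s.renderUnlessDisabled("disableRSS", ...).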

View file

@ -27,7 +27,6 @@ import (
"github.com/spf13/hugo/deps" "github.com/spf13/hugo/deps"
"github.com/spf13/hugo/hugofs" "github.com/spf13/hugo/hugofs"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -54,12 +53,12 @@ func pageMust(p *Page, err error) *Page {
} }
func TestDegenerateRenderThingMissingTemplate(t *testing.T) { func TestDegenerateRenderThingMissingTemplate(t *testing.T) {
t.Parallel()
fs := hugofs.NewMem() cfg, fs := newTestCfg()
writeSource(t, fs, filepath.Join("content", "a", "file.md"), pageSimpleTitle) writeSource(t, fs, filepath.Join("content", "a", "file.md"), pageSimpleTitle)
s := buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{}) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
require.Len(t, s.RegularPages, 1) require.Len(t, s.RegularPages, 1)
@ -72,18 +71,18 @@ func TestDegenerateRenderThingMissingTemplate(t *testing.T) {
} }
func TestRenderWithInvalidTemplate(t *testing.T) { func TestRenderWithInvalidTemplate(t *testing.T) {
t.Parallel()
fs := hugofs.NewMem() cfg, fs := newTestCfg()
writeSource(t, fs, filepath.Join("content", "foo.md"), "foo") writeSource(t, fs, filepath.Join("content", "foo.md"), "foo")
withTemplate := createWithTemplateFromNameValues("missing", templateMissingFunc) withTemplate := createWithTemplateFromNameValues("missing", templateMissingFunc)
s := buildSingleSite(t, deps.DepsCfg{Fs: fs, WithTemplate: withTemplate}, BuildCfg{}) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg, WithTemplate: withTemplate}, BuildCfg{})
errCount := s.Log.LogCountForLevelsGreaterThanorEqualTo(jww.LevelError) errCount := s.Log.LogCountForLevelsGreaterThanorEqualTo(jww.LevelError)
// TODO(bep) globals clean up the template error handling // TODO(bep) clean up the template error handling
// The template errors are stored in a slice etc. so we get 4 log entries // The template errors are stored in a slice etc. so we get 4 log entries
// When we should get only 1 // When we should get only 1
if errCount == 0 { if errCount == 0 {
@ -92,8 +91,7 @@ func TestRenderWithInvalidTemplate(t *testing.T) {
} }
func TestDraftAndFutureRender(t *testing.T) { func TestDraftAndFutureRender(t *testing.T) {
testCommonResetState() t.Parallel()
sources := []source.ByteSource{ sources := []source.ByteSource{
{Name: filepath.FromSlash("sect/doc1.md"), Content: []byte("---\ntitle: doc1\ndraft: true\npublishdate: \"2414-05-29\"\n---\n# doc1\n*some content*")}, {Name: filepath.FromSlash("sect/doc1.md"), Content: []byte("---\ntitle: doc1\ndraft: true\npublishdate: \"2414-05-29\"\n---\n# doc1\n*some content*")},
{Name: filepath.FromSlash("sect/doc2.md"), Content: []byte("---\ntitle: doc2\ndraft: true\npublishdate: \"2012-05-29\"\n---\n# doc2\n*some content*")}, {Name: filepath.FromSlash("sect/doc2.md"), Content: []byte("---\ntitle: doc2\ndraft: true\npublishdate: \"2012-05-29\"\n---\n# doc2\n*some content*")},
@ -101,19 +99,23 @@ func TestDraftAndFutureRender(t *testing.T) {
{Name: filepath.FromSlash("sect/doc4.md"), Content: []byte("---\ntitle: doc4\ndraft: false\npublishdate: \"2012-05-29\"\n---\n# doc4\n*some content*")}, {Name: filepath.FromSlash("sect/doc4.md"), Content: []byte("---\ntitle: doc4\ndraft: false\npublishdate: \"2012-05-29\"\n---\n# doc4\n*some content*")},
} }
siteSetup := func(t *testing.T) *Site { siteSetup := func(t *testing.T, configKeyValues ...interface{}) *Site {
fs := hugofs.NewMem() cfg, fs := newTestCfg()
cfg.Set("baseURL", "http://auth/bub")
for i := 0; i < len(configKeyValues); i += 2 {
cfg.Set(configKeyValues[i].(string), configKeyValues[i+1])
}
for _, src := range sources { for _, src := range sources {
writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content)) writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))
} }
return buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{}) return buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
} }
viper.Set("baseURL", "http://auth/bub")
// Testing Defaults.. Only draft:true and publishDate in the past should be rendered // Testing Defaults.. Only draft:true and publishDate in the past should be rendered
s := siteSetup(t) s := siteSetup(t)
if len(s.RegularPages) != 1 { if len(s.RegularPages) != 1 {
@ -121,54 +123,50 @@ func TestDraftAndFutureRender(t *testing.T) {
} }
// only publishDate in the past should be rendered // only publishDate in the past should be rendered
viper.Set("buildDrafts", true) s = siteSetup(t, "buildDrafts", true)
s = siteSetup(t)
if len(s.RegularPages) != 2 { if len(s.RegularPages) != 2 {
t.Fatal("Future Dated Posts published unexpectedly") t.Fatal("Future Dated Posts published unexpectedly")
} }
// drafts should not be rendered, but all dates should // drafts should not be rendered, but all dates should
viper.Set("buildDrafts", false) s = siteSetup(t,
viper.Set("buildFuture", true) "buildDrafts", false,
s = siteSetup(t) "buildFuture", true)
if len(s.RegularPages) != 2 { if len(s.RegularPages) != 2 {
t.Fatal("Draft posts published unexpectedly") t.Fatal("Draft posts published unexpectedly")
} }
// all 4 should be included // all 4 should be included
viper.Set("buildDrafts", true) s = siteSetup(t,
viper.Set("buildFuture", true) "buildDrafts", true,
s = siteSetup(t) "buildFuture", true)
if len(s.RegularPages) != 4 { if len(s.RegularPages) != 4 {
t.Fatal("Drafts or Future posts not included as expected") t.Fatal("Drafts or Future posts not included as expected")
} }
//setting defaults back
viper.Set("buildDrafts", false)
viper.Set("buildFuture", false)
} }
func TestFutureExpirationRender(t *testing.T) { func TestFutureExpirationRender(t *testing.T) {
testCommonResetState() t.Parallel()
sources := []source.ByteSource{ sources := []source.ByteSource{
{Name: filepath.FromSlash("sect/doc3.md"), Content: []byte("---\ntitle: doc1\nexpirydate: \"2400-05-29\"\n---\n# doc1\n*some content*")}, {Name: filepath.FromSlash("sect/doc3.md"), Content: []byte("---\ntitle: doc1\nexpirydate: \"2400-05-29\"\n---\n# doc1\n*some content*")},
{Name: filepath.FromSlash("sect/doc4.md"), Content: []byte("---\ntitle: doc2\nexpirydate: \"2000-05-29\"\n---\n# doc2\n*some content*")}, {Name: filepath.FromSlash("sect/doc4.md"), Content: []byte("---\ntitle: doc2\nexpirydate: \"2000-05-29\"\n---\n# doc2\n*some content*")},
} }
siteSetup := func(t *testing.T) *Site { siteSetup := func(t *testing.T) *Site {
fs := hugofs.NewMem() cfg, fs := newTestCfg()
cfg.Set("baseURL", "http://auth/bub")
for _, src := range sources { for _, src := range sources {
writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content)) writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))
} }
return buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{}) return buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
} }
viper.Set("baseURL", "http://auth/bub")
s := siteSetup(t) s := siteSetup(t)
if len(s.AllPages) != 1 { if len(s.AllPages) != 1 {
@ -188,6 +186,7 @@ func TestFutureExpirationRender(t *testing.T) {
// Issue #957 // Issue #957
func TestCrossrefs(t *testing.T) { func TestCrossrefs(t *testing.T) {
t.Parallel()
for _, uglyURLs := range []bool{true, false} { for _, uglyURLs := range []bool{true, false} {
for _, relative := range []bool{true, false} { for _, relative := range []bool{true, false} {
doTestCrossrefs(t, relative, uglyURLs) doTestCrossrefs(t, relative, uglyURLs)
@ -196,13 +195,8 @@ func TestCrossrefs(t *testing.T) {
} }
func doTestCrossrefs(t *testing.T, relative, uglyURLs bool) { func doTestCrossrefs(t *testing.T, relative, uglyURLs bool) {
testCommonResetState()
baseURL := "http://foo/bar" baseURL := "http://foo/bar"
viper.Set("defaultExtension", "html")
viper.Set("baseURL", baseURL)
viper.Set("uglyURLs", uglyURLs)
viper.Set("verbose", true)
var refShortcode string var refShortcode string
var expectedBase string var expectedBase string
@ -246,7 +240,12 @@ THE END.`, refShortcode)),
}, },
} }
fs := hugofs.NewMem() cfg, fs := newTestCfg()
cfg.Set("defaultExtension", "html")
cfg.Set("baseURL", baseURL)
cfg.Set("uglyURLs", uglyURLs)
cfg.Set("verbose", true)
for _, src := range sources { for _, src := range sources {
writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content)) writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))
@ -256,6 +255,7 @@ THE END.`, refShortcode)),
t, t,
deps.DepsCfg{ deps.DepsCfg{
Fs: fs, Fs: fs,
Cfg: cfg,
WithTemplate: createWithTemplateFromNameValues("_default/single.html", "{{.Content}}")}, WithTemplate: createWithTemplateFromNameValues("_default/single.html", "{{.Content}}")},
BuildCfg{}) BuildCfg{})
@ -263,6 +263,8 @@ THE END.`, refShortcode)),
t.Fatalf("Expected 3 got %d pages", len(s.AllPages)) t.Fatalf("Expected 3 got %d pages", len(s.AllPages))
} }
th := testHelper{s.Cfg}
tests := []struct { tests := []struct {
doc string doc string
expected string expected string
@ -273,7 +275,7 @@ THE END.`, refShortcode)),
} }
for _, test := range tests { for _, test := range tests {
assertFileContent(t, fs, test.doc, true, test.expected) th.assertFileContent(t, fs, test.doc, true, test.expected)
} }
@ -282,33 +284,33 @@ THE END.`, refShortcode)),
// Issue #939 // Issue #939
// Issue #1923 // Issue #1923
func TestShouldAlwaysHaveUglyURLs(t *testing.T) { func TestShouldAlwaysHaveUglyURLs(t *testing.T) {
t.Parallel()
for _, uglyURLs := range []bool{true, false} { for _, uglyURLs := range []bool{true, false} {
doTestShouldAlwaysHaveUglyURLs(t, uglyURLs) doTestShouldAlwaysHaveUglyURLs(t, uglyURLs)
} }
} }
func doTestShouldAlwaysHaveUglyURLs(t *testing.T, uglyURLs bool) { func doTestShouldAlwaysHaveUglyURLs(t *testing.T, uglyURLs bool) {
testCommonResetState()
viper.Set("defaultExtension", "html") cfg, fs := newTestCfg()
viper.Set("verbose", true)
viper.Set("baseURL", "http://auth/bub") cfg.Set("defaultExtension", "html")
viper.Set("disableSitemap", false) cfg.Set("verbose", true)
viper.Set("disableRSS", false) cfg.Set("baseURL", "http://auth/bub")
viper.Set("rssURI", "index.xml") cfg.Set("disableSitemap", false)
viper.Set("blackfriday", cfg.Set("disableRSS", false)
cfg.Set("rssURI", "index.xml")
cfg.Set("blackfriday",
map[string]interface{}{ map[string]interface{}{
"plainIDAnchors": true}) "plainIDAnchors": true})
viper.Set("uglyURLs", uglyURLs) cfg.Set("uglyURLs", uglyURLs)
sources := []source.ByteSource{ sources := []source.ByteSource{
{Name: filepath.FromSlash("sect/doc1.md"), Content: []byte("---\nmarkup: markdown\n---\n# title\nsome *content*")}, {Name: filepath.FromSlash("sect/doc1.md"), Content: []byte("---\nmarkup: markdown\n---\n# title\nsome *content*")},
{Name: filepath.FromSlash("sect/doc2.md"), Content: []byte("---\nurl: /ugly.html\nmarkup: markdown\n---\n# title\ndoc2 *content*")}, {Name: filepath.FromSlash("sect/doc2.md"), Content: []byte("---\nurl: /ugly.html\nmarkup: markdown\n---\n# title\ndoc2 *content*")},
} }
fs := hugofs.NewMem()
for _, src := range sources { for _, src := range sources {
writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content)) writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))
} }
@ -319,7 +321,7 @@ func doTestShouldAlwaysHaveUglyURLs(t *testing.T, uglyURLs bool) {
writeSource(t, fs, filepath.Join("layouts", "rss.xml"), "<root>RSS</root>") writeSource(t, fs, filepath.Join("layouts", "rss.xml"), "<root>RSS</root>")
writeSource(t, fs, filepath.Join("layouts", "sitemap.xml"), "<root>SITEMAP</root>") writeSource(t, fs, filepath.Join("layouts", "sitemap.xml"), "<root>SITEMAP</root>")
s := buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{}) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
var expectedPagePath string var expectedPagePath string
if uglyURLs { if uglyURLs {
@ -356,6 +358,7 @@ func doTestShouldAlwaysHaveUglyURLs(t *testing.T, uglyURLs bool) {
} }
func TestNewSiteDefaultLang(t *testing.T) { func TestNewSiteDefaultLang(t *testing.T) {
t.Parallel()
s, err := NewSiteDefaultLang() s, err := NewSiteDefaultLang()
require.NoError(t, err) require.NoError(t, err)
require.Equal(t, hugofs.Os, s.Fs.Source) require.Equal(t, hugofs.Os, s.Fs.Source)
@ -364,7 +367,7 @@ func TestNewSiteDefaultLang(t *testing.T) {
// Issue #1176 // Issue #1176
func TestSectionNaming(t *testing.T) { func TestSectionNaming(t *testing.T) {
t.Parallel()
for _, canonify := range []bool{true, false} { for _, canonify := range []bool{true, false} {
for _, uglify := range []bool{true, false} { for _, uglify := range []bool{true, false} {
for _, pluralize := range []bool{true, false} { for _, pluralize := range []bool{true, false} {
@ -375,13 +378,6 @@ func TestSectionNaming(t *testing.T) {
} }
func doTestSectionNaming(t *testing.T, canonify, uglify, pluralize bool) { func doTestSectionNaming(t *testing.T, canonify, uglify, pluralize bool) {
testCommonResetState()
viper.Set("baseURL", "http://auth/sub/")
viper.Set("defaultExtension", "html")
viper.Set("uglyURLs", uglify)
viper.Set("pluralizeListTitles", pluralize)
viper.Set("canonifyURLs", canonify)
var expectedPathSuffix string var expectedPathSuffix string
@ -397,7 +393,13 @@ func doTestSectionNaming(t *testing.T, canonify, uglify, pluralize bool) {
{Name: filepath.FromSlash("ラーメン/doc3.html"), Content: []byte("doc3")}, {Name: filepath.FromSlash("ラーメン/doc3.html"), Content: []byte("doc3")},
} }
fs := hugofs.NewMem() cfg, fs := newTestCfg()
cfg.Set("baseURL", "http://auth/sub/")
cfg.Set("defaultExtension", "html")
cfg.Set("uglyURLs", uglify)
cfg.Set("pluralizeListTitles", pluralize)
cfg.Set("canonifyURLs", canonify)
for _, source := range sources { for _, source := range sources {
writeSource(t, fs, filepath.Join("content", source.Name), string(source.Content)) writeSource(t, fs, filepath.Join("content", source.Name), string(source.Content))
@ -406,8 +408,8 @@ func doTestSectionNaming(t *testing.T, canonify, uglify, pluralize bool) {
writeSource(t, fs, filepath.Join("layouts", "_default/single.html"), "{{.Content}}") writeSource(t, fs, filepath.Join("layouts", "_default/single.html"), "{{.Content}}")
writeSource(t, fs, filepath.Join("layouts", "_default/list.html"), "{{.Title}}") writeSource(t, fs, filepath.Join("layouts", "_default/list.html"), "{{.Title}}")
buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{}) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
th := testHelper{s.Cfg}
tests := []struct { tests := []struct {
doc string doc string
pluralAware bool pluralAware bool
@ -427,13 +429,12 @@ func doTestSectionNaming(t *testing.T, canonify, uglify, pluralize bool) {
test.expected = inflect.Pluralize(test.expected) test.expected = inflect.Pluralize(test.expected)
} }
assertFileContent(t, fs, filepath.Join("public", test.doc), true, test.expected) th.assertFileContent(t, fs, filepath.Join("public", test.doc), true, test.expected)
} }
} }
func TestSkipRender(t *testing.T) { func TestSkipRender(t *testing.T) {
testCommonResetState() t.Parallel()
sources := []source.ByteSource{ sources := []source.ByteSource{
{Name: filepath.FromSlash("sect/doc1.html"), Content: []byte("---\nmarkup: markdown\n---\n# title\nsome *content*")}, {Name: filepath.FromSlash("sect/doc1.html"), Content: []byte("---\nmarkup: markdown\n---\n# title\nsome *content*")},
{Name: filepath.FromSlash("sect/doc2.html"), Content: []byte("<!doctype html><html><body>more content</body></html>")}, {Name: filepath.FromSlash("sect/doc2.html"), Content: []byte("<!doctype html><html><body>more content</body></html>")},
@ -447,13 +448,13 @@ func TestSkipRender(t *testing.T) {
{Name: filepath.FromSlash("doc9.html"), Content: []byte("<html><body>doc9: {{< myshortcode >}}</body></html>")}, {Name: filepath.FromSlash("doc9.html"), Content: []byte("<html><body>doc9: {{< myshortcode >}}</body></html>")},
} }
viper.Set("defaultExtension", "html") cfg, fs := newTestCfg()
viper.Set("verbose", true)
viper.Set("canonifyURLs", true)
viper.Set("uglyURLs", true)
viper.Set("baseURL", "http://auth/bub")
fs := hugofs.NewMem() cfg.Set("defaultExtension", "html")
cfg.Set("verbose", true)
cfg.Set("canonifyURLs", true)
cfg.Set("uglyURLs", true)
cfg.Set("baseURL", "http://auth/bub")
for _, src := range sources { for _, src := range sources {
writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content)) writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))
@ -465,7 +466,7 @@ func TestSkipRender(t *testing.T) {
writeSource(t, fs, filepath.Join("layouts", "head_abs"), "<head><script src=\"/script.js\"></script></head>") writeSource(t, fs, filepath.Join("layouts", "head_abs"), "<head><script src=\"/script.js\"></script></head>")
writeSource(t, fs, filepath.Join("layouts", "shortcodes", "myshortcode.html"), "SHORT") writeSource(t, fs, filepath.Join("layouts", "shortcodes", "myshortcode.html"), "SHORT")
buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{}) buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
tests := []struct { tests := []struct {
doc string doc string
@ -497,21 +498,20 @@ func TestSkipRender(t *testing.T) {
} }
func TestAbsURLify(t *testing.T) { func TestAbsURLify(t *testing.T) {
testCommonResetState() t.Parallel()
viper.Set("defaultExtension", "html")
viper.Set("uglyURLs", true)
sources := []source.ByteSource{ sources := []source.ByteSource{
{Name: filepath.FromSlash("sect/doc1.html"), Content: []byte("<!doctype html><html><head></head><body><a href=\"#frag1\">link</a></body></html>")}, {Name: filepath.FromSlash("sect/doc1.html"), Content: []byte("<!doctype html><html><head></head><body><a href=\"#frag1\">link</a></body></html>")},
{Name: filepath.FromSlash("blue/doc2.html"), Content: []byte("---\nf: t\n---\n<!doctype html><html><body>more content</body></html>")}, {Name: filepath.FromSlash("blue/doc2.html"), Content: []byte("---\nf: t\n---\n<!doctype html><html><body>more content</body></html>")},
} }
for _, baseURL := range []string{"http://auth/bub", "http://base", "//base"} { for _, baseURL := range []string{"http://auth/bub", "http://base", "//base"} {
for _, canonify := range []bool{true, false} { for _, canonify := range []bool{true, false} {
viper.Set("canonifyURLs", canonify)
viper.Set("baseURL", baseURL)
fs := hugofs.NewMem() cfg, fs := newTestCfg()
cfg.Set("defaultExtension", "html")
cfg.Set("uglyURLs", true)
cfg.Set("canonifyURLs", canonify)
cfg.Set("baseURL", baseURL)
for _, src := range sources { for _, src := range sources {
writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content)) writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))
@ -520,7 +520,8 @@ func TestAbsURLify(t *testing.T) {
writeSource(t, fs, filepath.Join("layouts", "blue/single.html"), templateWithURLAbs) writeSource(t, fs, filepath.Join("layouts", "blue/single.html"), templateWithURLAbs)
buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{}) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
th := testHelper{s.Cfg}
tests := []struct { tests := []struct {
file, expected string file, expected string
@ -541,7 +542,7 @@ func TestAbsURLify(t *testing.T) {
expected = strings.Replace(expected, baseURL, "", -1) expected = strings.Replace(expected, baseURL, "", -1)
} }
assertFileContent(t, fs, test.file, true, expected) th.assertFileContent(t, fs, test.file, true, expected)
} }
} }
@ -594,18 +595,16 @@ var weightedSources = []source.ByteSource{
} }
func TestOrderedPages(t *testing.T) { func TestOrderedPages(t *testing.T) {
testCommonResetState() t.Parallel()
cfg, fs := newTestCfg()
viper.Set("baseURL", "http://auth/bub") cfg.Set("baseURL", "http://auth/bub")
fs := hugofs.NewMem()
for _, src := range weightedSources { for _, src := range weightedSources {
writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content)) writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))
} }
s := buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{SkipRender: true}) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
if s.Sections["sect"][0].Weight != 2 || s.Sections["sect"][3].Weight != 6 { if s.Sections["sect"][0].Weight != 2 || s.Sections["sect"][3].Weight != 6 {
t.Errorf("Pages in unexpected order. First should be '%d', got '%d'", 2, s.Sections["sect"][0].Weight) t.Errorf("Pages in unexpected order. First should be '%d', got '%d'", 2, s.Sections["sect"][0].Weight)
@ -656,19 +655,18 @@ var groupedSources = []source.ByteSource{
} }
func TestGroupedPages(t *testing.T) { func TestGroupedPages(t *testing.T) {
testCommonResetState() t.Parallel()
defer func() { defer func() {
if r := recover(); r != nil { if r := recover(); r != nil {
fmt.Println("Recovered in f", r) fmt.Println("Recovered in f", r)
} }
}() }()
viper.Set("baseURL", "http://auth/bub") cfg, fs := newTestCfg()
cfg.Set("baseURL", "http://auth/bub")
fs := hugofs.NewMem()
writeSourcesToSource(t, "content", fs, groupedSources...) writeSourcesToSource(t, "content", fs, groupedSources...)
s := buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{}) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
rbysection, err := s.RegularPages.GroupBy("Section", "desc") rbysection, err := s.RegularPages.GroupBy("Section", "desc")
if err != nil { if err != nil {
@ -832,8 +830,7 @@ date = 2010-05-27T07:32:00Z
Front Matter with weighted tags and categories`) Front Matter with weighted tags and categories`)
func TestWeightedTaxonomies(t *testing.T) { func TestWeightedTaxonomies(t *testing.T) {
testCommonResetState() t.Parallel()
sources := []source.ByteSource{ sources := []source.ByteSource{
{Name: filepath.FromSlash("sect/doc1.md"), Content: pageWithWeightedTaxonomies2}, {Name: filepath.FromSlash("sect/doc1.md"), Content: pageWithWeightedTaxonomies2},
{Name: filepath.FromSlash("sect/doc2.md"), Content: pageWithWeightedTaxonomies1}, {Name: filepath.FromSlash("sect/doc2.md"), Content: pageWithWeightedTaxonomies1},
@ -844,12 +841,13 @@ func TestWeightedTaxonomies(t *testing.T) {
taxonomies["tag"] = "tags" taxonomies["tag"] = "tags"
taxonomies["category"] = "categories" taxonomies["category"] = "categories"
viper.Set("baseURL", "http://auth/bub") cfg, fs := newTestCfg()
viper.Set("taxonomies", taxonomies)
cfg.Set("baseURL", "http://auth/bub")
cfg.Set("taxonomies", taxonomies)
fs := hugofs.NewMem()
writeSourcesToSource(t, "content", fs, sources...) writeSourcesToSource(t, "content", fs, sources...)
s := buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{}) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
if s.Taxonomies["tags"]["a"][0].Page.Title != "foo" { if s.Taxonomies["tags"]["a"][0].Page.Title != "foo" {
t.Errorf("Pages in unexpected order, 'foo' expected first, got '%v'", s.Taxonomies["tags"]["a"][0].Page.Title) t.Errorf("Pages in unexpected order, 'foo' expected first, got '%v'", s.Taxonomies["tags"]["a"][0].Page.Title)
@ -865,7 +863,8 @@ func TestWeightedTaxonomies(t *testing.T) {
} }
func findPage(site *Site, f string) *Page { func findPage(site *Site, f string) *Page {
currentPath := source.NewFile(filepath.FromSlash(f)) sp := source.NewSourceSpec(site.Cfg, site.Fs)
currentPath := sp.NewFile(filepath.FromSlash(f))
//t.Logf("looking for currentPath: %s", currentPath.Path()) //t.Logf("looking for currentPath: %s", currentPath.Path())
for _, page := range site.Pages { for _, page := range site.Pages {
@ -901,24 +900,24 @@ func setupLinkingMockSite(t *testing.T) *Site {
{Name: filepath.FromSlash("level2/level3/common.png"), Content: []byte("")}, {Name: filepath.FromSlash("level2/level3/common.png"), Content: []byte("")},
} }
viper.Set("baseURL", "http://auth/") cfg, fs := newTestCfg()
viper.Set("defaultExtension", "html")
viper.Set("uglyURLs", false) cfg.Set("baseURL", "http://auth/")
viper.Set("pluralizeListTitles", false) cfg.Set("defaultExtension", "html")
viper.Set("canonifyURLs", false) cfg.Set("uglyURLs", false)
viper.Set("blackfriday", cfg.Set("pluralizeListTitles", false)
cfg.Set("canonifyURLs", false)
cfg.Set("blackfriday",
map[string]interface{}{ map[string]interface{}{
"sourceRelativeLinksProjectFolder": "/docs"}) "sourceRelativeLinksProjectFolder": "/docs"})
fs := hugofs.NewMem()
writeSourcesToSource(t, "content", fs, sources...) writeSourcesToSource(t, "content", fs, sources...)
return buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{}) return buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
} }
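
A note on the shape shared by the migrated tests above: each test now builds its own viper instance and in-memory Fs via newTestCfg, hands both to deps.DepsCfg, and asserts through a testHelper that carries that config, which is what makes t.Parallel() safe. A minimal sketch, with hypothetical content and values:

func TestExamplePerTestConfig(t *testing.T) {
    t.Parallel() // safe: no package-global viper is touched

    cfg, fs := newTestCfg()
    cfg.Set("baseURL", "http://example.org/")
    cfg.Set("uglyURLs", false)

    writeSource(t, fs, filepath.Join("content", "sect", "doc1.md"), "---\ntitle: Doc1\n---\nHello")
    writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), "{{ .Title }}")

    s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})

    th := testHelper{s.Cfg}
    th.assertFileContent(t, fs, filepath.Join("public", "sect", "doc1", "index.html"), false, "Doc1")
}
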
func TestRefLinking(t *testing.T) { func TestRefLinking(t *testing.T) {
testCommonResetState() t.Parallel()
site := setupLinkingMockSite(t) site := setupLinkingMockSite(t)
currentPage := findPage(site, "level2/level3/index.md") currentPage := findPage(site, "level2/level3/index.md")
@ -941,8 +940,7 @@ func TestRefLinking(t *testing.T) {
} }
func TestSourceRelativeLinksing(t *testing.T) { func TestSourceRelativeLinksing(t *testing.T) {
testCommonResetState() t.Parallel()
site := setupLinkingMockSite(t) site := setupLinkingMockSite(t)
type resultMap map[string]string type resultMap map[string]string
@ -1077,8 +1075,7 @@ func TestSourceRelativeLinksing(t *testing.T) {
} }
func TestSourceRelativeLinkFileing(t *testing.T) { func TestSourceRelativeLinkFileing(t *testing.T) {
testCommonResetState() t.Parallel()
site := setupLinkingMockSite(t) site := setupLinkingMockSite(t)
type resultMap map[string]string type resultMap map[string]string

View file

@ -20,9 +20,7 @@ import (
"html/template" "html/template"
"github.com/spf13/hugo/deps" "github.com/spf13/hugo/deps"
"github.com/spf13/hugo/hugofs"
"github.com/spf13/hugo/source" "github.com/spf13/hugo/source"
"github.com/spf13/viper"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -35,14 +33,6 @@ slug: slug-doc-2
slug doc 2 content slug doc 2 content
` `
const indexTemplate = "{{ range .Data.Pages }}.{{ end }}"
func must(err error) {
if err != nil {
panic(err)
}
}
var urlFakeSource = []source.ByteSource{ var urlFakeSource = []source.ByteSource{
{Name: filepath.FromSlash("content/blue/doc1.md"), Content: []byte(slugDoc1)}, {Name: filepath.FromSlash("content/blue/doc1.md"), Content: []byte(slugDoc1)},
{Name: filepath.FromSlash("content/blue/doc2.md"), Content: []byte(slugDoc2)}, {Name: filepath.FromSlash("content/blue/doc2.md"), Content: []byte(slugDoc2)},
@ -50,8 +40,7 @@ var urlFakeSource = []source.ByteSource{
// Issue #1105 // Issue #1105
func TestShouldNotAddTrailingSlashToBaseURL(t *testing.T) { func TestShouldNotAddTrailingSlashToBaseURL(t *testing.T) {
testCommonResetState() t.Parallel()
for i, this := range []struct { for i, this := range []struct {
in string in string
expected string expected string
@ -61,8 +50,10 @@ func TestShouldNotAddTrailingSlashToBaseURL(t *testing.T) {
{"http://base.com/sub", "http://base.com/sub"}, {"http://base.com/sub", "http://base.com/sub"},
{"http://base.com", "http://base.com"}} { {"http://base.com", "http://base.com"}} {
viper.Set("baseURL", this.in) cfg, fs := newTestCfg()
s, err := NewSiteDefaultLang() cfg.Set("baseURL", this.in)
d := deps.DepsCfg{Cfg: cfg, Fs: fs}
s, err := NewSiteForCfg(d)
require.NoError(t, err) require.NoError(t, err)
s.initializeSiteInfo() s.initializeSiteInfo()
@ -70,18 +61,16 @@ func TestShouldNotAddTrailingSlashToBaseURL(t *testing.T) {
t.Errorf("[%d] got %s expected %s", i, s.Info.BaseURL, this.expected) t.Errorf("[%d] got %s expected %s", i, s.Info.BaseURL, this.expected)
} }
} }
} }
func TestPageCount(t *testing.T) { func TestPageCount(t *testing.T) {
testCommonResetState() t.Parallel()
cfg, fs := newTestCfg()
cfg.Set("uglyURLs", false)
cfg.Set("paginate", 10)
viper.Set("uglyURLs", false)
viper.Set("paginate", 10)
fs := hugofs.NewMem()
writeSourcesToSource(t, "content", fs, urlFakeSource...) writeSourcesToSource(t, "content", fs, urlFakeSource...)
s := buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{}) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
_, err := s.Fs.Destination.Open("public/blue") _, err := s.Fs.Destination.Open("public/blue")
if err != nil { if err != nil {

View file

@ -19,9 +19,7 @@ import (
"reflect" "reflect"
"github.com/spf13/hugo/deps" "github.com/spf13/hugo/deps"
"github.com/spf13/hugo/hugofs"
"github.com/spf13/hugo/tplapi" "github.com/spf13/hugo/tplapi"
"github.com/spf13/viper"
) )
const sitemapTemplate = `<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> const sitemapTemplate = `<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
@ -36,19 +34,18 @@ const sitemapTemplate = `<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/
</urlset>` </urlset>`
func TestSitemapOutput(t *testing.T) { func TestSitemapOutput(t *testing.T) {
t.Parallel()
for _, internal := range []bool{false, true} { for _, internal := range []bool{false, true} {
doTestSitemapOutput(t, internal) doTestSitemapOutput(t, internal)
} }
} }
func doTestSitemapOutput(t *testing.T, internal bool) { func doTestSitemapOutput(t *testing.T, internal bool) {
testCommonResetState()
viper.Set("baseURL", "http://auth/bub/") cfg, fs := newTestCfg()
cfg.Set("baseURL", "http://auth/bub/")
fs := hugofs.NewMem() depsCfg := deps.DepsCfg{Fs: fs, Cfg: cfg}
depsCfg := deps.DepsCfg{Fs: fs}
if !internal { if !internal {
depsCfg.WithTemplate = func(templ tplapi.Template) error { depsCfg.WithTemplate = func(templ tplapi.Template) error {
@ -59,8 +56,9 @@ func doTestSitemapOutput(t *testing.T, internal bool) {
writeSourcesToSource(t, "content", fs, weightedSources...) writeSourcesToSource(t, "content", fs, weightedSources...)
s := buildSingleSite(t, depsCfg, BuildCfg{}) s := buildSingleSite(t, depsCfg, BuildCfg{})
th := testHelper{s.Cfg}
assertFileContent(t, s.Fs, "public/sitemap.xml", true, th.assertFileContent(t, s.Fs, "public/sitemap.xml", true,
// Regular page // Regular page
" <loc>http://auth/bub/sect/doc1/</loc>", " <loc>http://auth/bub/sect/doc1/</loc>",
// Home page // Home page
@ -76,6 +74,7 @@ func doTestSitemapOutput(t *testing.T, internal bool) {
} }
func TestParseSitemap(t *testing.T) { func TestParseSitemap(t *testing.T) {
t.Parallel()
expected := Sitemap{Priority: 3.0, Filename: "doo.xml", ChangeFreq: "3"} expected := Sitemap{Priority: 3.0, Filename: "doo.xml", ChangeFreq: "3"}
input := map[string]interface{}{ input := map[string]interface{}{
"changefreq": "3", "changefreq": "3",

View file

@ -19,26 +19,22 @@ import (
"testing" "testing"
"github.com/spf13/hugo/deps" "github.com/spf13/hugo/deps"
"github.com/spf13/hugo/hugofs"
"github.com/spf13/viper"
) )
func TestByCountOrderOfTaxonomies(t *testing.T) { func TestByCountOrderOfTaxonomies(t *testing.T) {
defer testCommonResetState() t.Parallel()
taxonomies := make(map[string]string) taxonomies := make(map[string]string)
taxonomies["tag"] = "tags" taxonomies["tag"] = "tags"
taxonomies["category"] = "categories" taxonomies["category"] = "categories"
viper.Set("taxonomies", taxonomies) cfg, fs := newTestCfg()
fs := hugofs.NewMem() cfg.Set("taxonomies", taxonomies)
writeSource(t, fs, filepath.Join("content", "page.md"), pageYamlWithTaxonomiesA) writeSource(t, fs, filepath.Join("content", "page.md"), pageYamlWithTaxonomiesA)
s := buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{}) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
st := make([]string, 0) st := make([]string, 0)
for _, t := range s.Taxonomies["tags"].ByCount() { for _, t := range s.Taxonomies["tags"].ByCount() {

View file

@ -20,13 +20,11 @@ import (
"strings" "strings"
"github.com/spf13/viper"
"github.com/spf13/hugo/deps" "github.com/spf13/hugo/deps"
"github.com/spf13/hugo/hugofs"
) )
func TestAllTemplateEngines(t *testing.T) { func TestAllTemplateEngines(t *testing.T) {
t.Parallel()
noOp := func(s string) string { noOp := func(s string) string {
return s return s
} }
@ -57,10 +55,7 @@ func TestAllTemplateEngines(t *testing.T) {
func doTestTemplateEngine(t *testing.T, suffix string, templateFixer func(s string) string) { func doTestTemplateEngine(t *testing.T, suffix string, templateFixer func(s string) string) {
testCommonResetState() cfg, fs := newTestCfg()
fs := hugofs.NewMem()
viper.SetFs(fs.Source)
writeSource(t, fs, filepath.Join("content", "p.md"), ` writeSource(t, fs, filepath.Join("content", "p.md"), `
--- ---
@ -88,9 +83,10 @@ p
writeSource(t, fs, filepath.Join("layouts", "_default", fmt.Sprintf("single.%s", suffix)), templ) writeSource(t, fs, filepath.Join("layouts", "_default", fmt.Sprintf("single.%s", suffix)), templ)
buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{}) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
th := testHelper{s.Cfg}
assertFileContent(t, fs, filepath.Join("public", "p", "index.html"), true, th.assertFileContent(t, fs, filepath.Join("public", "p", "index.html"), true,
"Page Title: My Title", "Page Title: My Title",
"My Content", "My Content",
"Hello World", "Hello World",

View file

@ -24,8 +24,12 @@ import (
) )
func TestBaseGoTemplate(t *testing.T) { func TestBaseGoTemplate(t *testing.T) {
t.Parallel()
var fs *hugofs.Fs var (
fs *hugofs.Fs
cfg *viper.Viper
th testHelper
)
// Variants: // Variants:
// 1. <current-path>/<template-name>-baseof.<suffix>, e.g. list-baseof.<suffix>. // 1. <current-path>/<template-name>-baseof.<suffix>, e.g. list-baseof.<suffix>.
@ -44,7 +48,7 @@ func TestBaseGoTemplate(t *testing.T) {
}, },
func(t *testing.T) { func(t *testing.T) {
assertFileContent(t, fs, filepath.Join("public", "sect", "index.html"), false, "Base: sect") th.assertFileContent(t, fs, filepath.Join("public", "sect", "index.html"), false, "Base: sect")
}, },
}, },
{ {
@ -55,7 +59,7 @@ func TestBaseGoTemplate(t *testing.T) {
}, },
func(t *testing.T) { func(t *testing.T) {
assertFileContent(t, fs, filepath.Join("public", "index.html"), false, "Base: index") th.assertFileContent(t, fs, filepath.Join("public", "index.html"), false, "Base: index")
}, },
}, },
{ {
@ -66,7 +70,7 @@ func TestBaseGoTemplate(t *testing.T) {
}, },
func(t *testing.T) { func(t *testing.T) {
assertFileContent(t, fs, filepath.Join("public", "sect", "index.html"), false, "Base: list") th.assertFileContent(t, fs, filepath.Join("public", "sect", "index.html"), false, "Base: list")
}, },
}, },
{ {
@ -77,64 +81,63 @@ func TestBaseGoTemplate(t *testing.T) {
}, },
func(t *testing.T) { func(t *testing.T) {
assertFileContent(t, fs, filepath.Join("public", "sect", "index.html"), false, "Base: list") th.assertFileContent(t, fs, filepath.Join("public", "sect", "index.html"), false, "Base: list")
}, },
}, },
{ {
// Variant 1, theme, use project's base // Variant 1, theme, use project's base
func(t *testing.T) { func(t *testing.T) {
viper.Set("theme", "mytheme") cfg.Set("theme", "mytheme")
writeSource(t, fs, filepath.Join("layouts", "section", "sect-baseof.html"), `Base: {{block "main" .}}block{{end}}`) writeSource(t, fs, filepath.Join("layouts", "section", "sect-baseof.html"), `Base: {{block "main" .}}block{{end}}`)
writeSource(t, fs, filepath.Join("themes", "mytheme", "layouts", "section", "sect-baseof.html"), `Base Theme: {{block "main" .}}block{{end}}`) writeSource(t, fs, filepath.Join("themes", "mytheme", "layouts", "section", "sect-baseof.html"), `Base Theme: {{block "main" .}}block{{end}}`)
writeSource(t, fs, filepath.Join("layouts", "section", "sect.html"), `{{define "main"}}sect{{ end }}`) writeSource(t, fs, filepath.Join("layouts", "section", "sect.html"), `{{define "main"}}sect{{ end }}`)
}, },
func(t *testing.T) { func(t *testing.T) {
assertFileContent(t, fs, filepath.Join("public", "sect", "index.html"), false, "Base: sect") th.assertFileContent(t, fs, filepath.Join("public", "sect", "index.html"), false, "Base: sect")
}, },
}, },
{ {
// Variant 1, theme, use theme's base // Variant 1, theme, use theme's base
func(t *testing.T) { func(t *testing.T) {
viper.Set("theme", "mytheme") cfg.Set("theme", "mytheme")
writeSource(t, fs, filepath.Join("themes", "mytheme", "layouts", "section", "sect-baseof.html"), `Base Theme: {{block "main" .}}block{{end}}`) writeSource(t, fs, filepath.Join("themes", "mytheme", "layouts", "section", "sect-baseof.html"), `Base Theme: {{block "main" .}}block{{end}}`)
writeSource(t, fs, filepath.Join("layouts", "section", "sect.html"), `{{define "main"}}sect{{ end }}`) writeSource(t, fs, filepath.Join("layouts", "section", "sect.html"), `{{define "main"}}sect{{ end }}`)
}, },
func(t *testing.T) { func(t *testing.T) {
assertFileContent(t, fs, filepath.Join("public", "sect", "index.html"), false, "Base Theme: sect") th.assertFileContent(t, fs, filepath.Join("public", "sect", "index.html"), false, "Base Theme: sect")
}, },
}, },
{ {
// Variant 4, theme, use project's base // Variant 4, theme, use project's base
func(t *testing.T) { func(t *testing.T) {
viper.Set("theme", "mytheme") cfg.Set("theme", "mytheme")
writeSource(t, fs, filepath.Join("layouts", "_default", "baseof.html"), `Base: {{block "main" .}}block{{end}}`) writeSource(t, fs, filepath.Join("layouts", "_default", "baseof.html"), `Base: {{block "main" .}}block{{end}}`)
writeSource(t, fs, filepath.Join("themes", "mytheme", "layouts", "_default", "baseof.html"), `Base Theme: {{block "main" .}}block{{end}}`) writeSource(t, fs, filepath.Join("themes", "mytheme", "layouts", "_default", "baseof.html"), `Base Theme: {{block "main" .}}block{{end}}`)
writeSource(t, fs, filepath.Join("themes", "mytheme", "layouts", "_default", "list.html"), `{{define "main"}}list{{ end }}`) writeSource(t, fs, filepath.Join("themes", "mytheme", "layouts", "_default", "list.html"), `{{define "main"}}list{{ end }}`)
}, },
func(t *testing.T) { func(t *testing.T) {
assertFileContent(t, fs, filepath.Join("public", "sect", "index.html"), false, "Base: list") th.assertFileContent(t, fs, filepath.Join("public", "sect", "index.html"), false, "Base: list")
}, },
}, },
{ {
// Variant 4, theme, use theme's base // Variant 4, theme, use theme's base
func(t *testing.T) { func(t *testing.T) {
viper.Set("theme", "mytheme") cfg.Set("theme", "mytheme")
writeSource(t, fs, filepath.Join("themes", "mytheme", "layouts", "_default", "baseof.html"), `Base Theme: {{block "main" .}}block{{end}}`) writeSource(t, fs, filepath.Join("themes", "mytheme", "layouts", "_default", "baseof.html"), `Base Theme: {{block "main" .}}block{{end}}`)
writeSource(t, fs, filepath.Join("themes", "mytheme", "layouts", "_default", "list.html"), `{{define "main"}}list{{ end }}`) writeSource(t, fs, filepath.Join("themes", "mytheme", "layouts", "_default", "list.html"), `{{define "main"}}list{{ end }}`)
}, },
func(t *testing.T) { func(t *testing.T) {
assertFileContent(t, fs, filepath.Join("public", "sect", "index.html"), false, "Base Theme: list") th.assertFileContent(t, fs, filepath.Join("public", "sect", "index.html"), false, "Base Theme: list")
}, },
}, },
} { } {
testCommonResetState() cfg, fs = newTestCfg()
th = testHelper{cfg}
fs = hugofs.NewMem()
writeSource(t, fs, filepath.Join("content", "sect", "page.md"), `--- writeSource(t, fs, filepath.Join("content", "sect", "page.md"), `---
title: Template test title: Template test
@ -143,7 +146,7 @@ Some content
`) `)
this.setup(t) this.setup(t)
buildSingleSite(t, deps.DepsCfg{Fs: fs}, BuildCfg{}) buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
this.assert(t) this.assert(t)

View file

@ -6,20 +6,64 @@ import (
"github.com/spf13/hugo/deps" "github.com/spf13/hugo/deps"
"github.com/spf13/hugo/helpers" "github.com/spf13/hugo/helpers"
"github.com/spf13/hugo/hugofs"
"github.com/spf13/hugo/source" "github.com/spf13/hugo/source"
"github.com/spf13/hugo/tplapi" "github.com/spf13/hugo/tplapi"
"github.com/spf13/viper" "github.com/spf13/viper"
"io/ioutil"
"os"
"log"
"github.com/spf13/hugo/hugofs"
jww "github.com/spf13/jwalterweatherman"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
func newTestDepsConfig() deps.DepsCfg { func newTestPathSpec(fs *hugofs.Fs, v *viper.Viper) *helpers.PathSpec {
return deps.DepsCfg{Fs: hugofs.NewMem()} l := helpers.NewDefaultLanguage(v)
return helpers.NewPathSpec(fs, l)
} }
func newTestPathSpec() *helpers.PathSpec { func newTestCfg() (*viper.Viper, *hugofs.Fs) {
return helpers.NewPathSpec(hugofs.NewMem(), viper.GetViper())
v := viper.New()
fs := hugofs.NewMem(v)
v.SetFs(fs.Source)
loadDefaultSettingsFor(v)
// Default is false, but true is easier to use as default in tests
v.Set("defaultContentLanguageInSubdir", true)
return v, fs
}
// newTestSite creates a new site in the English language with in-memory Fs.
// The site will have a template system loaded and ready to use.
// Note: This is only used in single site tests.
func newTestSite(t testing.TB, configKeyValues ...interface{}) *Site {
cfg, fs := newTestCfg()
for i := 0; i < len(configKeyValues); i += 2 {
cfg.Set(configKeyValues[i].(string), configKeyValues[i+1])
}
d := deps.DepsCfg{Language: helpers.NewLanguage("en", cfg), Fs: fs}
s, err := NewSiteForCfg(d)
if err != nil {
t.Fatalf("Failed to create Site: %s", err)
}
return s
}
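
Hypothetical usage of newTestSite above: the variadic key/value pairs are applied to the site's own config before the Site is created, so a test can tweak settings without touching any shared state.

s := newTestSite(t, "baseURL", "http://example.org/", "uglyURLs", true)
// s.Cfg answers for this site only; other parallel tests are unaffected.
baseURL := s.Cfg.GetString("baseURL") // "http://example.org/"
_ = baseURL
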
func newDebugLogger() *jww.Notepad {
return jww.NewNotepad(jww.LevelDebug, jww.LevelError, os.Stdout, ioutil.Discard, "", log.Ldate|log.Ltime)
} }
func createWithTemplateFromNameValues(additionalTemplates ...string) func(templ tplapi.Template) error { func createWithTemplateFromNameValues(additionalTemplates ...string) func(templ tplapi.Template) error {
@ -36,11 +80,21 @@ func createWithTemplateFromNameValues(additionalTemplates ...string) func(templ
} }
func buildSingleSite(t *testing.T, depsCfg deps.DepsCfg, buildCfg BuildCfg) *Site { func buildSingleSite(t *testing.T, depsCfg deps.DepsCfg, buildCfg BuildCfg) *Site {
h, err := NewHugoSitesFromConfiguration(depsCfg) return buildSingleSiteExpected(t, false, depsCfg, buildCfg)
}
func buildSingleSiteExpected(t *testing.T, expectBuildError bool, depsCfg deps.DepsCfg, buildCfg BuildCfg) *Site {
h, err := NewHugoSites(depsCfg)
require.NoError(t, err) require.NoError(t, err)
require.Len(t, h.Sites, 1) require.Len(t, h.Sites, 1)
if expectBuildError {
require.Error(t, h.Build(buildCfg))
return nil
}
require.NoError(t, h.Build(buildCfg)) require.NoError(t, h.Build(buildCfg))
return h.Sites[0] return h.Sites[0]
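
buildSingleSiteExpected also lets a test assert that Build fails. A hedged sketch; the misconfiguration below is hypothetical, any setup known to make Build return an error would do:

cfg, fs := newTestCfg()
cfg.Set("theme", "missing-theme") // hypothetical: assumed to make the build fail
writeSource(t, fs, filepath.Join("content", "p.md"), "---\ntitle: P\n---\n")
buildSingleSiteExpected(t, true, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
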

View file

@ -1,4 +1,4 @@
// Copyright 2015 The Hugo Authors. All rights reserved. // Copyright 2017 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -11,48 +11,52 @@
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
package tpl package i18n
import ( import (
"github.com/nicksnyder/go-i18n/i18n/bundle" "github.com/nicksnyder/go-i18n/i18n/bundle"
"github.com/spf13/hugo/config"
"github.com/spf13/hugo/helpers" "github.com/spf13/hugo/helpers"
jww "github.com/spf13/jwalterweatherman" jww "github.com/spf13/jwalterweatherman"
"github.com/spf13/viper"
) )
var ( var (
// Logi18nWarnings set to true to print warnings about missing language strings
Logi18nWarnings bool
i18nWarningLogger = helpers.NewDistinctFeedbackLogger() i18nWarningLogger = helpers.NewDistinctFeedbackLogger()
currentLanguage *helpers.Language
) )
type translate struct { // Translator handles i18n translations.
type Translator struct {
translateFuncs map[string]bundle.TranslateFunc translateFuncs map[string]bundle.TranslateFunc
cfg config.Provider
current bundle.TranslateFunc logger *jww.Notepad
} }
// TODO(bep) global translator // NewTranslator creates a new Translator for the given language bundle and configuration.
var translator *translate func NewTranslator(b *bundle.Bundle, cfg config.Provider, logger *jww.Notepad) Translator {
t := Translator{cfg: cfg, logger: logger, translateFuncs: make(map[string]bundle.TranslateFunc)}
t.initFuncs(b)
return t
}
// SetTranslateLang sets the translations language to use during template processing. // Func gets the translate func for the given language, or for the default
// This construction is unfortunate, but the template system is currently global. // configured language if not found.
func SetTranslateLang(language *helpers.Language) error { func (t Translator) Func(lang string) bundle.TranslateFunc {
currentLanguage = language if f, ok := t.translateFuncs[lang]; ok {
if f, ok := translator.translateFuncs[language.Lang]; ok { return f
translator.current = f
} else {
jww.WARN.Printf("Translation func for language %v not found, use default.", language.Lang)
translator.current = translator.translateFuncs[viper.GetString("defaultContentLanguage")]
} }
return nil t.logger.WARN.Printf("Translation func for language %v not found, use default.", lang)
if f, ok := t.translateFuncs[t.cfg.GetString("defaultContentLanguage")]; ok {
return f
}
t.logger.WARN.Println("i18n not initialized, check that you have language file (in i18n) that matches the site language or the default language.")
return func(translationID string, args ...interface{}) string {
return ""
}
} }
// SetI18nTfuncs sets the language bundle to be used for i18n. func (t Translator) initFuncs(bndl *bundle.Bundle) {
func SetI18nTfuncs(bndl *bundle.Bundle) { defaultContentLanguage := t.cfg.GetString("defaultContentLanguage")
translator = &translate{translateFuncs: make(map[string]bundle.TranslateFunc)}
defaultContentLanguage := viper.GetString("defaultContentLanguage")
var ( var (
defaultT bundle.TranslateFunc defaultT bundle.TranslateFunc
err error err error
@ -64,18 +68,18 @@ func SetI18nTfuncs(bndl *bundle.Bundle) {
jww.WARN.Printf("No translation bundle found for default language %q", defaultContentLanguage) jww.WARN.Printf("No translation bundle found for default language %q", defaultContentLanguage)
} }
enableMissingTranslationPlaceholders := viper.GetBool("enableMissingTranslationPlaceholders") enableMissingTranslationPlaceholders := t.cfg.GetBool("enableMissingTranslationPlaceholders")
for _, lang := range bndl.LanguageTags() { for _, lang := range bndl.LanguageTags() {
currentLang := lang currentLang := lang
translator.translateFuncs[currentLang] = func(translationID string, args ...interface{}) string { t.translateFuncs[currentLang] = func(translationID string, args ...interface{}) string {
tFunc, err := bndl.Tfunc(currentLang) tFunc, err := bndl.Tfunc(currentLang)
if err != nil { if err != nil {
jww.WARN.Printf("could not load translations for language %q (%s), will use default content language.\n", lang, err) jww.WARN.Printf("could not load translations for language %q (%s), will use default content language.\n", lang, err)
} else if translated := tFunc(translationID, args...); translated != translationID { } else if translated := tFunc(translationID, args...); translated != translationID {
return translated return translated
} }
if Logi18nWarnings { if t.cfg.GetBool("logI18nWarnings") {
i18nWarningLogger.Printf("i18n|MISSING_TRANSLATION|%s|%s", currentLang, translationID) i18nWarningLogger.Printf("i18n|MISSING_TRANSLATION|%s|%s", currentLang, translationID)
} }
if enableMissingTranslationPlaceholders { if enableMissingTranslationPlaceholders {
@ -90,11 +94,3 @@ func SetI18nTfuncs(bndl *bundle.Bundle) {
} }
} }
} }
func i18nTranslate(id string, args ...interface{}) (string, error) {
if translator == nil || translator.current == nil {
helpers.DistinctErrorLog.Printf("i18n not initialized, check that you have language file (in i18n) that matches the site language or the default language.")
return "", nil
}
return translator.current(id, args...), nil
}
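
With the globals gone, i18n is set up explicitly: build a go-i18n bundle, wrap it in a Translator together with the config and a logger, then ask for the translate func of a language. A minimal sketch mirroring the test further down; the translation file content and message ID are hypothetical:

logger := jww.NewNotepad(jww.LevelError, jww.LevelError, os.Stdout, ioutil.Discard, "", log.Ldate|log.Ltime)

i18nBundle := bundle.New()
_ = i18nBundle.ParseTranslationFileBytes("en.yaml", []byte("- id: hello\n  translation: \"Hello\""))

v := viper.New()
v.Set("defaultContentLanguage", "en")

translator := NewTranslator(i18nBundle, v, logger)
helloEn := translator.Func("en")
_ = helloEn("hello") // "Hello"
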

View file

@ -1,4 +1,4 @@
// Copyright 2015 The Hugo Authors. All rights reserved. // Copyright 2017 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -11,17 +11,25 @@
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
package tpl package i18n
import ( import (
"testing" "testing"
"io/ioutil"
"os"
"log"
"github.com/nicksnyder/go-i18n/i18n/bundle" "github.com/nicksnyder/go-i18n/i18n/bundle"
"github.com/spf13/hugo/helpers" "github.com/spf13/hugo/config"
jww "github.com/spf13/jwalterweatherman"
"github.com/spf13/viper" "github.com/spf13/viper"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/require"
) )
var logger = jww.NewNotepad(jww.LevelError, jww.LevelError, os.Stdout, ioutil.Discard, "", log.Ldate|log.Ltime)
type i18nTest struct { type i18nTest struct {
data map[string][]byte data map[string][]byte
args interface{} args interface{}
@ -106,7 +114,7 @@ var i18nTests = []i18nTest{
}, },
} }
func doTestI18nTranslate(t *testing.T, data map[string][]byte, lang, id string, args interface{}) string { func doTestI18nTranslate(t *testing.T, data map[string][]byte, lang, id string, args interface{}, cfg config.Provider) string {
i18nBundle := bundle.New() i18nBundle := bundle.New()
for file, content := range data { for file, content := range data {
@ -116,25 +124,23 @@ func doTestI18nTranslate(t *testing.T, data map[string][]byte, lang, id string,
} }
} }
SetI18nTfuncs(i18nBundle) translator := NewTranslator(i18nBundle, cfg, logger)
SetTranslateLang(helpers.NewLanguage(lang))
f := translator.Func(lang)
translated := f(id, args)
translated, err := i18nTranslate(id, args)
if err != nil {
t.Errorf("Error translating '%s': %s", id, err)
}
return translated return translated
} }
func TestI18nTranslate(t *testing.T) { func TestI18nTranslate(t *testing.T) {
var actual, expected string var actual, expected string
v := viper.New()
viper.SetDefault("defaultContentLanguage", "en") v.SetDefault("defaultContentLanguage", "en")
viper.Set("currentContentLanguage", helpers.NewLanguage("en"))
// Test without and with placeholders // Test without and with placeholders
for _, enablePlaceholders := range []bool{false, true} { for _, enablePlaceholders := range []bool{false, true} {
viper.Set("enableMissingTranslationPlaceholders", enablePlaceholders) v.Set("enableMissingTranslationPlaceholders", enablePlaceholders)
for _, test := range i18nTests { for _, test := range i18nTests {
if enablePlaceholders { if enablePlaceholders {
@ -142,8 +148,8 @@ func TestI18nTranslate(t *testing.T) {
} else { } else {
expected = test.expected expected = test.expected
} }
actual = doTestI18nTranslate(t, test.data, test.lang, test.id, test.args) actual = doTestI18nTranslate(t, test.data, test.lang, test.id, test.args, v)
assert.Equal(t, expected, actual) require.Equal(t, expected, actual)
} }
} }
} }

View file

@ -0,0 +1,73 @@
// Copyright 2017 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package i18n
import (
"fmt"
"github.com/nicksnyder/go-i18n/i18n/bundle"
"github.com/spf13/hugo/deps"
"github.com/spf13/hugo/source"
)
// TranslationProvider provides translation handling, i.e. loading
// of bundles etc.
type TranslationProvider struct {
t Translator
}
// NewTranslationProvider creates a new translation provider.
func NewTranslationProvider() *TranslationProvider {
return &TranslationProvider{}
}
// Update updates the i18n func in the provided Deps.
func (tp *TranslationProvider) Update(d *deps.Deps) error {
dir := d.PathSpec.AbsPathify(d.Cfg.GetString("i18nDir"))
sp := source.NewSourceSpec(d.Cfg, d.Fs)
sources := []source.Input{sp.NewFilesystem(dir)}
themeI18nDir, err := d.PathSpec.GetThemeI18nDirPath()
if err == nil {
sources = []source.Input{sp.NewFilesystem(themeI18nDir), sources[0]}
}
d.Log.DEBUG.Printf("Load I18n from %q", sources)
i18nBundle := bundle.New()
for _, currentSource := range sources {
for _, r := range currentSource.Files() {
err := i18nBundle.ParseTranslationFileBytes(r.LogicalName(), r.Bytes())
if err != nil {
return fmt.Errorf("Failed to load translations in file %q: %s", r.LogicalName(), err)
}
}
}
tp.t = NewTranslator(i18nBundle, d.Cfg, d.Log)
d.Translate = tp.t.Func(d.Language.Lang)
return nil
}
// Clone sets the language func for the new language.
func (tp *TranslationProvider) Clone(d *deps.Deps) error {
d.Translate = tp.t.Func(d.Language.Lang)
return nil
}
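
For orientation, a sketch of how this provider is meant to be wired into a deps.Deps (compare the template test helper later in this commit); cfg and fs stand for an already configured config.Provider and *hugofs.Fs:

depsCfg := deps.DepsCfg{
    Language:            helpers.NewLanguage("en", cfg),
    Cfg:                 cfg,
    Fs:                  fs,
    TemplateProvider:    tpl.DefaultTemplateProvider,
    TranslationProvider: i18n.NewTranslationProvider(),
}

d := deps.New(depsCfg)
if err := d.LoadResources(); err != nil {
    // handle error
}
// After loading, d.Translate resolves message IDs for the configured language.
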

View file

@ -14,13 +14,13 @@
package source package source
import ( import (
"github.com/spf13/viper"
"testing" "testing"
"github.com/spf13/hugo/hugofs"
"github.com/spf13/viper"
) )
func TestIgnoreDotFilesAndDirectories(t *testing.T) { func TestIgnoreDotFilesAndDirectories(t *testing.T) {
viper.Reset()
defer viper.Reset()
tests := []struct { tests := []struct {
path string path string
@ -49,9 +49,12 @@ func TestIgnoreDotFilesAndDirectories(t *testing.T) {
for _, test := range tests { for _, test := range tests {
viper.Set("ignoreFiles", test.ignoreFilesRegexpes) v := viper.New()
v.Set("ignoreFiles", test.ignoreFilesRegexpes)
if ignored := isNonProcessablePath(test.path); test.ignore != ignored { s := NewSourceSpec(v, hugofs.NewMem(v))
if ignored := s.isNonProcessablePath(test.path); test.ignore != ignored {
t.Errorf("File not ignored. Expected: %t, got: %t", test.ignore, ignored) t.Errorf("File not ignored. Expected: %t, got: %t", test.ignore, ignored)
} }
} }
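
A small sketch of what the rewritten test above exercises (within package source; the regex is hypothetical): the ignoreFiles check now lives on a SourceSpec that owns its config and Fs instead of reading the global viper.

v := viper.New()
v.Set("ignoreFiles", []string{"\\.foo$"})

s := NewSourceSpec(v, hugofs.NewMem(v))
_ = s.isNonProcessablePath("barfoo.foo") // true: matches the ignore pattern
_ = s.isNonProcessablePath("barfoo.md")  // false
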

View file

@ -18,10 +18,21 @@ import (
"path/filepath" "path/filepath"
"strings" "strings"
"github.com/spf13/hugo/hugofs"
"github.com/spf13/hugo/config"
"github.com/spf13/hugo/helpers" "github.com/spf13/hugo/helpers"
"github.com/spf13/viper"
) )
type SourceSpec struct {
Cfg config.Provider
Fs *hugofs.Fs
}
func NewSourceSpec(cfg config.Provider, fs *hugofs.Fs) SourceSpec {
return SourceSpec{Cfg: cfg, Fs: fs}
}
// File represents a source content file. // File represents a source content file.
// All paths are relative from the source directory base // All paths are relative from the source directory base
type File struct { type File struct {
@ -110,15 +121,15 @@ func (f *File) Path() string {
// NewFileWithContents creates a new File pointer with the given relative path and // NewFileWithContents creates a new File pointer with the given relative path and
// content. The language defaults to "en". // content. The language defaults to "en".
func NewFileWithContents(relpath string, content io.Reader) *File { func (sp SourceSpec) NewFileWithContents(relpath string, content io.Reader) *File {
file := NewFile(relpath) file := sp.NewFile(relpath)
file.Contents = content file.Contents = content
file.lang = "en" file.lang = "en"
return file return file
} }
// NewFile creates a new File pointer with the given relative path. // NewFile creates a new File pointer with the given relative path.
func NewFile(relpath string) *File { func (sp SourceSpec) NewFile(relpath string) *File {
f := &File{ f := &File{
relpath: relpath, relpath: relpath,
} }
@ -128,8 +139,8 @@ func NewFile(relpath string) *File {
f.baseName = helpers.Filename(f.LogicalName()) f.baseName = helpers.Filename(f.LogicalName())
lang := strings.TrimPrefix(filepath.Ext(f.baseName), ".") lang := strings.TrimPrefix(filepath.Ext(f.baseName), ".")
if _, ok := viper.GetStringMap("languages")[lang]; lang == "" || !ok { if _, ok := sp.Cfg.GetStringMap("languages")[lang]; lang == "" || !ok {
f.lang = viper.GetString("defaultContentLanguage") f.lang = sp.Cfg.GetString("defaultContentLanguage")
f.translationBaseName = f.baseName f.translationBaseName = f.baseName
} else { } else {
f.lang = lang f.lang = lang
@ -144,11 +155,11 @@ func NewFile(relpath string) *File {
// NewFileFromAbs creates a new File pointer with the given full file path and // NewFileFromAbs creates a new File pointer with the given full file path and
// content. // content.
func NewFileFromAbs(base, fullpath string, content io.Reader) (f *File, err error) { func (sp SourceSpec) NewFileFromAbs(base, fullpath string, content io.Reader) (f *File, err error) {
var name string var name string
if name, err = helpers.GetRelativePath(fullpath, base); err != nil { if name, err = helpers.GetRelativePath(fullpath, base); err != nil {
return nil, err return nil, err
} }
return NewFileWithContents(name, content), nil return sp.NewFileWithContents(name, content), nil
} }
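
In the same spirit, a sketch of the SourceSpec-based constructors above (within package source): the language of a file is resolved from the spec's own config. The language setup below is hypothetical:

v := viper.New()
v.Set("defaultContentLanguage", "en")
v.Set("languages", map[string]interface{}{"en": true, "nn": true})

sp := NewSourceSpec(v, hugofs.NewMem(v))

f1 := sp.NewFile(filepath.FromSlash("post/doc.md"))    // lang "en", from defaultContentLanguage
f2 := sp.NewFile(filepath.FromSlash("post/doc.nn.md")) // lang "nn", from the .nn. infix
_, _ = f1, f2
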

View file

@ -18,28 +18,40 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/spf13/hugo/hugofs"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
func TestFileUniqueID(t *testing.T) { func TestFileUniqueID(t *testing.T) {
ss := newTestSourceSpec()
f1 := File{uniqueID: "123"} f1 := File{uniqueID: "123"}
f2 := NewFile("a") f2 := ss.NewFile("a")
assert.Equal(t, "123", f1.UniqueID()) assert.Equal(t, "123", f1.UniqueID())
assert.Equal(t, "0cc175b9c0f1b6a831c399e269772661", f2.UniqueID()) assert.Equal(t, "0cc175b9c0f1b6a831c399e269772661", f2.UniqueID())
f3 := NewFile(filepath.FromSlash("test1/index.md")) f3 := ss.NewFile(filepath.FromSlash("test1/index.md"))
f4 := NewFile(filepath.FromSlash("test2/index.md")) f4 := ss.NewFile(filepath.FromSlash("test2/index.md"))
assert.NotEqual(t, f3.UniqueID(), f4.UniqueID()) assert.NotEqual(t, f3.UniqueID(), f4.UniqueID())
} }
func TestFileString(t *testing.T) { func TestFileString(t *testing.T) {
assert.Equal(t, "abc", NewFileWithContents("a", strings.NewReader("abc")).String()) ss := newTestSourceSpec()
assert.Equal(t, "", NewFile("a").String()) assert.Equal(t, "abc", ss.NewFileWithContents("a", strings.NewReader("abc")).String())
assert.Equal(t, "", ss.NewFile("a").String())
} }
func TestFileBytes(t *testing.T) { func TestFileBytes(t *testing.T) {
assert.Equal(t, []byte("abc"), NewFileWithContents("a", strings.NewReader("abc")).Bytes()) ss := newTestSourceSpec()
assert.Equal(t, []byte(""), NewFile("a").Bytes()) assert.Equal(t, []byte("abc"), ss.NewFileWithContents("a", strings.NewReader("abc")).Bytes())
assert.Equal(t, []byte(""), ss.NewFile("a").Bytes())
}
func newTestSourceSpec() SourceSpec {
v := viper.New()
return SourceSpec{Fs: hugofs.NewMem(v), Cfg: v}
} }

View file

@ -21,13 +21,10 @@ import (
"runtime" "runtime"
"strings" "strings"
"github.com/spf13/hugo/hugofs" "github.com/spf13/cast"
"golang.org/x/text/unicode/norm"
"github.com/spf13/viper"
"github.com/spf13/hugo/helpers" "github.com/spf13/hugo/helpers"
jww "github.com/spf13/jwalterweatherman" jww "github.com/spf13/jwalterweatherman"
"golang.org/x/text/unicode/norm"
) )
type Input interface { type Input interface {
@ -39,11 +36,11 @@ type Filesystem struct {
Base string Base string
AvoidPaths []string AvoidPaths []string
fs *hugofs.Fs SourceSpec
} }
func NewFilesystem(fs *hugofs.Fs, base string, avoidPaths ...string) *Filesystem { func (sp SourceSpec) NewFilesystem(base string, avoidPaths ...string) *Filesystem {
return &Filesystem{fs: fs, Base: base, AvoidPaths: avoidPaths} return &Filesystem{SourceSpec: sp, Base: base, AvoidPaths: avoidPaths}
} }
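
Hypothetical usage, assuming v and fs as in the other sketches: the content Filesystem is now derived from a SourceSpec, so it walks the spec's Fs and applies the spec's ignoreFiles setting.

sp := NewSourceSpec(v, fs) // v: config.Provider, fs: *hugofs.Fs
contentFs := sp.NewFilesystem(filepath.Join("my", "project", "content"))
for _, f := range contentFs.Files() {
    fmt.Println(f.LogicalName())
}
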
func (f *Filesystem) FilesByExts(exts ...string) []*File { func (f *Filesystem) FilesByExts(exts ...string) []*File {
@ -79,7 +76,7 @@ func (f *Filesystem) add(name string, reader io.Reader) (err error) {
name = norm.NFC.String(name) name = norm.NFC.String(name)
} }
file, err = NewFileFromAbs(f.Base, name, reader) file, err = f.SourceSpec.NewFileFromAbs(f.Base, name, reader)
if err == nil { if err == nil {
f.files = append(f.files, file) f.files = append(f.files, file)
@ -98,7 +95,7 @@ func (f *Filesystem) captureFiles() {
return err return err
} }
if b { if b {
rd, err := NewLazyFileReader(f.fs.Source, filePath) rd, err := NewLazyFileReader(f.Fs.Source, filePath)
if err != nil { if err != nil {
return err return err
} }
@ -107,10 +104,10 @@ func (f *Filesystem) captureFiles() {
return err return err
} }
if f.fs == nil { if f.Fs == nil {
panic("Must have a fs") panic("Must have a fs")
} }
err := helpers.SymbolicWalk(f.fs.Source, f.Base, walker) err := helpers.SymbolicWalk(f.Fs.Source, f.Base, walker)
if err != nil { if err != nil {
jww.ERROR.Println(err) jww.ERROR.Println(err)
@ -128,7 +125,7 @@ func (f *Filesystem) shouldRead(filePath string, fi os.FileInfo) (bool, error) {
jww.ERROR.Printf("Cannot read symbolic link '%s', error was: %s", filePath, err) jww.ERROR.Printf("Cannot read symbolic link '%s', error was: %s", filePath, err)
return false, nil return false, nil
} }
linkfi, err := f.fs.Source.Stat(link) linkfi, err := f.Fs.Source.Stat(link)
if err != nil { if err != nil {
jww.ERROR.Printf("Cannot stat '%s', error was: %s", link, err) jww.ERROR.Printf("Cannot stat '%s', error was: %s", link, err)
return false, nil return false, nil
@ -140,13 +137,13 @@ func (f *Filesystem) shouldRead(filePath string, fi os.FileInfo) (bool, error) {
} }
if fi.IsDir() { if fi.IsDir() {
if f.avoid(filePath) || isNonProcessablePath(filePath) { if f.avoid(filePath) || f.isNonProcessablePath(filePath) {
return false, filepath.SkipDir return false, filepath.SkipDir
} }
return false, nil return false, nil
} }
if isNonProcessablePath(filePath) { if f.isNonProcessablePath(filePath) {
return false, nil return false, nil
} }
return true, nil return true, nil
@ -161,14 +158,14 @@ func (f *Filesystem) avoid(filePath string) bool {
return false return false
} }
func isNonProcessablePath(filePath string) bool { func (s SourceSpec) isNonProcessablePath(filePath string) bool {
base := filepath.Base(filePath) base := filepath.Base(filePath)
if strings.HasPrefix(base, ".") || if strings.HasPrefix(base, ".") ||
strings.HasPrefix(base, "#") || strings.HasPrefix(base, "#") ||
strings.HasSuffix(base, "~") { strings.HasSuffix(base, "~") {
return true return true
} }
ignoreFiles := viper.GetStringSlice("ignoreFiles") ignoreFiles := cast.ToStringSlice(s.Cfg.Get("ignoreFiles"))
if len(ignoreFiles) > 0 { if len(ignoreFiles) > 0 {
for _, ignorePattern := range ignoreFiles { for _, ignorePattern := range ignoreFiles {
match, err := regexp.MatchString(ignorePattern, filePath) match, err := regexp.MatchString(ignorePattern, filePath)

View file

@ -19,12 +19,11 @@ import (
"runtime" "runtime"
"strings" "strings"
"testing" "testing"
"github.com/spf13/hugo/hugofs"
) )
func TestEmptySourceFilesystem(t *testing.T) { func TestEmptySourceFilesystem(t *testing.T) {
src := NewFilesystem(hugofs.NewMem(), "Empty") ss := newTestSourceSpec()
src := ss.NewFilesystem("Empty")
if len(src.Files()) != 0 { if len(src.Files()) != 0 {
t.Errorf("new filesystem should contain 0 files.") t.Errorf("new filesystem should contain 0 files.")
} }
@ -39,12 +38,12 @@ type TestPath struct {
} }
func TestAddFile(t *testing.T) { func TestAddFile(t *testing.T) {
fs := hugofs.NewMem() ss := newTestSourceSpec()
tests := platformPaths tests := platformPaths
for _, test := range tests { for _, test := range tests {
base := platformBase base := platformBase
srcDefault := NewFilesystem(fs, "") srcDefault := ss.NewFilesystem("")
srcWithBase := NewFilesystem(fs, base) srcWithBase := ss.NewFilesystem(base)
for _, src := range []*Filesystem{srcDefault, srcWithBase} { for _, src := range []*Filesystem{srcDefault, srcWithBase} {
@ -100,10 +99,10 @@ func TestUnicodeNorm(t *testing.T) {
{NFC: "é", NFD: "\x65\xcc\x81"}, {NFC: "é", NFD: "\x65\xcc\x81"},
} }
fs := hugofs.NewMem() ss := newTestSourceSpec()
for _, path := range paths { for _, path := range paths {
src := NewFilesystem(fs, "") src := ss.NewFilesystem("")
_ = src.add(path.NFD, strings.NewReader("")) _ = src.add(path.NFD, strings.NewReader(""))
f := src.Files()[0] f := src.Files()[0]
if f.BaseFileName() != path.NFC { if f.BaseFileName() != path.NFC {

View file

@ -13,8 +13,6 @@
package source package source
import "bytes"
type ByteSource struct { type ByteSource struct {
Name string Name string
Content []byte Content []byte
@ -23,15 +21,3 @@ type ByteSource struct {
func (b *ByteSource) String() string { func (b *ByteSource) String() string {
return b.Name + " " + string(b.Content) return b.Name + " " + string(b.Content)
} }
type InMemorySource struct {
ByteSource []ByteSource
}
func (i *InMemorySource) Files() (files []*File) {
files = make([]*File, len(i.ByteSource))
for i, fake := range i.ByteSource {
files[i] = NewFileWithContents(fake.Name, bytes.NewReader(fake.Content))
}
return
}

View file

@ -18,10 +18,11 @@ import (
"testing" "testing"
"github.com/spf13/hugo/hugofs" "github.com/spf13/hugo/hugofs"
"github.com/spf13/viper"
) )
func TestPageTranslator(t *testing.T) { func TestPageTranslator(t *testing.T) {
fs := hugofs.NewMem() fs := hugofs.NewMem(viper.New())
tests := []struct { tests := []struct {
content string content string

View file

@ -21,6 +21,8 @@ import (
"path/filepath" "path/filepath"
"strings" "strings"
"sync"
"github.com/eknkc/amber" "github.com/eknkc/amber"
"github.com/spf13/afero" "github.com/spf13/afero"
bp "github.com/spf13/hugo/bufferpool" bp "github.com/spf13/hugo/bufferpool"
@ -31,6 +33,9 @@ import (
// TODO(bep) globals get rid of the rest of the jww.ERR etc. // TODO(bep) globals get rid of the rest of the jww.ERR etc.
// Protecting global map access (Amber)
var amberMu sync.Mutex
type templateErr struct { type templateErr struct {
name string name string
err error err error
@ -132,6 +137,7 @@ func (t *GoHTMLTemplate) initFuncs(d *deps.Deps) {
t.amberFuncMap = template.FuncMap{} t.amberFuncMap = template.FuncMap{}
amberMu.Lock()
for k, v := range amber.FuncMap { for k, v := range amber.FuncMap {
t.amberFuncMap[k] = v t.amberFuncMap[k] = v
} }
@ -143,6 +149,7 @@ func (t *GoHTMLTemplate) initFuncs(d *deps.Deps) {
panic("should never be invoked") panic("should never be invoked")
} }
} }
amberMu.Unlock()
} }
@ -362,7 +369,9 @@ func (t *GoHTMLTemplate) AddTemplateFile(name, baseTemplatePath, path string) er
return err return err
} }
amberMu.Lock()
templ, err := t.CompileAmberWithTemplate(b, path, t.New(templateName)) templ, err := t.CompileAmberWithTemplate(b, path, t.New(templateName))
amberMu.Unlock()
if err != nil { if err != nil {
return err return err
} }
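
The amberMu additions above are the usual guard for package-level shared state; not Hugo code, just the general shape of the pattern, shown standalone:

package main

import (
    "fmt"
    "sync"
)

// sharedFuncs stands in for a package-global map such as amber.FuncMap.
var (
    sharedFuncs = map[string]interface{}{}
    sharedMu    sync.Mutex
)

func register(name string, f interface{}) {
    sharedMu.Lock()
    defer sharedMu.Unlock()
    sharedFuncs[name] = f
}

func main() {
    var wg sync.WaitGroup
    for i := 0; i < 4; i++ {
        wg.Add(1)
        go func(i int) {
            defer wg.Done()
            register(fmt.Sprintf("func%d", i), func() {}) // safe under `go test -race`
        }(i)
    }
    wg.Wait()
    fmt.Println(len(sharedFuncs)) // 4
}
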
@ -482,11 +491,11 @@ func (t *GoHTMLTemplate) loadTemplates(absPath string, prefix string) {
} }
if needsBase { if needsBase {
layoutDir := helpers.GetLayoutDirPath() layoutDir := t.PathSpec.GetLayoutDirPath()
currBaseFilename := fmt.Sprintf("%s-%s", helpers.Filename(path), baseFileName) currBaseFilename := fmt.Sprintf("%s-%s", helpers.Filename(path), baseFileName)
templateDir := filepath.Dir(path) templateDir := filepath.Dir(path)
themeDir := filepath.Join(helpers.GetThemeDir()) themeDir := filepath.Join(t.PathSpec.GetThemeDir())
relativeThemeLayoutsDir := filepath.Join(helpers.GetRelativeThemeDir(), "layouts") relativeThemeLayoutsDir := filepath.Join(t.PathSpec.GetRelativeThemeDir(), "layouts")
var baseTemplatedDir string var baseTemplatedDir string

View file

@ -113,6 +113,7 @@ F3: {{ Echo (printf "themes/%s-theme" .Site.Params.LOWER) }}
) )
func TestParamsKeysToLower(t *testing.T) { func TestParamsKeysToLower(t *testing.T) {
t.Parallel()
require.Error(t, applyTemplateTransformers(nil)) require.Error(t, applyTemplateTransformers(nil))
@ -190,6 +191,7 @@ func BenchmarkTemplateParamsKeysToLower(b *testing.B) {
} }
func TestParamsKeysToLowerVars(t *testing.T) { func TestParamsKeysToLowerVars(t *testing.T) {
t.Parallel()
var ( var (
ctx = map[string]interface{}{ ctx = map[string]interface{}{
"Params": map[string]interface{}{ "Params": map[string]interface{}{
@ -227,6 +229,7 @@ Blue: {{ $__amber_1.Blue}}
} }
func TestParamsKeysToLowerInBlockTemplate(t *testing.T) { func TestParamsKeysToLowerInBlockTemplate(t *testing.T) {
t.Parallel()
var ( var (
ctx = map[string]interface{}{ ctx = map[string]interface{}{

View file

@ -21,6 +21,7 @@ import (
) )
func TestTruncate(t *testing.T) { func TestTruncate(t *testing.T) {
t.Parallel()
var err error var err error
cases := []struct { cases := []struct {
v1 interface{} v1 interface{}

View file

@ -46,7 +46,6 @@ import (
"github.com/spf13/hugo/deps" "github.com/spf13/hugo/deps"
"github.com/spf13/hugo/helpers" "github.com/spf13/hugo/helpers"
jww "github.com/spf13/jwalterweatherman" jww "github.com/spf13/jwalterweatherman"
"github.com/spf13/viper"
// Importing image codecs for image.DecodeConfig // Importing image codecs for image.DecodeConfig
_ "image/gif" _ "image/gif"
@ -58,7 +57,6 @@ import (
type templateFuncster struct { type templateFuncster struct {
funcMap template.FuncMap funcMap template.FuncMap
cachedPartials partialCache cachedPartials partialCache
*deps.Deps *deps.Deps
} }
@ -398,6 +396,7 @@ func intersect(l1, l2 interface{}) (interface{}, error) {
} }
// ResetCaches resets all caches that might be used during build. // ResetCaches resets all caches that might be used during build.
// TODO(bep) globals move image config cache to funcster
func ResetCaches() { func ResetCaches() {
resetImageConfigCache() resetImageConfigCache()
} }
@ -1357,31 +1356,29 @@ func returnWhenSet(a, k interface{}) interface{} {
} }
// highlight returns an HTML string with syntax highlighting applied. // highlight returns an HTML string with syntax highlighting applied.
func highlight(in interface{}, lang, opts string) (template.HTML, error) { func (t *templateFuncster) highlight(in interface{}, lang, opts string) (template.HTML, error) {
str, err := cast.ToStringE(in) str, err := cast.ToStringE(in)
if err != nil { if err != nil {
return "", err return "", err
} }
return template.HTML(helpers.Highlight(html.UnescapeString(str), lang, opts)), nil return template.HTML(helpers.Highlight(t.Cfg, html.UnescapeString(str), lang, opts)), nil
} }
var markdownTrimPrefix = []byte("<p>") var markdownTrimPrefix = []byte("<p>")
var markdownTrimSuffix = []byte("</p>\n") var markdownTrimSuffix = []byte("</p>\n")
// markdownify renders a given string from Markdown to HTML. // markdownify renders a given string from Markdown to HTML.
func markdownify(in interface{}) (template.HTML, error) { func (t *templateFuncster) markdownify(in interface{}) (template.HTML, error) {
text, err := cast.ToStringE(in) text, err := cast.ToStringE(in)
if err != nil { if err != nil {
return "", err return "", err
} }
language := viper.Get("currentContentLanguage").(*helpers.Language) m := t.ContentSpec.RenderBytes(&helpers.RenderingContext{
Cfg: t.Cfg,
m := helpers.RenderBytes(&helpers.RenderingContext{ Content: []byte(text), PageFmt: "markdown"})
ConfigProvider: language,
Content: []byte(text), PageFmt: "markdown"})
m = bytes.TrimPrefix(m, markdownTrimPrefix) m = bytes.TrimPrefix(m, markdownTrimPrefix)
m = bytes.TrimSuffix(m, markdownTrimSuffix) m = bytes.TrimSuffix(m, markdownTrimSuffix)
return template.HTML(m), nil return template.HTML(m), nil
@ -2143,7 +2140,7 @@ func (t *templateFuncster) initFuncMap() {
"getenv": getenv, "getenv": getenv,
"gt": gt, "gt": gt,
"hasPrefix": hasPrefix, "hasPrefix": hasPrefix,
"highlight": highlight, "highlight": t.highlight,
"htmlEscape": htmlEscape, "htmlEscape": htmlEscape,
"htmlUnescape": htmlUnescape, "htmlUnescape": htmlUnescape,
"humanize": humanize, "humanize": humanize,
@ -2159,7 +2156,7 @@ func (t *templateFuncster) initFuncMap() {
"le": le, "le": le,
"lower": lower, "lower": lower,
"lt": lt, "lt": lt,
"markdownify": markdownify, "markdownify": t.markdownify,
"md5": md5, "md5": md5,
"mod": mod, "mod": mod,
"modBool": modBool, "modBool": modBool,
@ -2211,8 +2208,8 @@ func (t *templateFuncster) initFuncMap() {
"upper": upper, "upper": upper,
"urlize": t.PathSpec.URLize, "urlize": t.PathSpec.URLize,
"where": where, "where": where,
"i18n": i18nTranslate, "i18n": t.Translate,
"T": i18nTranslate, "T": t.Translate,
} }
t.funcMap = funcMap t.funcMap = funcMap
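
highlight, markdownify, i18n and T are now registered as method values on templateFuncster, so each closes over that funcster's Deps instead of reading globals. A standalone sketch of the idiom (not Hugo code):

package main

import (
    "html/template"
    "os"
)

type funcster struct {
    greeting string // stands in for per-site deps such as Cfg or Translate
}

func (f *funcster) greet(name string) string { return f.greeting + ", " + name }

func main() {
    f := &funcster{greeting: "Hello"}

    // The method value f.greet carries its receiver with it, so two sites with
    // different funcsters get different behaviour from the same template func name.
    tmpl := template.Must(template.New("t").
        Funcs(template.FuncMap{"greet": f.greet}).
        Parse(`{{ greet "Hugo" }}`))

    _ = tmpl.Execute(os.Stdout, nil) // Hello, Hugo
}
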

View file

@ -42,7 +42,9 @@ import (
"github.com/spf13/afero" "github.com/spf13/afero"
"github.com/spf13/cast" "github.com/spf13/cast"
"github.com/spf13/hugo/config"
"github.com/spf13/hugo/hugofs" "github.com/spf13/hugo/hugofs"
"github.com/spf13/hugo/i18n"
jww "github.com/spf13/jwalterweatherman" jww "github.com/spf13/jwalterweatherman"
"github.com/spf13/viper" "github.com/spf13/viper"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
@ -53,12 +55,16 @@ var (
logger = jww.NewNotepad(jww.LevelFatal, jww.LevelFatal, os.Stdout, ioutil.Discard, "", log.Ldate|log.Ltime) logger = jww.NewNotepad(jww.LevelFatal, jww.LevelFatal, os.Stdout, ioutil.Discard, "", log.Ldate|log.Ltime)
) )
func newDefaultDepsCfg() deps.DepsCfg { func newDepsConfig(cfg config.Provider) deps.DepsCfg {
l := helpers.NewLanguage("en", cfg)
l.Set("i18nDir", "i18n")
return deps.DepsCfg{ return deps.DepsCfg{
Language: helpers.NewLanguage("en"), Language: l,
Fs: hugofs.NewMem(), Cfg: cfg,
Logger: logger, Fs: hugofs.NewMem(l),
TemplateProvider: DefaultTemplateProvider, Logger: logger,
TemplateProvider: DefaultTemplateProvider,
TranslationProvider: i18n.NewTranslationProvider(),
} }
} }
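`newDepsConfig` takes a `config.Provider` built by the caller, which is what lets the tests below add `t.Parallel()`. A tiny sketch of the underlying point, using spf13/viper directly; the keys and values are made up for illustration:

```go
package main

import (
	"fmt"

	"github.com/spf13/viper"
)

func main() {
	// Each test builds its own instance instead of mutating the package-global
	// viper, so parallel tests no longer step on each other's configuration.
	a := viper.New()
	b := viper.New()
	a.Set("workingDir", "/work/a")
	b.Set("workingDir", "/work/b")
	fmt.Println(a.GetString("workingDir"), b.GetString("workingDir"))
}
```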
@ -88,22 +94,17 @@ func tstIsLt(tp tstCompareType) bool {
return tp == tstLt || tp == tstLe return tp == tstLt || tp == tstLe
} }
func tstInitTemplates() {
viper.Set("CurrentContentLanguage", helpers.NewLanguage("en"))
helpers.ResetConfigProvider()
}
func TestFuncsInTemplate(t *testing.T) { func TestFuncsInTemplate(t *testing.T) {
t.Parallel()
testReset()
workingDir := "/home/hugo" workingDir := "/home/hugo"
viper.Set("workingDir", workingDir) v := viper.New()
viper.Set("currentContentLanguage", helpers.NewDefaultLanguage())
viper.Set("multilingual", true)
fs := hugofs.NewMem() v.Set("workingDir", workingDir)
v.Set("multilingual", true)
fs := hugofs.NewMem(v)
afero.WriteFile(fs.Source, filepath.Join(workingDir, "README.txt"), []byte("Hugo Rocks!"), 0755) afero.WriteFile(fs.Source, filepath.Join(workingDir, "README.txt"), []byte("Hugo Rocks!"), 0755)
@ -268,11 +269,10 @@ urlize: bat-man
data.Section = "blog" data.Section = "blog"
data.Params = map[string]interface{}{"langCode": "en"} data.Params = map[string]interface{}{"langCode": "en"}
viper.Set("baseURL", "http://mysite.com/hugo/") v.Set("baseURL", "http://mysite.com/hugo/")
v.Set("CurrentContentLanguage", helpers.NewLanguage("en", v))
tstInitTemplates() config := newDepsConfig(v)
config := newDefaultDepsCfg()
config.WithTemplate = func(templ tplapi.Template) error { config.WithTemplate = func(templ tplapi.Template) error {
if _, err := templ.New("test").Parse(in); err != nil { if _, err := templ.New("test").Parse(in); err != nil {
t.Fatal("Got error on parse", err) t.Fatal("Got error on parse", err)
@ -282,7 +282,7 @@ urlize: bat-man
config.Fs = fs config.Fs = fs
d := deps.New(config) d := deps.New(config)
if err := d.LoadTemplates(); err != nil { if err := d.LoadResources(); err != nil {
t.Fatal(err) t.Fatal(err)
} }
@ -300,6 +300,7 @@ urlize: bat-man
} }
func TestCompare(t *testing.T) { func TestCompare(t *testing.T) {
t.Parallel()
for _, this := range []struct { for _, this := range []struct {
tstCompareType tstCompareType
funcUnderTest func(a, b interface{}) bool funcUnderTest func(a, b interface{}) bool
@ -370,6 +371,7 @@ func doTestCompare(t *testing.T, tp tstCompareType, funcUnderTest func(a, b inte
} }
func TestMod(t *testing.T) { func TestMod(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
a interface{} a interface{}
b interface{} b interface{}
@ -405,6 +407,7 @@ func TestMod(t *testing.T) {
} }
func TestModBool(t *testing.T) { func TestModBool(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
a interface{} a interface{}
b interface{} b interface{}
@ -445,6 +448,7 @@ func TestModBool(t *testing.T) {
} }
func TestFirst(t *testing.T) { func TestFirst(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
count interface{} count interface{}
sequence interface{} sequence interface{}
@ -480,6 +484,7 @@ func TestFirst(t *testing.T) {
} }
func TestLast(t *testing.T) { func TestLast(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
count interface{} count interface{}
sequence interface{} sequence interface{}
@ -515,6 +520,7 @@ func TestLast(t *testing.T) {
} }
func TestAfter(t *testing.T) { func TestAfter(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
count interface{} count interface{}
sequence interface{} sequence interface{}
@ -550,6 +556,7 @@ func TestAfter(t *testing.T) {
} }
func TestShuffleInputAndOutputFormat(t *testing.T) { func TestShuffleInputAndOutputFormat(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
sequence interface{} sequence interface{}
success bool success bool
@ -588,6 +595,7 @@ func TestShuffleInputAndOutputFormat(t *testing.T) {
} }
func TestShuffleRandomising(t *testing.T) { func TestShuffleRandomising(t *testing.T) {
t.Parallel()
// Note that this test can fail with a false negative result if the shuffle // Note that this test can fail with a false negative result if the shuffle
// of the sequence happens to be the same as the original sequence. However // of the sequence happens to be the same as the original sequence. However
// the probability of the event is 10^-158 which is negligible. // the probability of the event is 10^-158 which is negligible.
@ -615,6 +623,7 @@ func TestShuffleRandomising(t *testing.T) {
} }
func TestDictionary(t *testing.T) { func TestDictionary(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
v1 []interface{} v1 []interface{}
expecterr bool expecterr bool
@ -647,13 +656,15 @@ func blankImage(width, height int) []byte {
} }
func TestImageConfig(t *testing.T) { func TestImageConfig(t *testing.T) {
testReset() t.Parallel()
workingDir := "/home/hugo" workingDir := "/home/hugo"
viper.Set("workingDir", workingDir) v := viper.New()
f := newTestFuncster() v.Set("workingDir", workingDir)
f := newTestFuncsterWithViper(v)
for i, this := range []struct { for i, this := range []struct {
resetCache bool resetCache bool
@ -754,6 +765,7 @@ func TestImageConfig(t *testing.T) {
} }
func TestIn(t *testing.T) { func TestIn(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
v1 interface{} v1 interface{}
v2 interface{} v2 interface{}
@ -783,6 +795,7 @@ func TestIn(t *testing.T) {
} }
func TestSlicestr(t *testing.T) { func TestSlicestr(t *testing.T) {
t.Parallel()
var err error var err error
for i, this := range []struct { for i, this := range []struct {
v1 interface{} v1 interface{}
@ -848,6 +861,7 @@ func TestSlicestr(t *testing.T) {
} }
func TestHasPrefix(t *testing.T) { func TestHasPrefix(t *testing.T) {
t.Parallel()
cases := []struct { cases := []struct {
s interface{} s interface{}
prefix interface{} prefix interface{}
@ -875,6 +889,7 @@ func TestHasPrefix(t *testing.T) {
} }
func TestSubstr(t *testing.T) { func TestSubstr(t *testing.T) {
t.Parallel()
var err error var err error
var n int var n int
for i, this := range []struct { for i, this := range []struct {
@ -952,6 +967,7 @@ func TestSubstr(t *testing.T) {
} }
func TestSplit(t *testing.T) { func TestSplit(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
v1 interface{} v1 interface{}
v2 string v2 string
@ -982,6 +998,7 @@ func TestSplit(t *testing.T) {
} }
func TestIntersect(t *testing.T) { func TestIntersect(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
sequence1 interface{} sequence1 interface{}
sequence2 interface{} sequence2 interface{}
@ -1025,6 +1042,7 @@ func TestIntersect(t *testing.T) {
} }
func TestIsSet(t *testing.T) { func TestIsSet(t *testing.T) {
t.Parallel()
aSlice := []interface{}{1, 2, 3, 5} aSlice := []interface{}{1, 2, 3, 5}
aMap := map[string]interface{}{"a": 1, "b": 2} aMap := map[string]interface{}{"a": 1, "b": 2}
@ -1074,6 +1092,7 @@ type TstX struct {
} }
func TestTimeUnix(t *testing.T) { func TestTimeUnix(t *testing.T) {
t.Parallel()
var sec int64 = 1234567890 var sec int64 = 1234567890
tv := reflect.ValueOf(time.Unix(sec, 0)) tv := reflect.ValueOf(time.Unix(sec, 0))
i := 1 i := 1
@ -1096,6 +1115,7 @@ func TestTimeUnix(t *testing.T) {
} }
func TestEvaluateSubElem(t *testing.T) { func TestEvaluateSubElem(t *testing.T) {
t.Parallel()
tstx := TstX{A: "foo", B: "bar"} tstx := TstX{A: "foo", B: "bar"}
var inner struct { var inner struct {
S fmt.Stringer S fmt.Stringer
@ -1146,6 +1166,7 @@ func TestEvaluateSubElem(t *testing.T) {
} }
func TestCheckCondition(t *testing.T) { func TestCheckCondition(t *testing.T) {
t.Parallel()
type expect struct { type expect struct {
result bool result bool
isError bool isError bool
@ -1266,6 +1287,7 @@ func TestCheckCondition(t *testing.T) {
} }
func TestWhere(t *testing.T) { func TestWhere(t *testing.T) {
t.Parallel()
type Mid struct { type Mid struct {
Tst TstX Tst TstX
@ -1671,6 +1693,7 @@ func TestWhere(t *testing.T) {
} }
func TestDelimit(t *testing.T) { func TestDelimit(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
sequence interface{} sequence interface{}
delimiter interface{} delimiter interface{}
@ -1720,6 +1743,7 @@ func TestDelimit(t *testing.T) {
} }
func TestSort(t *testing.T) { func TestSort(t *testing.T) {
t.Parallel()
type ts struct { type ts struct {
MyInt int MyInt int
MyFloat float64 MyFloat float64
@ -1932,6 +1956,7 @@ func TestSort(t *testing.T) {
} }
func TestReturnWhenSet(t *testing.T) { func TestReturnWhenSet(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
data interface{} data interface{}
key interface{} key interface{}
@ -1957,7 +1982,10 @@ func TestReturnWhenSet(t *testing.T) {
} }
func TestMarkdownify(t *testing.T) { func TestMarkdownify(t *testing.T) {
viper.Set("currentContentLanguage", helpers.NewDefaultLanguage()) t.Parallel()
v := viper.New()
f := newTestFuncsterWithViper(v)
for i, this := range []struct { for i, this := range []struct {
in interface{} in interface{}
@ -1966,7 +1994,7 @@ func TestMarkdownify(t *testing.T) {
{"Hello **World!**", template.HTML("Hello <strong>World!</strong>")}, {"Hello **World!**", template.HTML("Hello <strong>World!</strong>")},
{[]byte("Hello Bytes **World!**"), template.HTML("Hello Bytes <strong>World!</strong>")}, {[]byte("Hello Bytes **World!**"), template.HTML("Hello Bytes <strong>World!</strong>")},
} { } {
result, err := markdownify(this.in) result, err := f.markdownify(this.in)
if err != nil { if err != nil {
t.Fatalf("[%d] unexpected error in markdownify: %s", i, err) t.Fatalf("[%d] unexpected error in markdownify: %s", i, err)
} }
@ -1975,12 +2003,13 @@ func TestMarkdownify(t *testing.T) {
} }
} }
if _, err := markdownify(t); err == nil { if _, err := f.markdownify(t); err == nil {
t.Fatalf("markdownify should have errored") t.Fatalf("markdownify should have errored")
} }
} }
func TestApply(t *testing.T) { func TestApply(t *testing.T) {
t.Parallel()
f := newTestFuncster() f := newTestFuncster()
@ -2024,6 +2053,7 @@ func TestApply(t *testing.T) {
} }
func TestChomp(t *testing.T) { func TestChomp(t *testing.T) {
t.Parallel()
base := "\n This is\na story " base := "\n This is\na story "
for i, item := range []string{ for i, item := range []string{
"\n", "\n\n", "\n", "\n\n",
@ -2046,6 +2076,7 @@ func TestChomp(t *testing.T) {
} }
func TestLower(t *testing.T) { func TestLower(t *testing.T) {
t.Parallel()
cases := []struct { cases := []struct {
s interface{} s interface{}
want string want string
@ -2069,6 +2100,7 @@ func TestLower(t *testing.T) {
} }
func TestTitle(t *testing.T) { func TestTitle(t *testing.T) {
t.Parallel()
cases := []struct { cases := []struct {
s interface{} s interface{}
want string want string
@ -2092,6 +2124,7 @@ func TestTitle(t *testing.T) {
} }
func TestUpper(t *testing.T) { func TestUpper(t *testing.T) {
t.Parallel()
cases := []struct { cases := []struct {
s interface{} s interface{}
want string want string
@ -2115,8 +2148,12 @@ func TestUpper(t *testing.T) {
} }
func TestHighlight(t *testing.T) { func TestHighlight(t *testing.T) {
t.Parallel()
code := "func boo() {}" code := "func boo() {}"
highlighted, err := highlight(code, "go", "")
f := newTestFuncster()
highlighted, err := f.highlight(code, "go", "")
if err != nil { if err != nil {
t.Fatal("Highlight returned error:", err) t.Fatal("Highlight returned error:", err)
@ -2127,7 +2164,7 @@ func TestHighlight(t *testing.T) {
t.Errorf("Highlight mismatch, got %v", highlighted) t.Errorf("Highlight mismatch, got %v", highlighted)
} }
_, err = highlight(t, "go", "") _, err = f.highlight(t, "go", "")
if err == nil { if err == nil {
t.Error("Expected highlight error") t.Error("Expected highlight error")
@ -2135,6 +2172,7 @@ func TestHighlight(t *testing.T) {
} }
func TestInflect(t *testing.T) { func TestInflect(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
inflectFunc func(i interface{}) (string, error) inflectFunc func(i interface{}) (string, error)
in interface{} in interface{}
@ -2169,6 +2207,7 @@ func TestInflect(t *testing.T) {
} }
func TestCounterFuncs(t *testing.T) { func TestCounterFuncs(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
countFunc func(i interface{}) (int, error) countFunc func(i interface{}) (int, error)
in string in string
@ -2195,6 +2234,7 @@ func TestCounterFuncs(t *testing.T) {
} }
func TestReplace(t *testing.T) { func TestReplace(t *testing.T) {
t.Parallel()
v, _ := replace("aab", "a", "b") v, _ := replace("aab", "a", "b")
assert.Equal(t, "bbb", v) assert.Equal(t, "bbb", v)
v, _ = replace("11a11", 1, 2) v, _ = replace("11a11", 1, 2)
@ -2210,6 +2250,7 @@ func TestReplace(t *testing.T) {
} }
func TestReplaceRE(t *testing.T) { func TestReplaceRE(t *testing.T) {
t.Parallel()
for i, val := range []struct { for i, val := range []struct {
pattern interface{} pattern interface{}
repl interface{} repl interface{}
@ -2234,6 +2275,7 @@ func TestReplaceRE(t *testing.T) {
} }
func TestFindRE(t *testing.T) { func TestFindRE(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
expr string expr string
content interface{} content interface{}
@ -2264,6 +2306,7 @@ func TestFindRE(t *testing.T) {
} }
func TestTrim(t *testing.T) { func TestTrim(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
v1 interface{} v1 interface{}
@ -2294,6 +2337,7 @@ func TestTrim(t *testing.T) {
} }
func TestDateFormat(t *testing.T) { func TestDateFormat(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
layout string layout string
value interface{} value interface{}
@ -2328,6 +2372,7 @@ func TestDateFormat(t *testing.T) {
} }
func TestDefaultFunc(t *testing.T) { func TestDefaultFunc(t *testing.T) {
t.Parallel()
then := time.Now() then := time.Now()
now := time.Now() now := time.Now()
@ -2385,6 +2430,7 @@ func TestDefaultFunc(t *testing.T) {
} }
func TestDefault(t *testing.T) { func TestDefault(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
input interface{} input interface{}
tpl string tpl string
@ -2414,6 +2460,7 @@ func TestDefault(t *testing.T) {
} }
func TestSafeHTML(t *testing.T) { func TestSafeHTML(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
str string str string
tmplStr string tmplStr string
@ -2454,6 +2501,7 @@ func TestSafeHTML(t *testing.T) {
} }
func TestSafeHTMLAttr(t *testing.T) { func TestSafeHTMLAttr(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
str string str string
tmplStr string tmplStr string
@ -2494,6 +2542,7 @@ func TestSafeHTMLAttr(t *testing.T) {
} }
func TestSafeCSS(t *testing.T) { func TestSafeCSS(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
str string str string
tmplStr string tmplStr string
@ -2535,6 +2584,7 @@ func TestSafeCSS(t *testing.T) {
// TODO(bep) what is this? Also look above. // TODO(bep) what is this? Also look above.
func TestSafeJS(t *testing.T) { func TestSafeJS(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
str string str string
tmplStr string tmplStr string
@ -2576,6 +2626,7 @@ func TestSafeJS(t *testing.T) {
// TODO(bep) what is this? // TODO(bep) what is this?
func TestSafeURL(t *testing.T) { func TestSafeURL(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
str string str string
tmplStr string tmplStr string
@ -2616,6 +2667,7 @@ func TestSafeURL(t *testing.T) {
} }
func TestBase64Decode(t *testing.T) { func TestBase64Decode(t *testing.T) {
t.Parallel()
testStr := "abc123!?$*&()'-=@~" testStr := "abc123!?$*&()'-=@~"
enc := base64.StdEncoding.EncodeToString([]byte(testStr)) enc := base64.StdEncoding.EncodeToString([]byte(testStr))
result, err := base64Decode(enc) result, err := base64Decode(enc)
@ -2635,6 +2687,7 @@ func TestBase64Decode(t *testing.T) {
} }
func TestBase64Encode(t *testing.T) { func TestBase64Encode(t *testing.T) {
t.Parallel()
testStr := "YWJjMTIzIT8kKiYoKSctPUB+" testStr := "YWJjMTIzIT8kKiYoKSctPUB+"
dec, err := base64.StdEncoding.DecodeString(testStr) dec, err := base64.StdEncoding.DecodeString(testStr)
@ -2659,6 +2712,7 @@ func TestBase64Encode(t *testing.T) {
} }
func TestMD5(t *testing.T) { func TestMD5(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
input string input string
expectedHash string expectedHash string
@ -2683,6 +2737,7 @@ func TestMD5(t *testing.T) {
} }
func TestSHA1(t *testing.T) { func TestSHA1(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
input string input string
expectedHash string expectedHash string
@ -2707,6 +2762,7 @@ func TestSHA1(t *testing.T) {
} }
func TestSHA256(t *testing.T) { func TestSHA256(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
input string input string
expectedHash string expectedHash string
@ -2731,13 +2787,15 @@ func TestSHA256(t *testing.T) {
} }
func TestReadFile(t *testing.T) { func TestReadFile(t *testing.T) {
testReset() t.Parallel()
workingDir := "/home/hugo" workingDir := "/home/hugo"
viper.Set("workingDir", workingDir) v := viper.New()
f := newTestFuncster() v.Set("workingDir", workingDir)
f := newTestFuncsterWithViper(v)
afero.WriteFile(f.Fs.Source, filepath.Join(workingDir, "/f/f1.txt"), []byte("f1-content"), 0755) afero.WriteFile(f.Fs.Source, filepath.Join(workingDir, "/f/f1.txt"), []byte("f1-content"), 0755)
afero.WriteFile(f.Fs.Source, filepath.Join("/home", "f2.txt"), []byte("f2-content"), 0755) afero.WriteFile(f.Fs.Source, filepath.Join("/home", "f2.txt"), []byte("f2-content"), 0755)
@ -2770,6 +2828,7 @@ func TestReadFile(t *testing.T) {
} }
func TestPartialCached(t *testing.T) { func TestPartialCached(t *testing.T) {
t.Parallel()
testCases := []struct { testCases := []struct {
name string name string
partial string partial string
@ -2793,7 +2852,6 @@ func TestPartialCached(t *testing.T) {
data.Section = "blog" data.Section = "blog"
data.Params = map[string]interface{}{"langCode": "en"} data.Params = map[string]interface{}{"langCode": "en"}
tstInitTemplates()
for i, tc := range testCases { for i, tc := range testCases {
var tmp string var tmp string
if tc.variant != "" { if tc.variant != "" {
@ -2802,9 +2860,9 @@ func TestPartialCached(t *testing.T) {
tmp = tc.tmpl tmp = tc.tmpl
} }
cfg := newDefaultDepsCfg() config := newDepsConfig(viper.New())
cfg.WithTemplate = func(templ tplapi.Template) error { config.WithTemplate = func(templ tplapi.Template) error {
err := templ.AddTemplate("testroot", tmp) err := templ.AddTemplate("testroot", tmp)
if err != nil { if err != nil {
return err return err
@ -2817,8 +2875,8 @@ func TestPartialCached(t *testing.T) {
return nil return nil
} }
de := deps.New(cfg) de := deps.New(config)
require.NoError(t, de.LoadTemplates()) require.NoError(t, de.LoadResources())
buf := new(bytes.Buffer) buf := new(bytes.Buffer)
templ := de.Tmpl.Lookup("testroot") templ := de.Tmpl.Lookup("testroot")
@ -2842,8 +2900,8 @@ func TestPartialCached(t *testing.T) {
} }
func BenchmarkPartial(b *testing.B) { func BenchmarkPartial(b *testing.B) {
cfg := newDefaultDepsCfg() config := newDepsConfig(viper.New())
cfg.WithTemplate = func(templ tplapi.Template) error { config.WithTemplate = func(templ tplapi.Template) error {
err := templ.AddTemplate("testroot", `{{ partial "bench1" . }}`) err := templ.AddTemplate("testroot", `{{ partial "bench1" . }}`)
if err != nil { if err != nil {
return err return err
@ -2856,8 +2914,8 @@ func BenchmarkPartial(b *testing.B) {
return nil return nil
} }
de := deps.New(cfg) de := deps.New(config)
require.NoError(b, de.LoadTemplates()) require.NoError(b, de.LoadResources())
buf := new(bytes.Buffer) buf := new(bytes.Buffer)
tmpl := de.Tmpl.Lookup("testroot") tmpl := de.Tmpl.Lookup("testroot")
@ -2873,8 +2931,8 @@ func BenchmarkPartial(b *testing.B) {
} }
func BenchmarkPartialCached(b *testing.B) { func BenchmarkPartialCached(b *testing.B) {
cfg := newDefaultDepsCfg() config := newDepsConfig(viper.New())
cfg.WithTemplate = func(templ tplapi.Template) error { config.WithTemplate = func(templ tplapi.Template) error {
err := templ.AddTemplate("testroot", `{{ partialCached "bench1" . }}`) err := templ.AddTemplate("testroot", `{{ partialCached "bench1" . }}`)
if err != nil { if err != nil {
return err return err
@ -2887,8 +2945,8 @@ func BenchmarkPartialCached(b *testing.B) {
return nil return nil
} }
de := deps.New(cfg) de := deps.New(config)
require.NoError(b, de.LoadTemplates()) require.NoError(b, de.LoadResources())
buf := new(bytes.Buffer) buf := new(bytes.Buffer)
tmpl := de.Tmpl.Lookup("testroot") tmpl := de.Tmpl.Lookup("testroot")
@ -2904,9 +2962,14 @@ func BenchmarkPartialCached(b *testing.B) {
} }
func newTestFuncster() *templateFuncster { func newTestFuncster() *templateFuncster {
cfg := newDefaultDepsCfg() return newTestFuncsterWithViper(viper.New())
d := deps.New(cfg) }
if err := d.LoadTemplates(); err != nil {
func newTestFuncsterWithViper(v *viper.Viper) *templateFuncster {
config := newDepsConfig(v)
d := deps.New(config)
if err := d.LoadResources(); err != nil {
panic(err) panic(err)
} }
@ -2914,8 +2977,8 @@ func newTestFuncster() *templateFuncster {
} }
func newTestTemplate(t *testing.T, name, template string) *template.Template { func newTestTemplate(t *testing.T, name, template string) *template.Template {
cfg := newDefaultDepsCfg() config := newDepsConfig(viper.New())
cfg.WithTemplate = func(templ tplapi.Template) error { config.WithTemplate = func(templ tplapi.Template) error {
err := templ.AddTemplate(name, template) err := templ.AddTemplate(name, template)
if err != nil { if err != nil {
return err return err
@ -2923,8 +2986,8 @@ func newTestTemplate(t *testing.T, name, template string) *template.Template {
return nil return nil
} }
de := deps.New(cfg) de := deps.New(config)
require.NoError(t, de.LoadTemplates()) require.NoError(t, de.LoadResources())
return de.Tmpl.Lookup(name) return de.Tmpl.Lookup(name)
} }


@ -27,9 +27,9 @@ import (
"time" "time"
"github.com/spf13/afero" "github.com/spf13/afero"
"github.com/spf13/hugo/config"
"github.com/spf13/hugo/helpers" "github.com/spf13/hugo/helpers"
jww "github.com/spf13/jwalterweatherman" jww "github.com/spf13/jwalterweatherman"
"github.com/spf13/viper"
) )
var ( var (
@ -63,17 +63,17 @@ func (l *remoteLock) URLUnlock(url string) {
} }
// getCacheFileID returns the cache ID for a string // getCacheFileID returns the cache ID for a string
func getCacheFileID(id string) string { func getCacheFileID(cfg config.Provider, id string) string {
return viper.GetString("cacheDir") + url.QueryEscape(id) return cfg.GetString("cacheDir") + url.QueryEscape(id)
} }
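`getCacheFileID` now takes the `config.Provider` explicitly; the naming scheme itself is unchanged. A standalone sketch of what it produces (the cacheDir value here is made up for illustration):

```go
package main

import (
	"fmt"
	"net/url"
)

// cacheFileID mirrors getCacheFileID above: the configured cacheDir with the
// resource URL query-escaped onto the end.
func cacheFileID(cacheDir, id string) string {
	return cacheDir + url.QueryEscape(id)
}

func main() {
	fmt.Println(cacheFileID("/tmp/hugo_cache/", "https://example.org/data.json"))
	// /tmp/hugo_cache/https%3A%2F%2Fexample.org%2Fdata.json
}
```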
// resGetCache returns the content for an ID from the file cache or an error // resGetCache returns the content for an ID from the file cache or an error
// if the file is not found returns nil,nil // if the file is not found returns nil,nil
func resGetCache(id string, fs afero.Fs, ignoreCache bool) ([]byte, error) { func resGetCache(id string, fs afero.Fs, cfg config.Provider, ignoreCache bool) ([]byte, error) {
if ignoreCache { if ignoreCache {
return nil, nil return nil, nil
} }
fID := getCacheFileID(id) fID := getCacheFileID(cfg, id)
isExists, err := helpers.Exists(fID, fs) isExists, err := helpers.Exists(fID, fs)
if err != nil { if err != nil {
return nil, err return nil, err
@ -87,11 +87,11 @@ func resGetCache(id string, fs afero.Fs, ignoreCache bool) ([]byte, error) {
} }
// resWriteCache writes bytes to an ID into the file cache // resWriteCache writes bytes to an ID into the file cache
func resWriteCache(id string, c []byte, fs afero.Fs, ignoreCache bool) error { func resWriteCache(id string, c []byte, fs afero.Fs, cfg config.Provider, ignoreCache bool) error {
if ignoreCache { if ignoreCache {
return nil return nil
} }
fID := getCacheFileID(id) fID := getCacheFileID(cfg, id)
f, err := fs.Create(fID) f, err := fs.Create(fID)
if err != nil { if err != nil {
return errors.New("Error: " + err.Error() + ". Failed to create file: " + fID) return errors.New("Error: " + err.Error() + ". Failed to create file: " + fID)
@ -107,13 +107,13 @@ func resWriteCache(id string, c []byte, fs afero.Fs, ignoreCache bool) error {
return nil return nil
} }
func resDeleteCache(id string, fs afero.Fs) error { func resDeleteCache(id string, fs afero.Fs, cfg config.Provider) error {
return fs.Remove(getCacheFileID(id)) return fs.Remove(getCacheFileID(cfg, id))
} }
// resGetRemote loads the content of a remote file. This method is thread safe. // resGetRemote loads the content of a remote file. This method is thread safe.
func resGetRemote(url string, fs afero.Fs, hc *http.Client) ([]byte, error) { func resGetRemote(url string, fs afero.Fs, cfg config.Provider, hc *http.Client) ([]byte, error) {
c, err := resGetCache(url, fs, viper.GetBool("ignoreCache")) c, err := resGetCache(url, fs, cfg, cfg.GetBool("ignoreCache"))
if c != nil && err == nil { if c != nil && err == nil {
return c, nil return c, nil
} }
@ -126,7 +126,7 @@ func resGetRemote(url string, fs afero.Fs, hc *http.Client) ([]byte, error) {
defer func() { remoteURLLock.URLUnlock(url) }() defer func() { remoteURLLock.URLUnlock(url) }()
// avoid multiple locks due to calling resGetCache twice // avoid multiple locks due to calling resGetCache twice
c, err = resGetCache(url, fs, viper.GetBool("ignoreCache")) c, err = resGetCache(url, fs, cfg, cfg.GetBool("ignoreCache"))
if c != nil && err == nil { if c != nil && err == nil {
return c, nil return c, nil
} }
@ -144,17 +144,17 @@ func resGetRemote(url string, fs afero.Fs, hc *http.Client) ([]byte, error) {
if err != nil { if err != nil {
return nil, err return nil, err
} }
err = resWriteCache(url, c, fs, viper.GetBool("ignoreCache")) err = resWriteCache(url, c, fs, cfg, cfg.GetBool("ignoreCache"))
if err != nil { if err != nil {
return nil, err return nil, err
} }
jww.INFO.Printf("... and cached to: %s", getCacheFileID(url)) jww.INFO.Printf("... and cached to: %s", getCacheFileID(cfg, url))
return c, nil return c, nil
} }
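`resGetRemote` keeps its existing concurrency shape; only the configuration source changes. For reference, a self-contained sketch of that check-lock-recheck pattern, with an in-memory map standing in for Hugo's file cache and per-URL lock:

```go
package main

import (
	"fmt"
	"sync"
)

type fetcher struct {
	mu    sync.RWMutex
	cache map[string][]byte
}

func (f *fetcher) lookup(url string) []byte {
	f.mu.RLock()
	defer f.mu.RUnlock()
	return f.cache[url]
}

// get checks the cache, takes the lock, and checks again before fetching --
// the same shape as resGetRemote above, so a goroutine that waited on the
// lock does not refetch a resource another goroutine just cached.
func (f *fetcher) get(url string, fetch func(string) []byte) []byte {
	if c := f.lookup(url); c != nil {
		return c
	}
	f.mu.Lock()
	defer f.mu.Unlock()
	if c := f.cache[url]; c != nil { // fetched while we waited for the lock
		return c
	}
	c := fetch(url)
	f.cache[url] = c
	return c
}

func main() {
	f := &fetcher{cache: map[string][]byte{}}
	calls := 0
	fetch := func(u string) []byte { calls++; return []byte("payload for " + u) }
	f.get("https://example.org/data.json", fetch)
	f.get("https://example.org/data.json", fetch)
	fmt.Println("fetches:", calls) // fetches: 1
}
```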
// resGetLocal loads the content of a local file // resGetLocal loads the content of a local file
func resGetLocal(url string, fs afero.Fs) ([]byte, error) { func resGetLocal(url string, fs afero.Fs, cfg config.Provider) ([]byte, error) {
filename := filepath.Join(viper.GetString("workingDir"), url) filename := filepath.Join(cfg.GetString("workingDir"), url)
if e, err := helpers.Exists(filename, fs); !e { if e, err := helpers.Exists(filename, fs); !e {
return nil, err return nil, err
} }
@ -169,9 +169,9 @@ func (t *templateFuncster) resGetResource(url string) ([]byte, error) {
return nil, nil return nil, nil
} }
if strings.Contains(url, "://") { if strings.Contains(url, "://") {
return resGetRemote(url, t.Fs.Source, http.DefaultClient) return resGetRemote(url, t.Fs.Source, t.Cfg, http.DefaultClient)
} }
return resGetLocal(url, t.Fs.Source) return resGetLocal(url, t.Fs.Source, t.Cfg)
} }
// getJSON expects one or n-parts of a URL to a resource which can either be a local or a remote one. // getJSON expects one or n-parts of a URL to a resource which can either be a local or a remote one.
@ -193,7 +193,7 @@ func (t *templateFuncster) getJSON(urlParts ...string) interface{} {
jww.ERROR.Printf("Cannot read json from resource %s with error message %s", url, err) jww.ERROR.Printf("Cannot read json from resource %s with error message %s", url, err)
jww.ERROR.Printf("Retry #%d for %s and sleeping for %s", i, url, resSleep) jww.ERROR.Printf("Retry #%d for %s and sleeping for %s", i, url, resSleep)
time.Sleep(resSleep) time.Sleep(resSleep)
resDeleteCache(url, t.Fs.Source) resDeleteCache(url, t.Fs.Source, t.Cfg)
continue continue
} }
break break
@ -226,7 +226,7 @@ func (t *templateFuncster) getCSV(sep string, urlParts ...string) [][]string {
var clearCacheSleep = func(i int, u string) { var clearCacheSleep = func(i int, u string) {
jww.ERROR.Printf("Retry #%d for %s and sleeping for %s", i, url, resSleep) jww.ERROR.Printf("Retry #%d for %s and sleeping for %s", i, url, resSleep)
time.Sleep(resSleep) time.Sleep(resSleep)
resDeleteCache(url, t.Fs.Source) resDeleteCache(url, t.Fs.Source, t.Cfg)
} }
for i := 0; i <= resRetries; i++ { for i := 0; i <= resRetries; i++ {


@ -19,10 +19,8 @@ import (
"net/http" "net/http"
"net/http/httptest" "net/http/httptest"
"net/url" "net/url"
"os"
"strings" "strings"
"testing" "testing"
"time"
"github.com/spf13/afero" "github.com/spf13/afero"
"github.com/spf13/hugo/helpers" "github.com/spf13/hugo/helpers"
@ -32,6 +30,7 @@ import (
) )
func TestScpCache(t *testing.T) { func TestScpCache(t *testing.T) {
t.Parallel()
tests := []struct { tests := []struct {
path string path string
@ -50,7 +49,8 @@ func TestScpCache(t *testing.T) {
fs := new(afero.MemMapFs) fs := new(afero.MemMapFs)
for _, test := range tests { for _, test := range tests {
c, err := resGetCache(test.path, fs, test.ignore) cfg := viper.New()
c, err := resGetCache(test.path, fs, cfg, test.ignore)
if err != nil { if err != nil {
t.Errorf("Error getting cache: %s", err) t.Errorf("Error getting cache: %s", err)
} }
@ -58,12 +58,12 @@ func TestScpCache(t *testing.T) {
t.Errorf("There is content where there should not be anything: %s", string(c)) t.Errorf("There is content where there should not be anything: %s", string(c))
} }
err = resWriteCache(test.path, test.content, fs, test.ignore) err = resWriteCache(test.path, test.content, fs, cfg, test.ignore)
if err != nil { if err != nil {
t.Errorf("Error writing cache: %s", err) t.Errorf("Error writing cache: %s", err)
} }
c, err = resGetCache(test.path, fs, test.ignore) c, err = resGetCache(test.path, fs, cfg, test.ignore)
if err != nil { if err != nil {
t.Errorf("Error getting cache after writing: %s", err) t.Errorf("Error getting cache after writing: %s", err)
} }
@ -80,8 +80,9 @@ func TestScpCache(t *testing.T) {
} }
func TestScpGetLocal(t *testing.T) { func TestScpGetLocal(t *testing.T) {
testReset() t.Parallel()
fs := hugofs.NewMem() v := viper.New()
fs := hugofs.NewMem(v)
ps := helpers.FilePathSeparator ps := helpers.FilePathSeparator
tests := []struct { tests := []struct {
@ -102,7 +103,7 @@ func TestScpGetLocal(t *testing.T) {
t.Error(err) t.Error(err)
} }
c, err := resGetLocal(test.path, fs.Source) c, err := resGetLocal(test.path, fs.Source, v)
if err != nil { if err != nil {
t.Errorf("Error getting resource content: %s", err) t.Errorf("Error getting resource content: %s", err)
} }
@ -126,6 +127,7 @@ func getTestServer(handler func(w http.ResponseWriter, r *http.Request)) (*httpt
} }
func TestScpGetRemote(t *testing.T) { func TestScpGetRemote(t *testing.T) {
t.Parallel()
fs := new(afero.MemMapFs) fs := new(afero.MemMapFs)
tests := []struct { tests := []struct {
@ -146,14 +148,16 @@ func TestScpGetRemote(t *testing.T) {
}) })
defer func() { srv.Close() }() defer func() { srv.Close() }()
c, err := resGetRemote(test.path, fs, cl) cfg := viper.New()
c, err := resGetRemote(test.path, fs, cfg, cl)
if err != nil { if err != nil {
t.Errorf("Error getting resource content: %s", err) t.Errorf("Error getting resource content: %s", err)
} }
if !bytes.Equal(c, test.content) { if !bytes.Equal(c, test.content) {
t.Errorf("\nNet Expected: %s\nNet Actual: %s\n", string(test.content), string(c)) t.Errorf("\nNet Expected: %s\nNet Actual: %s\n", string(test.content), string(c))
} }
cc, cErr := resGetCache(test.path, fs, test.ignore) cc, cErr := resGetCache(test.path, fs, cfg, test.ignore)
if cErr != nil { if cErr != nil {
t.Error(cErr) t.Error(cErr)
} }
@ -170,6 +174,7 @@ func TestScpGetRemote(t *testing.T) {
} }
func TestParseCSV(t *testing.T) { func TestParseCSV(t *testing.T) {
t.Parallel()
tests := []struct { tests := []struct {
csv []byte csv []byte
@ -208,29 +213,11 @@ func TestParseCSV(t *testing.T) {
} }
} }
// https://twitter.com/francesc/status/603066617124126720
// for the construct: defer testRetryWhenDone().Reset()
type wd struct {
Reset func()
}
func testRetryWhenDone(f *templateFuncster) wd {
cd := viper.GetString("cacheDir")
viper.Set("cacheDir", helpers.GetTempDir("", f.Fs.Source))
var tmpSleep time.Duration
tmpSleep, resSleep = resSleep, time.Millisecond
return wd{func() {
viper.Set("cacheDir", cd)
resSleep = tmpSleep
}}
}
func TestGetJSONFailParse(t *testing.T) { func TestGetJSONFailParse(t *testing.T) {
t.Parallel()
f := newTestFuncster() f := newTestFuncster()
defer testRetryWhenDone(f).Reset()
reqCount := 0 reqCount := 0
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if reqCount > 0 { if reqCount > 0 {
@ -244,7 +231,6 @@ func TestGetJSONFailParse(t *testing.T) {
})) }))
defer ts.Close() defer ts.Close()
url := ts.URL + "/test.json" url := ts.URL + "/test.json"
defer os.Remove(getCacheFileID(url))
want := map[string]interface{}{"gomeetup": []interface{}{"Sydney", "San Francisco", "Stockholm"}} want := map[string]interface{}{"gomeetup": []interface{}{"Sydney", "San Francisco", "Stockholm"}}
have := f.getJSON(url) have := f.getJSON(url)
@ -255,10 +241,9 @@ func TestGetJSONFailParse(t *testing.T) {
} }
func TestGetCSVFailParseSep(t *testing.T) { func TestGetCSVFailParseSep(t *testing.T) {
t.Parallel()
f := newTestFuncster() f := newTestFuncster()
defer testRetryWhenDone(f).Reset()
reqCount := 0 reqCount := 0
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if reqCount > 0 { if reqCount > 0 {
@ -275,7 +260,6 @@ func TestGetCSVFailParseSep(t *testing.T) {
})) }))
defer ts.Close() defer ts.Close()
url := ts.URL + "/test.csv" url := ts.URL + "/test.csv"
defer os.Remove(getCacheFileID(url))
want := [][]string{{"gomeetup", "city"}, {"yes", "Sydney"}, {"yes", "San Francisco"}, {"yes", "Stockholm"}} want := [][]string{{"gomeetup", "city"}, {"yes", "Sydney"}, {"yes", "San Francisco"}, {"yes", "Stockholm"}}
have := f.getCSV(",", url) have := f.getCSV(",", url)
@ -286,11 +270,10 @@ func TestGetCSVFailParseSep(t *testing.T) {
} }
func TestGetCSVFailParse(t *testing.T) { func TestGetCSVFailParse(t *testing.T) {
t.Parallel()
f := newTestFuncster() f := newTestFuncster()
defer testRetryWhenDone(f).Reset()
reqCount := 0 reqCount := 0
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Add("Content-type", "application/json") w.Header().Add("Content-type", "application/json")
@ -309,7 +292,6 @@ func TestGetCSVFailParse(t *testing.T) {
})) }))
defer ts.Close() defer ts.Close()
url := ts.URL + "/test.csv" url := ts.URL + "/test.csv"
defer os.Remove(getCacheFileID(url))
want := [][]string{{"gomeetup", "city"}, {"yes", "Sydney"}, {"yes", "San Francisco"}, {"yes", "Stockholm"}} want := [][]string{{"gomeetup", "city"}, {"yes", "Sydney"}, {"yes", "San Francisco"}, {"yes", "Stockholm"}}
have := f.getCSV(",", url) have := f.getCSV(",", url)


@ -26,21 +26,15 @@ import (
"github.com/spf13/afero" "github.com/spf13/afero"
"github.com/spf13/hugo/deps" "github.com/spf13/hugo/deps"
"github.com/spf13/hugo/helpers"
"github.com/spf13/hugo/tplapi" "github.com/spf13/hugo/tplapi"
"github.com/spf13/viper" "github.com/spf13/viper"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
func testReset() {
viper.Reset()
// TODO(bep) viper-globals
viper.Set("currentContentLanguage", helpers.NewLanguage("en"))
}
// Some tests for Issue #1178 -- Ace // Some tests for Issue #1178 -- Ace
func TestAceTemplates(t *testing.T) { func TestAceTemplates(t *testing.T) {
t.Parallel()
for i, this := range []struct { for i, this := range []struct {
basePath string basePath string
@ -79,7 +73,7 @@ html lang=en
d := "DATA" d := "DATA"
config := newDefaultDepsCfg() config := newDepsConfig(viper.New())
config.WithTemplate = func(templ tplapi.Template) error { config.WithTemplate = func(templ tplapi.Template) error {
return templ.AddAceTemplate("mytemplate.ace", basePath, innerPath, return templ.AddAceTemplate("mytemplate.ace", basePath, innerPath,
[]byte(this.baseContent), []byte(this.innerContent)) []byte(this.baseContent), []byte(this.innerContent))
@ -87,7 +81,7 @@ html lang=en
a := deps.New(config) a := deps.New(config)
if err := a.LoadTemplates(); err != nil { if err := a.LoadResources(); err != nil {
t.Fatal(err) t.Fatal(err)
} }
@ -124,6 +118,7 @@ func isAtLeastGo16() bool {
} }
func TestAddTemplateFileWithMaster(t *testing.T) { func TestAddTemplateFileWithMaster(t *testing.T) {
t.Parallel()
if !isAtLeastGo16() { if !isAtLeastGo16() {
t.Skip("This test only runs on Go >= 1.6") t.Skip("This test only runs on Go >= 1.6")
@ -148,8 +143,8 @@ func TestAddTemplateFileWithMaster(t *testing.T) {
masterTplName := "mt" masterTplName := "mt"
finalTplName := "tp" finalTplName := "tp"
cfg := newDefaultDepsCfg() config := newDepsConfig(viper.New())
cfg.WithTemplate = func(templ tplapi.Template) error { config.WithTemplate = func(templ tplapi.Template) error {
err := templ.AddTemplateFileWithMaster(finalTplName, overlayTplName, masterTplName) err := templ.AddTemplateFileWithMaster(finalTplName, overlayTplName, masterTplName)
@ -189,13 +184,13 @@ func TestAddTemplateFileWithMaster(t *testing.T) {
} }
if this.writeSkipper != 1 { if this.writeSkipper != 1 {
afero.WriteFile(cfg.Fs.Source, masterTplName, []byte(this.masterTplContent), 0644) afero.WriteFile(config.Fs.Source, masterTplName, []byte(this.masterTplContent), 0644)
} }
if this.writeSkipper != 2 { if this.writeSkipper != 2 {
afero.WriteFile(cfg.Fs.Source, overlayTplName, []byte(this.overlayTplContent), 0644) afero.WriteFile(config.Fs.Source, overlayTplName, []byte(this.overlayTplContent), 0644)
} }
deps.New(cfg) deps.New(config)
} }
@ -204,6 +199,7 @@ func TestAddTemplateFileWithMaster(t *testing.T) {
// A Go stdlib test for linux/arm. Will remove later. // A Go stdlib test for linux/arm. Will remove later.
// See #1771 // See #1771
func TestBigIntegerFunc(t *testing.T) { func TestBigIntegerFunc(t *testing.T) {
t.Parallel()
var func1 = func(v int64) error { var func1 = func(v int64) error {
return nil return nil
} }
@ -234,6 +230,7 @@ func (b BI) A(v int64) error {
return nil return nil
} }
func TestBigIntegerMethod(t *testing.T) { func TestBigIntegerMethod(t *testing.T) {
t.Parallel()
data := &BI{} data := &BI{}
@ -253,6 +250,7 @@ func TestBigIntegerMethod(t *testing.T) {
// Test for bugs discovered by https://github.com/dvyukov/go-fuzz // Test for bugs discovered by https://github.com/dvyukov/go-fuzz
func TestTplGoFuzzReports(t *testing.T) { func TestTplGoFuzzReports(t *testing.T) {
t.Parallel()
// The following test case(s) also fail // The following test case(s) also fail
// See https://github.com/golang/go/issues/10634 // See https://github.com/golang/go/issues/10634
@ -284,13 +282,14 @@ func TestTplGoFuzzReports(t *testing.T) {
H: "a,b,c,d,e,f", H: "a,b,c,d,e,f",
} }
cfg := newDefaultDepsCfg() config := newDepsConfig(viper.New())
cfg.WithTemplate = func(templ tplapi.Template) error {
config.WithTemplate = func(templ tplapi.Template) error {
return templ.AddTemplate("fuzz", this.data) return templ.AddTemplate("fuzz", this.data)
} }
de := deps.New(cfg) de := deps.New(config)
require.NoError(t, de.LoadTemplates()) require.NoError(t, de.LoadResources())
templ := de.Tmpl.(*GoHTMLTemplate) templ := de.Tmpl.(*GoHTMLTemplate)


@ -16,24 +16,23 @@ package transform
import ( import (
"bytes" "bytes"
"fmt" "fmt"
"github.com/spf13/viper"
) )
func LiveReloadInject(ct contentTransformer) { func LiveReloadInject(port int) func(ct contentTransformer) {
endBodyTag := "</body>" return func(ct contentTransformer) {
match := []byte(endBodyTag) endBodyTag := "</body>"
port := viper.Get("port")
replaceTemplate := `<script data-no-instant>document.write('<script src="/livereload.js?port=%d&mindelay=10"></' + 'script>')</script>%s`
replace := []byte(fmt.Sprintf(replaceTemplate, port, endBodyTag))
newcontent := bytes.Replace(ct.Content(), match, replace, 1)
if len(newcontent) == len(ct.Content()) {
endBodyTag = "</BODY>"
replace := []byte(fmt.Sprintf(replaceTemplate, port, endBodyTag))
match := []byte(endBodyTag) match := []byte(endBodyTag)
newcontent = bytes.Replace(ct.Content(), match, replace, 1) replaceTemplate := `<script data-no-instant>document.write('<script src="/livereload.js?port=%d&mindelay=10"></' + 'script>')</script>%s`
} replace := []byte(fmt.Sprintf(replaceTemplate, port, endBodyTag))
ct.Write(newcontent) newcontent := bytes.Replace(ct.Content(), match, replace, 1)
if len(newcontent) == len(ct.Content()) {
endBodyTag = "</BODY>"
replace := []byte(fmt.Sprintf(replaceTemplate, port, endBodyTag))
match := []byte(endBodyTag)
newcontent = bytes.Replace(ct.Content(), match, replace, 1)
}
ct.Write(newcontent)
}
} }
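`LiveReloadInject` now returns a transformer closed over the port instead of reading `viper.Get("port")` at transform time; the test below constructs it as `LiveReloadInject(1313)`. A minimal standalone sketch of that constructor-closure shape, with simplified types standing in for `contentTransformer`:

```go
package main

import (
	"bytes"
	"fmt"
)

// transformer is a simplified stand-in for the contentTransformer chain; the
// point is only that the port is captured when the transform is constructed.
type transformer func(content []byte) []byte

func liveReloadInject(port int) transformer {
	return func(content []byte) []byte {
		script := fmt.Sprintf(
			`<script data-no-instant>document.write('<script src="/livereload.js?port=%d&mindelay=10"></' + 'script>')</script></body>`,
			port)
		return bytes.Replace(content, []byte("</body>"), []byte(script), 1)
	}
}

func main() {
	inject := liveReloadInject(1313)
	fmt.Println(string(inject([]byte("<html><body>hi</body></html>"))))
}
```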


@ -18,8 +18,6 @@ import (
"fmt" "fmt"
"strings" "strings"
"testing" "testing"
"github.com/spf13/viper"
) )
func TestLiveReloadInject(t *testing.T) { func TestLiveReloadInject(t *testing.T) {
@ -28,11 +26,10 @@ func TestLiveReloadInject(t *testing.T) {
} }
func doTestLiveReloadInject(t *testing.T, bodyEndTag string) { func doTestLiveReloadInject(t *testing.T, bodyEndTag string) {
viper.Set("port", 1313)
out := new(bytes.Buffer) out := new(bytes.Buffer)
in := strings.NewReader(bodyEndTag) in := strings.NewReader(bodyEndTag)
tr := NewChain(LiveReloadInject) tr := NewChain(LiveReloadInject(1313))
tr.Apply(out, in, []byte("path")) tr.Apply(out, in, []byte("path"))
expected := fmt.Sprintf(`<script data-no-instant>document.write('<script src="/livereload.js?port=1313&mindelay=10"></' + 'script>')</script>%s`, bodyEndTag) expected := fmt.Sprintf(`<script data-no-instant>document.write('<script src="/livereload.js?port=1313&mindelay=10"></' + 'script>')</script>%s`, bodyEndTag)