Support files in content mounts

This commit is a general improvement of the handling of single file mounts.

Fixes #6684
Fixes #6696
Bjørn Erik Pedersen 2019-12-30 10:50:00 +01:00
parent aa4ccb8a1e
commit ff6253bc7c
10 changed files with 273 additions and 55 deletions
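For context, a single-file content mount of the kind this commit targets looks like the following module configuration (taken from the new TestMountsContentFile test added below; README.md and mycontent are just the test fixtures):

[module]
[[module.mounts]]
source = "README.md"
target = "content/_index.md"
[[module.mounts]]
source = "mycontent"
target = "content/blog"

Here README.md is mounted as the single content file content/_index.md, while mycontent is an ordinary directory mount. The changes below make the file watcher, the root mapping filesystem and the page collector treat the first case as a file rather than a directory.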


@@ -667,7 +667,7 @@ func (c *commandeer) timeTrack(start time.Time, name string) {
 // getDirList provides NewWatcher() with a list of directories to watch for changes.
 func (c *commandeer) getDirList() ([]string, error) {
-    var dirnames []string
+    var filenames []string
     walkFn := func(path string, fi hugofs.FileMetaInfo, err error) error {
         if err != nil {
@@ -681,25 +681,29 @@ func (c *commandeer) getDirList() ([]string, error) {
                return filepath.SkipDir
            }
-           dirnames = append(dirnames, fi.Meta().Filename())
+           filenames = append(filenames, fi.Meta().Filename())
        }
        return nil
    }

-   watchDirs := c.hugo().PathSpec.BaseFs.WatchDirs()
-   for _, watchDir := range watchDirs {
-       w := hugofs.NewWalkway(hugofs.WalkwayConfig{Logger: c.logger, Info: watchDir, WalkFn: walkFn})
+   watchFiles := c.hugo().PathSpec.BaseFs.WatchDirs()
+   for _, fi := range watchFiles {
+       if !fi.IsDir() {
+           filenames = append(filenames, fi.Meta().Filename())
+           continue
+       }
+
+       w := hugofs.NewWalkway(hugofs.WalkwayConfig{Logger: c.logger, Info: fi, WalkFn: walkFn})
        if err := w.Walk(); err != nil {
            c.logger.ERROR.Println("walker: ", err)
        }
    }

-   dirnames = helpers.UniqueStringsSorted(dirnames)
-   return dirnames, nil
+   filenames = helpers.UniqueStringsSorted(filenames)
+   return filenames, nil
 }

 func (c *commandeer) buildSites() (err error) {


@@ -35,6 +35,9 @@ import (
 const (
    metaKeyFilename = "filename"
+   metaKeyPathFile = "pathFile" // Path of filename relative to a root.
+   metaKeyIsFileMount = "isFileMount" // Whether the source mount was a file.
+   metaKeyMountRoot = "mountRoot"
    metaKeyOriginalFilename = "originalFilename"
    metaKeyName = "name"
    metaKeyPath = "path"
@@ -108,10 +111,34 @@ func (f FileMeta) Lang() string {
    return f.stringV(metaKeyLang)
 }

+// Path returns the relative file path to where this file is mounted.
 func (f FileMeta) Path() string {
    return f.stringV(metaKeyPath)
 }

+// PathFile returns the relative file path for the file source. This
+// will in most cases be the same as Path.
+func (f FileMeta) PathFile() string {
+   pf := f.stringV(metaKeyPathFile)
+   if f.isFileMount() {
+       return pf
+   }
+   mountRoot := f.mountRoot()
+   if mountRoot == pf {
+       return f.Path()
+   }
+   return pf + (strings.TrimPrefix(f.Path(), mountRoot))
+}
+
+func (f FileMeta) mountRoot() string {
+   return f.stringV(metaKeyMountRoot)
+}
+
+func (f FileMeta) isFileMount() bool {
+   return f.GetBool(metaKeyIsFileMount)
+}
+
 func (f FileMeta) Weight() int {
    return f.GetInt(metaKeyWeight)
 }
@@ -129,10 +156,6 @@ func (f FileMeta) IsSymlink() bool {
    return f.GetBool(metaKeyIsSymlink)
 }

-func (f FileMeta) String() string {
-   return f.Filename()
-}
-
 func (f FileMeta) Watch() bool {
    if v, found := f["watch"]; found {
        return v.(bool)
@@ -210,6 +233,14 @@ func NewFileMetaInfo(fi os.FileInfo, m FileMeta) FileMetaInfo {
    return &fileInfoMeta{FileInfo: fi, m: m}
 }

+func copyFileMeta(m FileMeta) FileMeta {
+   c := make(FileMeta)
+   for k, v := range m {
+       c[k] = v
+   }
+   return c
+}
+
 // Merge metadata, last entry wins.
 func mergeFileMeta(from, to FileMeta) {
    if from == nil {


@@ -35,7 +35,7 @@ var filepathSeparator = string(filepath.Separator)
 func NewRootMappingFs(fs afero.Fs, rms ...RootMapping) (*RootMappingFs, error) {
    rootMapToReal := radix.New()

-   for _, rm := range rms {
+   for i, rm := range rms {
        (&rm).clean()

        fromBase := files.ResolveComponentFolder(rm.From)
@@ -47,16 +47,32 @@ func NewRootMappingFs(fs afero.Fs, rms ...RootMapping) (*RootMappingFs, error) {
            panic(fmt.Sprintf("invalid root mapping; from/to: %s/%s", rm.From, rm.To))
        }

-       _, err := fs.Stat(rm.To)
+       fi, err := fs.Stat(rm.To)
        if err != nil {
            if os.IsNotExist(err) {
                continue
            }
            return nil, err
        }

        // Extract "blog" from "content/blog"
        rm.path = strings.TrimPrefix(strings.TrimPrefix(rm.From, fromBase), filepathSeparator)

+       if rm.Meta != nil {
+           rm.Meta[metaKeyIsFileMount] = !fi.IsDir()
+           rm.Meta[metaKeyMountRoot] = rm.path
+           if rm.ToBasedir != "" {
+               pathFile := strings.TrimPrefix(strings.TrimPrefix(rm.To, rm.ToBasedir), filepathSeparator)
+               rm.Meta[metaKeyPathFile] = pathFile
+           }
+       }
+
+       meta := copyFileMeta(rm.Meta)
+
+       if !fi.IsDir() {
+           _, name := filepath.Split(rm.From)
+           meta[metaKeyName] = name
+       }
+       rm.fi = NewFileMetaInfo(fi, meta)
+
        key := rm.rootKey()
        var mappings []RootMapping
@@ -67,6 +83,8 @@ func NewRootMappingFs(fs afero.Fs, rms ...RootMapping) (*RootMappingFs, error) {
        }
        mappings = append(mappings, rm)
        rootMapToReal.Insert(key, mappings)
+
+       rms[i] = rm
    }

    rfs := &RootMappingFs{Fs: fs,
@@ -91,11 +109,14 @@ func NewRootMappingFsFromFromTo(fs afero.Fs, fromTo ...string) (*RootMappingFs,
 }

 type RootMapping struct {
-   From string
-   To   string
-
-   path string   // The virtual mount point, e.g. "blog".
-   Meta FileMeta // File metadata (lang etc.)
+   From      string // The virtual mount.
+   To        string // The source directory or file.
+   ToBasedir string // The base of To. May be empty if an absolute path was provided.
+   Meta      FileMeta // File metadata (lang etc.)
+
+   fi   FileMetaInfo
+   path string // The virtual mount point, e.g. "blog".
 }

 func (rm *RootMapping) clean() {
@@ -148,6 +169,11 @@ func (fs *RootMappingFs) Dirs(base string) ([]FileMetaInfo, error) {
        if err != nil {
            return nil, errors.Wrap(err, "RootMappingFs.Dirs")
        }
+
+       if !fi.IsDir() {
+           mergeFileMeta(r.Meta, fi.(FileMetaInfo).Meta())
+       }
+
        fss[i] = fi.(FileMetaInfo)
    }
@@ -168,7 +194,6 @@ func (fs *RootMappingFs) virtualDirOpener(name string, isRoot bool) func() (afer
 }

 func (fs *RootMappingFs) doLstat(name string, allowMultiple bool) ([]FileMetaInfo, []FileMetaInfo, bool, error) {
    if fs.isRoot(name) {
        return []FileMetaInfo{newDirNameOnlyFileInfo(name, true, fs.virtualDirOpener(name, true))}, nil, false, nil
    }
@@ -210,10 +235,12 @@ func (fs *RootMappingFs) doLstat(name string, allowMultiple bool) ([]FileMetaInf
            return nil, nil, false, err
        }
        fim := fi.(FileMetaInfo)
        fis = append(fis, fim)
    }

    for _, root = range rootsInDir {
        fi, _, err := fs.statRoot(root, "")
        if err != nil {
            if os.IsNotExist(err) {
@@ -500,9 +527,9 @@ func (f *rootMappingFile) Name() string {
 func (f *rootMappingFile) Readdir(count int) ([]os.FileInfo, error) {
    if f.File == nil {
-       dirsn := make([]os.FileInfo, 0)
+       filesn := make([]os.FileInfo, 0)
        roots := f.fs.getRootsWithPrefix(f.name)
-       seen := make(map[string]bool)
+       seen := make(map[string]bool) // Do not return duplicate directories
        j := 0
        for _, rm := range roots {
@@ -510,13 +537,16 @@ func (f *rootMappingFile) Readdir(count int) ([]os.FileInfo, error) {
                break
            }

-           opener := func() (afero.File, error) {
-               return f.fs.Open(rm.From)
+           if !rm.fi.IsDir() {
+               // A single file mount
+               filesn = append(filesn, rm.fi)
+               continue
            }

-           name := rm.From
+           from := rm.From
+           name := from
            if !f.isRoot {
-               _, name = filepath.Split(rm.From)
+               _, name = filepath.Split(from)
            }

            if seen[name] {
@@ -524,16 +554,21 @@ func (f *rootMappingFile) Readdir(count int) ([]os.FileInfo, error) {
            }
            seen[name] = true

+           opener := func() (afero.File, error) {
+               return f.fs.Open(from)
+           }
+
            j++

            fi := newDirNameOnlyFileInfo(name, false, opener)
            if rm.Meta != nil {
                mergeFileMeta(rm.Meta, fi.Meta())
            }

-           dirsn = append(dirsn, fi)
+           filesn = append(filesn, fi)
        }
-       return dirsn, nil
+       return filesn, nil
    }

    if f.File == nil {


@@ -186,21 +186,40 @@ func TestRootMappingFsMount(t *testing.T) {
    c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/myenblogcontent", testfile), []byte("some en content"), 0755), qt.IsNil)
    c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mysvblogcontent", testfile), []byte("some sv content"), 0755), qt.IsNil)
    c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mysvblogcontent", "other.txt"), []byte("some sv content"), 0755), qt.IsNil)
+   c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/singlefiles", "no.txt"), []byte("no text"), 0755), qt.IsNil)
+   c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/singlefiles", "sv.txt"), []byte("sv text"), 0755), qt.IsNil)

    bfs := afero.NewBasePathFs(fs, "themes/a").(*afero.BasePathFs)

    rm := []RootMapping{
-       RootMapping{From: "content/blog",
+       // Directories
+       RootMapping{
+           From: "content/blog",
            To:   "mynoblogcontent",
            Meta: FileMeta{"lang": "no"},
        },
-       RootMapping{From: "content/blog",
+       RootMapping{
+           From: "content/blog",
            To:   "myenblogcontent",
            Meta: FileMeta{"lang": "en"},
        },
-       RootMapping{From: "content/blog",
+       RootMapping{
+           From: "content/blog",
            To:   "mysvblogcontent",
            Meta: FileMeta{"lang": "sv"},
        },
+       // Files
+       RootMapping{
+           From:      "content/singles/p1.md",
+           To:        "singlefiles/no.txt",
+           ToBasedir: "singlefiles",
+           Meta:      FileMeta{"lang": "no"},
+       },
+       RootMapping{
+           From:      "content/singles/p1.md",
+           To:        "singlefiles/sv.txt",
+           ToBasedir: "singlefiles",
+           Meta:      FileMeta{"lang": "sv"},
+       },
    }

    rfs, err := NewRootMappingFs(bfs, rm...)
@@ -208,6 +227,7 @@ func TestRootMappingFsMount(t *testing.T) {
    blog, err := rfs.Stat(filepath.FromSlash("content/blog"))
    c.Assert(err, qt.IsNil)
+   c.Assert(blog.IsDir(), qt.Equals, true)

    blogm := blog.(FileMetaInfo).Meta()
    c.Assert(blogm.Lang(), qt.Equals, "no") // First match
@@ -236,6 +256,25 @@ func TestRootMappingFsMount(t *testing.T) {
    c.Assert(err, qt.IsNil)
    c.Assert(string(b), qt.Equals, "some no content")

+   // Check file mappings
+   single, err := rfs.Stat(filepath.FromSlash("content/singles/p1.md"))
+   c.Assert(err, qt.IsNil)
+   c.Assert(single.IsDir(), qt.Equals, false)
+   singlem := single.(FileMetaInfo).Meta()
+   c.Assert(singlem.Lang(), qt.Equals, "no") // First match
+
+   singlesDir, err := rfs.Open(filepath.FromSlash("content/singles"))
+   c.Assert(err, qt.IsNil)
+   defer singlesDir.Close()
+   singles, err := singlesDir.Readdir(-1)
+   c.Assert(err, qt.IsNil)
+   c.Assert(singles, qt.HasLen, 2)
+   for i, lang := range []string{"no", "sv"} {
+       fi := singles[i].(FileMetaInfo)
+       c.Assert(fi.Meta().PathFile(), qt.Equals, lang+".txt")
+       c.Assert(fi.Meta().Lang(), qt.Equals, lang)
+       c.Assert(fi.Name(), qt.Equals, "p1.md")
+   }
 }

 func TestRootMappingFsMountOverlap(t *testing.T) {


@@ -18,6 +18,7 @@ package filesystems
 import (
    "io"
    "os"
+   "path"
    "path/filepath"
    "strings"
    "sync"
@@ -55,6 +56,8 @@ type BaseFs struct {
    theBigFs *filesystemsCollector
 }

+// TODO(bep) we can get regular files in here and that is fine, but
+// we need to clean up the naming.
 func (fs *BaseFs) WatchDirs() []hugofs.FileMetaInfo {
    var dirs []hugofs.FileMetaInfo
    for _, dir := range fs.AllDirs() {
@@ -62,7 +65,6 @@ func (fs *BaseFs) WatchDirs() []hugofs.FileMetaInfo {
            dirs = append(dirs, dir)
        }
    }
    return dirs
 }
@@ -90,7 +92,7 @@ func (b *BaseFs) RelContentDir(filename string) string {
    for _, dir := range b.SourceFilesystems.Content.Dirs {
        dirname := dir.Meta().Filename()
        if strings.HasPrefix(filename, dirname) {
-           rel := strings.TrimPrefix(filename, dirname)
+           rel := path.Join(dir.Meta().Path(), strings.TrimPrefix(filename, dirname))
            return strings.TrimPrefix(rel, filePathSeparator)
        }
    }
@@ -298,8 +300,16 @@ func (d *SourceFilesystem) Contains(filename string) bool {
 func (d *SourceFilesystem) Path(filename string) string {
    for _, dir := range d.Dirs {
        meta := dir.Meta()
+       if !dir.IsDir() {
+           if filename == meta.Filename() {
+               return meta.PathFile()
+           }
+           continue
+       }
+
        if strings.HasPrefix(filename, meta.Filename()) {
            p := strings.TrimPrefix(strings.TrimPrefix(filename, meta.Filename()), filePathSeparator)
+           p = path.Join(meta.PathFile(), p)
            return p
        }
    }
@@ -530,11 +540,11 @@ func (b *sourceFilesystemsBuilder) createModFs(
        fromToStatic []hugofs.RootMapping
    )

-   absPathify := func(path string) string {
+   absPathify := func(path string) (string, string) {
        if filepath.IsAbs(path) {
-           return path
+           return "", path
        }
-       return paths.AbsPathify(md.dir, path)
+       return md.dir, paths.AbsPathify(md.dir, path)
    }

    for _, mount := range md.Mounts() {
@@ -544,9 +554,12 @@ func (b *sourceFilesystemsBuilder) createModFs(
            mountWeight++
        }

+       base, filename := absPathify(mount.Source)
+
        rm := hugofs.RootMapping{
            From: mount.Target,
-           To:   absPathify(mount.Source),
+           To:        filename,
+           ToBasedir: base,
            Meta: hugofs.FileMeta{
                "watch":       md.Watch(),
                "mountWeight": mountWeight,
@@ -621,7 +634,8 @@ func (b *sourceFilesystemsBuilder) createModFs(
        if md.isMainProject {
            return b.p.AbsResourcesDir
        }
-       return absPathify(files.FolderResources)
+       _, filename := absPathify(files.FolderResources)
+       return filename
    }

    if collector.overlayMounts == nil {


@@ -545,6 +545,85 @@ title: "My Page"
    b.AssertFileContent("public/mypage/index.html", "Permalink: https://example.org/mypage/")
 }

+// https://github.com/gohugoio/hugo/issues/6684
+func TestMountsContentFile(t *testing.T) {
+   c := qt.New(t)
+   workingDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugo-modules-content-file")
+   c.Assert(err, qt.IsNil)
+   defer clean()
+
+   configTemplate := `
+baseURL = "https://example.com"
+title = "My Modular Site"
+workingDir = %q
+
+[module]
+[[module.mounts]]
+source = "README.md"
+target = "content/_index.md"
+[[module.mounts]]
+source = "mycontent"
+target = "content/blog"
+`
+
+   config := fmt.Sprintf(configTemplate, workingDir)
+
+   b := newTestSitesBuilder(t).Running()
+   b.Fs = hugofs.NewDefault(viper.New())
+   b.WithWorkingDir(workingDir).WithConfigFile("toml", config)
+   b.WithTemplatesAdded("index.html", `
+{{ .Title }}
+{{ .Content }}
+
+{{ $readme := .Site.GetPage "/README.md" }}
+{{ with $readme }}README: {{ .Title }}|Filename: {{ path.Join .File.Filename }}|Path: {{ path.Join .File.Path }}|FilePath: {{ path.Join .File.FileInfo.Meta.PathFile }}|{{ end }}
+
+{{ $mypage := .Site.GetPage "/blog/mypage.md" }}
+{{ with $mypage }}MYPAGE: {{ .Title }}|Path: {{ path.Join .File.Path }}|FilePath: {{ path.Join .File.FileInfo.Meta.PathFile }}|{{ end }}
+`)
+
+   os.Mkdir(filepath.Join(workingDir, "mycontent"), 0777)
+
+   b.WithSourceFile("README.md", `---
+title: "Readme Title"
+---
+Readme Content.
+`,
+       filepath.Join("mycontent", "mypage.md"), `
+---
+title: "My Page"
+---
+`)
+
+   b.Build(BuildCfg{})
+
+   b.AssertFileContent("public/index.html", `
+README: Readme Title
+/README.md|Path: _index.md|FilePath: README.md
+Readme Content.
+MYPAGE: My Page|Path: blog/mypage.md|FilePath: mycontent/mypage.md|
+`)
+
+   b.AssertFileContent("public/blog/mypage/index.html", "Single: My Page")
+
+   b.EditFiles("README.md", `---
+title: "Readme Edit"
+---
+`)
+
+   b.Build(BuildCfg{})
+
+   b.AssertFileContent("public/index.html", `
+Readme Edit
+`)
+}
+
 // https://github.com/gohugoio/hugo/issues/6299
 func TestSiteWithGoModButNoModules(t *testing.T) {
    t.Parallel()


@@ -895,7 +895,7 @@ func (m *contentChangeMap) add(dirname string, tp bundleDirType) {
    m.mu.Unlock()
 }

-func (m *contentChangeMap) resolveAndRemove(filename string) (string, string, bundleDirType) {
+func (m *contentChangeMap) resolveAndRemove(filename string) (string, bundleDirType) {
    m.mu.RLock()
    defer m.mu.RUnlock()
@@ -908,22 +908,22 @@ func (m *contentChangeMap) resolveAndRemove(filename string) (string, string, bu
    if _, found := m.branchBundles[dir]; found {
        delete(m.branchBundles, dir)
-       return dir, dir, bundleBranch
+       return dir, bundleBranch
    }

    if key, _, found := m.leafBundles.LongestPrefix(dir); found {
        m.leafBundles.Delete(key)
        dir = string(key)
-       return dir, dir, bundleLeaf
+       return dir, bundleLeaf
    }

    fileTp, isContent := classifyBundledFile(name)
    if isContent && fileTp != bundleNot {
        // A new bundle.
-       return dir, dir, fileTp
+       return dir, fileTp
    }

-   return dir, filename, bundleNot
+   return dir, bundleNot
 }


@@ -946,6 +946,24 @@ func (p *pageState) sourceRef() string {
    return ""
 }

+func (p *pageState) sourceRefs() []string {
+   refs := []string{p.sourceRef()}
+
+   if !p.File().IsZero() {
+       meta := p.File().FileInfo().Meta()
+       path := meta.PathFile()
+       if path != "" {
+           ref := "/" + path
+           if ref != refs[0] {
+               refs = append(refs, ref)
+           }
+       }
+   }
+
+   return refs
+}
+
 type pageStatePages []*pageState

 // Implement sorting.


@@ -151,12 +151,11 @@ func newPageCollectionsFromPages(pages pageStatePages) *PageCollections {
    for _, pageCollection := range []pageStatePages{c.workAllPages, c.headlessPages} {
        for _, p := range pageCollection {
            if p.IsPage() {
-               sourceRef := p.sourceRef()
-               if sourceRef != "" {
-                   // index the canonical ref
-                   // e.g. /section/article.md
-                   add(sourceRef, p)
+               sourceRefs := p.sourceRefs()
+               for _, ref := range sourceRefs {
+                   add(ref, p)
                }
+               sourceRef := sourceRefs[0]

                // Ref/Relref supports this potentially ambiguous lookup.
                add(p.File().LogicalName(), p)
@@ -177,11 +176,9 @@ func newPageCollectionsFromPages(pages pageStatePages) *PageCollections {
                pathWithNoExtensions := path.Join(dir, translationBaseName)
                add(pathWithNoExtensions, p)
            } else {
-               // index the canonical, unambiguous ref for any backing file
-               // e.g. /section/_index.md
-               sourceRef := p.sourceRef()
-               if sourceRef != "" {
-                   add(sourceRef, p)
+               sourceRefs := p.sourceRefs()
+               for _, ref := range sourceRefs {
+                   add(ref, p)
                }

                ref := p.SectionsPath()


@@ -116,7 +116,7 @@ func (c *pagesCollector) Collect() error {
    } else {
        dirs := make(map[contentDirKey]bool)
        for _, filename := range c.filenames {
-           dir, filename, btype := c.tracker.resolveAndRemove(filename)
+           dir, btype := c.tracker.resolveAndRemove(filename)
            dirs[contentDirKey{dir, filename, btype}] = true
        }
@@ -127,7 +127,7 @@ func (c *pagesCollector) Collect() error {
        default:
            // We always start from a directory.
            collectErr = c.collectDir(dir.dirname, true, func(fim hugofs.FileMetaInfo) bool {
-               return strings.HasSuffix(dir.filename, fim.Meta().Path())
+               return dir.filename == fim.Meta().Filename()
            })
        }
@@ -211,6 +211,7 @@ func (c *pagesCollector) collectDir(dirname string, partial bool, inFilter func(
    for _, fi := range readdir {
        if filter(fi) {
            filtered = append(filtered, fi)
+
            if c.tracker != nil {
                // Track symlinks.
                c.tracker.addSymbolicLinkMapping(fi)