Re-render shortcode on template or data file change

Fixes #1971
Authored by Bjørn Erik Pedersen on 2016-03-17 15:51:52 +01:00; committed by Steve Francia
parent e5e1bcc271
commit de8fc8761a
4 changed files with 108 additions and 77 deletions
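Before the file-by-file diff, a minimal, self-contained sketch of the pattern this commit introduces may help: a page extracts its shortcodes once from the raw content and marks itself as rendered, so that a later template or data change only re-renders the cached shortcodes instead of re-reading and re-parsing the content source. The page, shortcode, templates, extract and process names below are simplified stand-ins invented for this sketch, not Hugo's actual types or functions; only the rendered flag and the renderShortcodes split loosely mirror the diff.

package main

import (
    "fmt"
    "strings"
)

// shortcode is a placeholder for one parsed shortcode call.
type shortcode struct {
    name string
}

// page caches the raw content, the shortcodes extracted from it, and their
// rendered output, loosely mirroring rawContent/shortcodes/contentShortCodes
// in the Page struct changed below.
type page struct {
    rawContent        string
    shortcodes        map[string]shortcode
    contentShortcodes map[string]string
    rendered          bool
}

// templates maps a shortcode name to a (very simplified) template.
type templates map[string]string

// extract pulls tokens of the form {{<name>}} out of the raw content; a toy
// stand-in for the real shortcode parser.
func extract(content string) map[string]shortcode {
    scs := make(map[string]shortcode)
    for i, field := range strings.Fields(content) {
        if strings.HasPrefix(field, "{{<") && strings.HasSuffix(field, ">}}") {
            scs[fmt.Sprintf("shortcode-%d", i)] = shortcode{name: strings.Trim(field, "{}<> ")}
        }
    }
    return scs
}

// renderShortcodes renders every cached shortcode with the current templates,
// analogous in spirit to the renderShortcodes helper split out in this commit.
func renderShortcodes(scs map[string]shortcode, t templates) map[string]string {
    out := make(map[string]string, len(scs))
    for key, sc := range scs {
        tmpl, ok := t[sc.name]
        if !ok {
            out[key] = "<!-- missing template for " + sc.name + " -->"
            continue
        }
        out[key] = strings.ReplaceAll(tmpl, "{{.Name}}", sc.name)
    }
    return out
}

// process mirrors the shape of Page.ProcessShortcodes after this commit:
// extract on the first pass, then on later passes (template or data change)
// re-render only from the cache.
func (p *page) process(t templates) {
    if !p.rendered {
        p.shortcodes = extract(p.rawContent)
        p.rendered = true
    }
    p.contentShortcodes = renderShortcodes(p.shortcodes, t)
}

func main() {
    p := &page{rawContent: "Some intro {{<figure>}} and a video {{<youtube>}}"}
    tpls := templates{
        "figure":  "<figure>{{.Name}}</figure>",
        "youtube": "<iframe>{{.Name}}</iframe>",
    }

    p.process(tpls)
    fmt.Println(p.contentShortcodes)

    // A shortcode template changed: re-render without re-reading the content.
    tpls["figure"] = `<figure class="wide">{{.Name}}</figure>`
    p.process(tpls)
    fmt.Println(p.contentShortcodes)
}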

@@ -134,6 +134,6 @@ func commonConvert(p *Page, t tpl.Template) HandledResult {
     p.Content = helpers.BytesToHTML(tmpContent)
     p.TableOfContents = helpers.BytesToHTML(tmpTableOfContents)
+    p.rendered = true
     return HandledResult{err: nil}
 }

@@ -69,7 +69,8 @@ type Page struct {
     linkTitle string
     frontmatter []byte
     rawContent []byte
-    contentShortCodes map[string]string
+    contentShortCodes map[string]string // TODO(bep) this shouldn't be needed.
+    shortcodes map[string]shortcode
     plain string // TODO should be []byte
     plainWords []string
     plainInit sync.Once
@@ -82,6 +83,7 @@ type Page struct {
     Source
     Position `json:"-"`
     Node
+    rendered bool
 }

 type Source struct {
@@ -907,9 +909,15 @@ func (p *Page) ProcessShortcodes(t tpl.Template) {
     // these short codes aren't used until after Page render,
     // but processed here to avoid coupling
-    tmpContent, tmpContentShortCodes, _ := extractAndRenderShortcodes(string(p.rawContent), p, t)
-    p.rawContent = []byte(tmpContent)
-    p.contentShortCodes = tmpContentShortCodes
+    // TODO(bep) Move this and remove p.contentShortCodes
+    if !p.rendered {
+        tmpContent, tmpContentShortCodes, _ := extractAndRenderShortcodes(string(p.rawContent), p, t)
+        p.rawContent = []byte(tmpContent)
+        p.contentShortCodes = tmpContentShortCodes
+    } else {
+        // shortcode template may have changed, rerender
+        p.contentShortCodes = renderShortcodes(p.shortcodes, p, t)
+    }
 }

@@ -263,14 +263,30 @@ func renderShortcode(sc shortcode, parent *ShortcodeWithPage, p *Page, t tpl.Template
 func extractAndRenderShortcodes(stringToParse string, p *Page, t tpl.Template) (string, map[string]string, error) {

+    if p.rendered {
+        panic("Illegal state: Page already marked as rendered, please reuse the shortcodes")
+    }
+
     content, shortcodes, err := extractShortcodes(stringToParse, p, t)
-    renderedShortcodes := make(map[string]string)

     if err != nil {
         // try to render what we have whilst logging the error
         jww.ERROR.Println(err.Error())
     }

+    // Save for reuse
+    // TODO(bep) refactor this
+    p.shortcodes = shortcodes
+
+    renderedShortcodes := renderShortcodes(shortcodes, p, t)
+
+    return content, renderedShortcodes, err
+}
+
+func renderShortcodes(shortcodes map[string]shortcode, p *Page, t tpl.Template) map[string]string {
+    renderedShortcodes := make(map[string]string)
+
     for key, sc := range shortcodes {
         if sc.err != nil {
             // need to have something to replace with
@@ -280,8 +296,7 @@ func extractAndRenderShortcodes(stringToParse string, p *Page, t tpl.Template) (
         }
     }

-    return content, renderedShortcodes, err
+    return renderedShortcodes
 }

 var shortCodeIllegalState = errors.New("Illegal shortcode state")
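The panic added above encodes a contract rather than new behaviour: once a Page is marked as rendered, extraction must not run a second time, and callers are expected to reuse the shortcodes cached on the page and only re-render them. A toy, self-contained illustration of that contract follows; extractOnce and reRender are names made up for this sketch and stand in only loosely for extractAndRenderShortcodes and renderShortcodes.

package main

import "fmt"

// page holds only what the sketch needs: a rendered flag and the cached output.
type page struct {
    rendered   bool
    shortcodes map[string]string
}

// extractOnce refuses to run twice on the same page, like the guard above.
func extractOnce(p *page, raw string) {
    if p.rendered {
        panic("illegal state: page already rendered, reuse the cached shortcodes")
    }
    p.shortcodes = map[string]string{"shortcode-1": "<figure>" + raw + "</figure>"}
    p.rendered = true
}

// reRender may be called any number of times against the cache.
func reRender(p *page) map[string]string {
    out := make(map[string]string, len(p.shortcodes))
    for k, v := range p.shortcodes {
        out[k] = v // the real code re-applies the (possibly changed) templates here
    }
    return out
}

func main() {
    p := &page{}
    extractOnce(p, "img.png")
    fmt.Println(reRender(p)) // fine: renders from the cache
    fmt.Println(reRender(p)) // still fine

    defer func() {
        if r := recover(); r != nil {
            fmt.Println("second extraction rejected:", r)
        }
    }()
    extractOnce(p, "img.png") // violates the contract and panics
}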

@@ -475,91 +475,99 @@ func (s *Site) ReBuild(events []fsnotify.Event) error {
     s.resetPageBuildState()

     // If a content file changes, we need to reload only it and re-render the entire site.
-    if len(sourceChanged) > 0 {

     // First step is to read the changed files and (re)place them in site.Pages
     // This includes processing any meta-data for that content

     // The second step is to convert the content into HTML
     // This includes processing any shortcodes that may be present.

     // We do this in parallel... even though it's likely only one file at a time.
     // We need to process the reading prior to the conversion for each file, but
     // we can convert one file while another one is still reading.
     errs := make(chan error)
     readResults := make(chan HandledResult)
     filechan := make(chan *source.File)
     convertResults := make(chan HandledResult)
     pageChan := make(chan *Page)
     fileConvChan := make(chan *source.File)
     coordinator := make(chan bool)

     wg := &sync.WaitGroup{}
     wg.Add(2)
     for i := 0; i < 2; i++ {
         go sourceReader(s, filechan, readResults, wg)
     }

     wg2 := &sync.WaitGroup{}
     wg2.Add(4)
     for i := 0; i < 2; i++ {
         go fileConverter(s, fileConvChan, convertResults, wg2)
         go pageConverter(s, pageChan, convertResults, wg2)
     }

     go incrementalReadCollator(s, readResults, pageChan, fileConvChan, coordinator, errs)
     go converterCollator(s, convertResults, errs)

+    if len(tmplChanged) > 0 || len(dataChanged) > 0 {
+        // Do not need to read the files again, but they need conversion
+        // for shortocde re-rendering.
+        for _, p := range s.Pages {
+            pageChan <- p
+        }
+    }
+
     for _, ev := range sourceChanged {

         if ev.Op&fsnotify.Remove == fsnotify.Remove {
             //remove the file & a create will follow
             path, _ := helpers.GetRelativePath(ev.Name, s.absContentDir())
             s.RemovePageByPath(path)
             continue
         }

         // Some editors (Vim) sometimes issue only a Rename operation when writing an existing file
         // Sometimes a rename operation means that file has been renamed other times it means
         // it's been updated
         if ev.Op&fsnotify.Rename == fsnotify.Rename {
             // If the file is still on disk, it's only been updated, if it's not, it's been moved
             if ex, err := afero.Exists(hugofs.SourceFs, ev.Name); !ex || err != nil {
                 path, _ := helpers.GetRelativePath(ev.Name, s.absContentDir())
                 s.RemovePageByPath(path)
                 continue
             }
         }

         file, err := s.ReReadFile(ev.Name)
         if err != nil {
             errs <- err
         }

         filechan <- file
     }

     // we close the filechan as we have sent everything we want to send to it.
     // this will tell the sourceReaders to stop iterating on that channel
     close(filechan)

     // waiting for the sourceReaders to all finish
     wg.Wait()

     // Now closing readResults as this will tell the incrementalReadCollator to
     // stop iterating over that.
     close(readResults)

     // once readResults is finished it will close coordinator and move along
     <-coordinator
     // allow that routine to finish, then close page & fileconvchan as we've sent
     // everything to them we need to.
     close(pageChan)
     close(fileConvChan)

     wg2.Wait()
     close(convertResults)

     s.timerStep("read & convert pages from source")

+    if len(sourceChanged) > 0 {
         s.setupPrevNext()
         if err = s.BuildSiteMeta(); err != nil {
             return err