From de8fc8761aa507ca25fae9163374ec59c5f585ce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Erik=20Pedersen?= Date: Thu, 17 Mar 2016 15:51:52 +0100 Subject: [PATCH] Re-render shortcode on template or data file change Fixes #1971 --- hugolib/handler_page.go | 2 +- hugolib/page.go | 16 +++-- hugolib/shortcode.go | 21 +++++- hugolib/site.go | 146 +++++++++++++++++++++------------------- 4 files changed, 108 insertions(+), 77 deletions(-) diff --git a/hugolib/handler_page.go b/hugolib/handler_page.go index ae429d151..bb1f7fb89 100644 --- a/hugolib/handler_page.go +++ b/hugolib/handler_page.go @@ -134,6 +134,6 @@ func commonConvert(p *Page, t tpl.Template) HandledResult { p.Content = helpers.BytesToHTML(tmpContent) p.TableOfContents = helpers.BytesToHTML(tmpTableOfContents) - + p.rendered = true return HandledResult{err: nil} } diff --git a/hugolib/page.go b/hugolib/page.go index 8884bda5d..fa403a421 100644 --- a/hugolib/page.go +++ b/hugolib/page.go @@ -69,7 +69,8 @@ type Page struct { linkTitle string frontmatter []byte rawContent []byte - contentShortCodes map[string]string + contentShortCodes map[string]string // TODO(bep) this shouldn't be needed. 
+ shortcodes map[string]shortcode plain string // TODO should be []byte plainWords []string plainInit sync.Once @@ -82,6 +83,7 @@ type Page struct { Source Position `json:"-"` Node + rendered bool } type Source struct { @@ -907,9 +909,15 @@ func (p *Page) ProcessShortcodes(t tpl.Template) { // these short codes aren't used until after Page render, // but processed here to avoid coupling - tmpContent, tmpContentShortCodes, _ := extractAndRenderShortcodes(string(p.rawContent), p, t) - p.rawContent = []byte(tmpContent) - p.contentShortCodes = tmpContentShortCodes + // TODO(bep) Move this and remove p.contentShortCodes + if !p.rendered { + tmpContent, tmpContentShortCodes, _ := extractAndRenderShortcodes(string(p.rawContent), p, t) + p.rawContent = []byte(tmpContent) + p.contentShortCodes = tmpContentShortCodes + } else { + // shortcode template may have changed, rerender + p.contentShortCodes = renderShortcodes(p.shortcodes, p, t) + } } diff --git a/hugolib/shortcode.go b/hugolib/shortcode.go index 3c0afd2e7..c0993cc2b 100644 --- a/hugolib/shortcode.go +++ b/hugolib/shortcode.go @@ -263,14 +263,30 @@ func renderShortcode(sc shortcode, parent *ShortcodeWithPage, p *Page, t tpl.Tem func extractAndRenderShortcodes(stringToParse string, p *Page, t tpl.Template) (string, map[string]string, error) { + if p.rendered { + panic("Illegal state: Page already marked as rendered, please reuse the shortcodes") + } + content, shortcodes, err := extractShortcodes(stringToParse, p, t) - renderedShortcodes := make(map[string]string) if err != nil { // try to render what we have whilst logging the error jww.ERROR.Println(err.Error()) } + // Save for reuse + // TODO(bep) refactor this + p.shortcodes = shortcodes + + renderedShortcodes := renderShortcodes(shortcodes, p, t) + + return content, renderedShortcodes, err + +} + +func renderShortcodes(shortcodes map[string]shortcode, p *Page, t tpl.Template) map[string]string { + renderedShortcodes := make(map[string]string) + for key, sc := 
range shortcodes { if sc.err != nil { // need to have something to replace with @@ -280,8 +296,7 @@ func extractAndRenderShortcodes(stringToParse string, p *Page, t tpl.Template) ( } } - return content, renderedShortcodes, err - + return renderedShortcodes } var shortCodeIllegalState = errors.New("Illegal shortcode state") diff --git a/hugolib/site.go b/hugolib/site.go index 5ff71b6cc..32c013489 100644 --- a/hugolib/site.go +++ b/hugolib/site.go @@ -475,91 +475,99 @@ func (s *Site) ReBuild(events []fsnotify.Event) error { s.resetPageBuildState() // If a content file changes, we need to reload only it and re-render the entire site. - if len(sourceChanged) > 0 { - // First step is to read the changed files and (re)place them in site.Pages - // This includes processing any meta-data for that content + // First step is to read the changed files and (re)place them in site.Pages + // This includes processing any meta-data for that content - // The second step is to convert the content into HTML - // This includes processing any shortcodes that may be present. + // The second step is to convert the content into HTML + // This includes processing any shortcodes that may be present. - // We do this in parallel... even though it's likely only one file at a time. - // We need to process the reading prior to the conversion for each file, but - // we can convert one file while another one is still reading. - errs := make(chan error) - readResults := make(chan HandledResult) - filechan := make(chan *source.File) - convertResults := make(chan HandledResult) - pageChan := make(chan *Page) - fileConvChan := make(chan *source.File) - coordinator := make(chan bool) + // We do this in parallel... even though it's likely only one file at a time. + // We need to process the reading prior to the conversion for each file, but + // we can convert one file while another one is still reading. 
+ errs := make(chan error) + readResults := make(chan HandledResult) + filechan := make(chan *source.File) + convertResults := make(chan HandledResult) + pageChan := make(chan *Page) + fileConvChan := make(chan *source.File) + coordinator := make(chan bool) - wg := &sync.WaitGroup{} - wg.Add(2) - for i := 0; i < 2; i++ { - go sourceReader(s, filechan, readResults, wg) + wg := &sync.WaitGroup{} + wg.Add(2) + for i := 0; i < 2; i++ { + go sourceReader(s, filechan, readResults, wg) + } + + wg2 := &sync.WaitGroup{} + wg2.Add(4) + for i := 0; i < 2; i++ { + go fileConverter(s, fileConvChan, convertResults, wg2) + go pageConverter(s, pageChan, convertResults, wg2) + } + + go incrementalReadCollator(s, readResults, pageChan, fileConvChan, coordinator, errs) + go converterCollator(s, convertResults, errs) + + if len(tmplChanged) > 0 || len(dataChanged) > 0 { + // Do not need to read the files again, but they need conversion + // for shortcode re-rendering. + for _, p := range s.Pages { + pageChan <- p + } + } + + for _, ev := range sourceChanged { + + if ev.Op&fsnotify.Remove == fsnotify.Remove { + //remove the file & a create will follow + path, _ := helpers.GetRelativePath(ev.Name, s.absContentDir()) + s.RemovePageByPath(path) + continue } - wg2 := &sync.WaitGroup{} - wg2.Add(4) - for i := 0; i < 2; i++ { - go fileConverter(s, fileConvChan, convertResults, wg2) - go pageConverter(s, pageChan, convertResults, wg2) - } - - go incrementalReadCollator(s, readResults, pageChan, fileConvChan, coordinator, errs) - go converterCollator(s, convertResults, errs) - - for _, ev := range sourceChanged { - - if ev.Op&fsnotify.Remove == fsnotify.Remove { - //remove the file & a create will follow + // Some editors (Vim) sometimes issue only a Rename operation when writing an existing file + // Sometimes a rename operation means that file has been renamed other times it means + // it's been updated + if ev.Op&fsnotify.Rename == fsnotify.Rename { + // If the file is still on disk, it's 
only been updated, if it's not, it's been moved + if ex, err := afero.Exists(hugofs.SourceFs, ev.Name); !ex || err != nil { path, _ := helpers.GetRelativePath(ev.Name, s.absContentDir()) s.RemovePageByPath(path) continue } - - // Some editors (Vim) sometimes issue only a Rename operation when writing an existing file - // Sometimes a rename operation means that file has been renamed other times it means - // it's been updated - if ev.Op&fsnotify.Rename == fsnotify.Rename { - // If the file is still on disk, it's only been updated, if it's not, it's been moved - if ex, err := afero.Exists(hugofs.SourceFs, ev.Name); !ex || err != nil { - path, _ := helpers.GetRelativePath(ev.Name, s.absContentDir()) - s.RemovePageByPath(path) - continue - } - } - - file, err := s.ReReadFile(ev.Name) - if err != nil { - errs <- err - } - - filechan <- file } - // we close the filechan as we have sent everything we want to send to it. - // this will tell the sourceReaders to stop iterating on that channel - close(filechan) - // waiting for the sourceReaders to all finish - wg.Wait() - // Now closing readResults as this will tell the incrementalReadCollator to - // stop iterating over that. - close(readResults) + file, err := s.ReReadFile(ev.Name) + if err != nil { + errs <- err + } - // once readResults is finished it will close coordinator and move along - <-coordinator - // allow that routine to finish, then close page & fileconvchan as we've sent - // everything to them we need to. - close(pageChan) - close(fileConvChan) + filechan <- file + } + // we close the filechan as we have sent everything we want to send to it. + // this will tell the sourceReaders to stop iterating on that channel + close(filechan) - wg2.Wait() - close(convertResults) + // waiting for the sourceReaders to all finish + wg.Wait() + // Now closing readResults as this will tell the incrementalReadCollator to + // stop iterating over that. 
+ close(readResults) - s.timerStep("read & convert pages from source") + // once readResults is finished it will close coordinator and move along + <-coordinator + // allow that routine to finish, then close page & fileconvchan as we've sent + // everything to them we need to. + close(pageChan) + close(fileConvChan) + wg2.Wait() + close(convertResults) + + s.timerStep("read & convert pages from source") + + if len(sourceChanged) > 0 { s.setupPrevNext() if err = s.BuildSiteMeta(); err != nil { return err