From 7285e74090852b5d52f25e577850fa75f4aa8573 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bj=C3=B8rn=20Erik=20Pedersen?=
Date: Sun, 24 Dec 2023 19:11:05 +0100
Subject: [PATCH] all: Rework page store, add a dynacache, improve partial rebuilds, and some general spring cleaning

There are some breaking changes in this commit, see #11455.

Closes #11455
Closes #11549

This fixes a set of bugs (see the issue list) and also pays down some technical debt accumulated over the years. We now build with Staticcheck enabled in the CI build.

Performance should be about the same as before for regular-sized Hugo sites, but Hugo should perform and scale much better on larger data sets, as objects that use lots of memory (e.g. rendered Markdown, big JSON files read into maps with transform.Unmarshal, etc.) will now be garbage collected automatically if needed. Performance of partial rebuilds when running the server in fast render mode should be the same, but the change detection should be much more accurate.

A list of the notable new features:

* A new dependency tracker that covers (almost) all of Hugo's API and is used to do fine-grained partial rebuilds when running the server.
* A new and simpler tree document store which allows fast lookups and prefix-walking in all dimensions (e.g. language) concurrently.
* You can now configure an upper memory limit, allowing for much larger data sets and/or running on lower-specced PCs.
* We have lifted the "no resources in sub folders" restriction for branch bundles (e.g. sections).

Memory Limit:

Hugo will, by default, set aside a quarter of the total system memory, but you can set this via the OS environment variable HUGO_MEMORYLIMIT (in gigabytes). This is backed by a partitioned LRU cache used throughout Hugo, a cache that gets dynamically resized in low-memory situations, allowing Go's garbage collector to free the memory.

New Dependency Tracker:

Hugo has had a rule-based, coarse-grained approach to server rebuilds that has worked mostly pretty well, but there have been some surprises (e.g. stale content). This is now revamped with a new dependency tracker that can quickly calculate the delta given a changed resource (e.g. a content file, template, JS file, etc.). This handles transitive relations, e.g. $page -> js.Build -> JS import, or $page1.Content -> render hook -> site.GetPage -> $page2.Title, or $page1.Content -> shortcode -> partial -> site.RegularPages -> $page2.Content -> shortcode ..., and should also handle changes to aggregated values (e.g. site.Lastmod) effectively.

This covers all of Hugo's API with two known exceptions (a list that may not be fully exhaustive):

* Changes to files loaded with the template func os.ReadFile may not be handled correctly. We recommend loading resources with resources.Get.
* Changes to Hugo objects (e.g. Page) passed in the template context to lang.Translate may not be detected correctly. We recommend keeping i18n templates simple and passing little data context other than simple types such as strings and numbers.

Note that the cachebuster configuration (when A changes, then rebuild B) works well with the above, but we recommend that you revise that configuration, as in most situations it should no longer be needed. One example where it is still needed is TailwindCSS, where changes to hugo_stats.json are used to trigger new CSS rebuilds.

Document Store:

Previously, a little simplified, we split the document store (where we store pages and resources) into a tree per language.
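For orientation, here is a minimal, purely illustrative sketch of the old per-language layout versus the new single-tree layout; the type and field names are hypothetical and do not match Hugo's actual internals (the real implementation lives in the new hugolib/doctree package added by this commit).

    // Purely illustrative sketch; names are hypothetical, not Hugo's internals.
    package docstore

    // Old, simplified model: a separate path-keyed tree per language.
    type perLanguageStore struct {
        // "en" -> its own tree of pages/resources, "nn" -> another tree, ...
        treeByLanguage map[string]map[string]any
    }

    // New, simplified model in this commit: one tree keyed by path, with the
    // language as a dimension of each node, so all languages can be viewed
    // concurrently and translations of the same path sit next to each other.
    type dimensionalStore struct {
        // path -> values indexed by language dimension, e.g. 0 = "en", 1 = "nn".
        nodesByPath map[string][]any
    }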
This worked pretty well, but the structure made some operations harder than they needed to be. We have now restructured it into one Radix tree for all languages. Internally the language is considered to be a dimension of that tree, and the tree can be viewed in all dimensions concurrently. This makes some operations re. language simpler (e.g. finding translations is just a slice range), but the idea is that it should also be relatively inexpensive to add more dimensions if needed (e.g. role). Fixes #10169 Fixes #10364 Fixes #10482 Fixes #10630 Fixes #10656 Fixes #10694 Fixes #10918 Fixes #11262 Fixes #11439 Fixes #11453 Fixes #11457 Fixes #11466 Fixes #11540 Fixes #11551 Fixes #11556 Fixes #11654 Fixes #11661 Fixes #11663 Fixes #11664 Fixes #11669 Fixes #11671 Fixes #11807 Fixes #11808 Fixes #11809 Fixes #11815 Fixes #11840 Fixes #11853 Fixes #11860 Fixes #11883 Fixes #11904 Fixes #7388 Fixes #7425 Fixes #7436 Fixes #7544 Fixes #7882 Fixes #7960 Fixes #8255 Fixes #8307 Fixes #8863 Fixes #8927 Fixes #9192 Fixes #9324 --- .github/workflows/test.yml | 6 + cache/dynacache/dynacache.go | 550 ++++ cache/dynacache/dynacache_test.go | 175 ++ cache/filecache/filecache.go | 8 +- cache/filecache/filecache_test.go | 12 +- cache/filecache/integration_test.go | 8 +- cache/namedmemcache/named_cache.go | 78 - cache/namedmemcache/named_cache_test.go | 80 - commands/commandeer.go | 14 +- commands/commands.go | 3 +- commands/config.go | 4 +- commands/convert.go | 6 +- commands/deploy.go | 5 +- commands/deploy_off.go | 4 +- commands/env.go | 2 +- commands/gen.go | 8 +- commands/helpers.go | 5 +- commands/hugo_windows.go | 2 +- commands/hugobuilder.go | 141 +- commands/import.go | 15 +- commands/list.go | 15 +- commands/mod.go | 4 +- commands/new.go | 4 +- commands/release.go | 3 +- commands/server.go | 75 +- common/constants/constants.go | 21 + common/hcontext/context.go | 46 + common/herrors/error_locator.go | 3 +- common/herrors/error_locator_test.go | 2 +- common/herrors/errors.go | 21 +- common/herrors/errors_test.go | 3 +- common/herrors/file_error.go | 22 +- common/herrors/file_error_test.go | 6 +- common/hreflect/helpers.go | 41 +- common/hreflect/helpers_test.go | 37 + common/hstrings/strings.go | 7 +- common/hstrings/strings_test.go | 4 +- common/htime/integration_test.go | 2 +- common/hugio/copy.go | 9 +- common/hugio/hasBytesWriter.go | 2 +- common/hugio/hasBytesWriter_test.go | 2 +- common/hugio/readers.go | 20 + common/hugo/hugo.go | 9 +- common/loggers/handlerdefault.go | 6 +- common/loggers/handlersmisc.go | 23 +- common/loggers/handlerterminal.go | 4 +- common/loggers/logger.go | 57 +- common/loggers/logger_test.go | 2 +- common/loggers/loggerglobal.go | 2 +- common/maps/cache.go | 90 + common/maps/maps.go | 17 +- common/maps/params.go | 10 +- common/paths/path.go | 222 +- common/paths/path_test.go | 114 +- common/paths/pathparser.go | 494 +++ common/paths/pathparser_test.go | 351 +++ common/paths/paths_integration_test.go | 80 + common/paths/pathtype_string.go | 27 + common/paths/url.go | 10 + common/predicate/predicate.go | 72 + common/predicate/predicate_test.go | 83 + common/rungroup/rungroup.go | 93 + common/rungroup/rungroup_test.go | 44 + common/terminal/colors.go | 2 +- common/types/css/csstypes.go | 2 +- common/types/evictingqueue.go | 15 +- common/types/hstring/stringtypes.go | 2 +- common/types/hstring/stringtypes_test.go | 2 +- common/types/types.go | 13 + common/urls/baseURL.go | 28 +- common/urls/baseURL_test.go | 28 +- compare/compare.go | 13 + config/allconfig/allconfig.go | 
136 +- config/allconfig/alldecoders.go | 2 +- config/allconfig/configlanguage.go | 12 +- config/allconfig/docshelper.go | 3 +- config/allconfig/integration_test.go | 7 +- config/allconfig/load.go | 12 +- config/commonConfig.go | 23 +- config/commonConfig_test.go | 14 +- config/configProvider.go | 3 + config/env.go | 37 + config/namespace.go | 3 +- config/namespace_test.go | 12 +- config/testconfig/testconfig.go | 5 +- create/content.go | 96 +- create/content_test.go | 131 +- create/skeletons/skeletons.go | 4 +- deploy/deploy.go | 6 +- deploy/deploy_test.go | 5 +- deps/deps.go | 21 +- go.mod | 18 +- go.sum | 359 ++- helpers/content.go | 11 +- helpers/content_test.go | 6 +- helpers/general.go | 32 +- helpers/general_test.go | 28 +- helpers/path.go | 105 +- helpers/path_test.go | 5 +- helpers/pathspec.go | 6 +- helpers/processing_stats.go | 2 - helpers/url.go | 47 - helpers/url_test.go | 37 +- htesting/test_helpers.go | 38 +- hugofs/component_fs.go | 284 ++ hugofs/decorators.go | 149 +- hugofs/{language_merge.go => dirsmerger.go} | 32 +- hugofs/fileinfo.go | 277 +- hugofs/fileinfo_test.go | 4 - hugofs/filename_filter_fs.go | 50 +- hugofs/filename_filter_fs_test.go | 13 +- hugofs/files/classifier.go | 95 +- hugofs/files/classifier_test.go | 11 - hugofs/filter_fs.go | 344 --- hugofs/filter_fs_test.go | 46 - hugofs/fs.go | 40 +- hugofs/fs_test.go | 7 +- hugofs/glob.go | 17 +- hugofs/glob/filename_filter.go | 31 +- hugofs/glob/filename_filter_test.go | 2 +- hugofs/glob/glob.go | 12 +- hugofs/glob_test.go | 36 +- hugofs/hasbytes_fs.go | 3 +- hugofs/noop_fs.go | 49 +- hugofs/nosymlink_fs.go | 160 - hugofs/nosymlink_test.go | 146 - hugofs/openfiles_fs.go | 110 + hugofs/rootmapping_fs.go | 446 ++- hugofs/rootmapping_fs_test.go | 174 +- hugofs/slice_fs.go | 303 -- hugofs/walk.go | 262 +- hugofs/walk_test.go | 127 +- hugolib/404_test.go | 21 +- hugolib/alias.go | 2 +- hugolib/alias_test.go | 3 + hugolib/breaking_changes_test.go | 2 +- hugolib/cascade_test.go | 111 +- hugolib/collections_test.go | 6 +- hugolib/config_test.go | 59 +- hugolib/configdir_test.go | 4 +- hugolib/content_factory.go | 27 +- hugolib/content_factory_test.go | 7 +- hugolib/content_map.go | 1229 ++------ hugolib/content_map_page.go | 2733 +++++++++++------ hugolib/content_map_test.go | 448 +-- hugolib/content_render_hooks_test.go | 369 +-- hugolib/dates_test.go | 10 +- hugolib/disableKinds_test.go | 22 +- hugolib/doctree/dimensions.go | 43 + hugolib/doctree/dimensions_test.go | 37 + hugolib/doctree/nodeshiftree_test.go | 374 +++ hugolib/doctree/nodeshifttree.go | 433 +++ hugolib/doctree/simpletree.go | 91 + hugolib/doctree/support.go | 251 ++ hugolib/doctree/treeshifttree.go | 101 + hugolib/doctree/treeshifttree_test.go | 28 + hugolib/fileInfo.go | 66 +- hugolib/filesystems/basefs.go | 437 ++- hugolib/filesystems/basefs_test.go | 317 +- hugolib/frontmatter_test.go | 3 +- hugolib/hugo_modules_test.go | 394 +-- hugolib/hugo_sites.go | 578 +--- hugolib/hugo_sites_build.go | 613 +++- hugolib/hugo_sites_build_errors_test.go | 38 +- hugolib/hugo_sites_build_test.go | 1093 +------ hugolib/hugo_sites_multihost_test.go | 257 +- hugolib/hugo_sites_rebuild_test.go | 316 -- hugolib/hugo_sites_test.go | 58 + hugolib/hugo_smoke_test.go | 533 ++-- hugolib/image_test.go | 9 +- hugolib/integration_test.go | 4 +- hugolib/integrationtest_builder.go | 189 +- hugolib/language_content_dir_test.go | 522 +--- hugolib/menu_test.go | 82 +- hugolib/mount_filters_test.go | 7 +- hugolib/page.go | 796 ++--- hugolib/page__common.go | 53 +- 
hugolib/page__content.go | 738 ++++- hugolib/page__data.go | 24 +- hugolib/page__fragments_test.go | 4 +- hugolib/page__meta.go | 524 ++-- hugolib/page__new.go | 328 +- hugolib/page__output.go | 55 +- hugolib/page__paginator.go | 3 - hugolib/page__paths.go | 76 +- hugolib/page__per_output.go | 661 +--- hugolib/page__tree.go | 230 +- hugolib/page_test.go | 421 +-- hugolib/page_unwrap.go | 3 + hugolib/pagebundler_test.go | 1141 ++----- hugolib/pagecollections.go | 377 +-- hugolib/pagecollections_test.go | 213 +- hugolib/pages_capture.go | 679 ++-- hugolib/pages_capture_test.go | 78 - hugolib/pages_language_merge_test.go | 4 +- hugolib/pages_process.go | 203 -- hugolib/paths/paths.go | 3 +- hugolib/rebuild_test.go | 1256 ++++++++ hugolib/rendershortcodes_test.go | 32 +- hugolib/renderstring_test.go | 7 +- hugolib/resource_chain_test.go | 45 +- hugolib/rss_test.go | 2 +- hugolib/securitypolicies_test.go | 5 +- hugolib/shortcode.go | 58 +- hugolib/shortcode_page.go | 15 +- hugolib/shortcode_test.go | 12 +- hugolib/site.go | 748 ++--- hugolib/site_benchmark_new_test.go | 13 +- hugolib/site_new.go | 206 +- hugolib/site_output.go | 2 +- hugolib/site_output_test.go | 6 +- hugolib/site_render.go | 284 +- hugolib/site_sections_test.go | 111 +- hugolib/site_stats_test.go | 35 +- hugolib/site_test.go | 13 +- hugolib/site_url_test.go | 20 +- hugolib/sitemap_test.go | 160 +- hugolib/taxonomy_test.go | 114 +- hugolib/template_test.go | 62 - hugolib/testhelpers_test.go | 128 +- hugolib/translations.go | 57 - identity/finder.go | 336 ++ identity/finder_test.go | 58 + identity/identity.go | 566 +++- identity/identity_test.go | 200 +- identity/identityhash.go | 6 +- identity/identityhash_test.go | 3 +- identity/identitytesting/identitytesting.go | 5 + identity/predicate_identity.go | 78 + identity/predicate_identity_test.go | 58 + identity/question.go | 57 + identity/question_test.go | 38 + langs/i18n/i18n_test.go | 13 +- langs/i18n/integration_test.go | 2 +- langs/i18n/translationProvider.go | 45 +- langs/language.go | 16 +- lazy/init.go | 4 +- main.go | 2 +- main_test.go | 23 +- markup/asciidocext/convert_test.go | 2 +- markup/blackfriday/anchors.go | 2 +- markup/converter/converter.go | 3 - markup/converter/hooks/hooks.go | 4 - .../goldmark/codeblocks/integration_test.go | 5 +- markup/goldmark/codeblocks/render.go | 4 +- markup/goldmark/convert.go | 28 +- markup/goldmark/convert_test.go | 5 +- markup/goldmark/goldmark_config/config.go | 7 +- markup/goldmark/images/integration_test.go | 9 +- markup/goldmark/internal/render/context.go | 9 +- markup/goldmark/links/integration_test.go | 113 - markup/goldmark/links/transform.go | 57 - markup/goldmark/render_hooks.go | 14 - markup/goldmark/toc_test.go | 2 +- markup/highlight/chromalexers/chromalexers.go | 2 +- markup/highlight/highlight.go | 12 +- markup/highlight/highlight_test.go | 1 - markup/highlight/integration_test.go | 2 +- markup/internal/attributes/attributes.go | 2 +- markup/markup.go | 6 + markup/markup_test.go | 2 +- markup/org/convert_test.go | 2 +- markup/tableofcontents/integration_test.go | 2 +- media/config.go | 2 +- media/config_test.go | 2 +- media/mediaType.go | 18 - minifiers/config_test.go | 2 +- modules/client.go | 2 +- modules/client_test.go | 4 +- modules/collect.go | 11 +- modules/config.go | 8 +- modules/npm/package_builder.go | 28 +- navigation/menu.go | 7 +- navigation/menu_cache.go | 6 - navigation/pagemenus.go | 2 +- output/config.go | 3 +- output/config_test.go | 2 +- output/layouts/layout.go | 24 +- output/outputFormat.go | 35 +- 
parser/lowercase_camel_json.go | 13 +- parser/metadecoders/decoder.go | 24 +- parser/pageparser/pagelexer.go | 19 +- parser/pageparser/pagelexer_intro.go | 6 +- parser/pageparser/pageparser.go | 34 +- parser/pageparser/pageparser_intro_test.go | 54 +- .../pageparser/pageparser_shortcode_test.go | 168 +- parser/pageparser/pageparser_test.go | 5 +- publisher/publisher.go | 2 +- related/integration_test.go | 4 +- related/inverted_index.go | 3 - resources/docs.go | 2 +- resources/image.go | 61 +- resources/image_cache.go | 177 +- resources/image_extended_test.go | 2 +- resources/image_test.go | 77 +- resources/images/auto_orient.go | 2 +- resources/images/exif/exif.go | 2 +- resources/images/exif/exif_test.go | 15 +- resources/images/image_resource.go | 2 +- resources/images/opacity.go | 2 +- resources/images/padding.go | 2 +- resources/images/process.go | 2 +- resources/integration_test.go | 4 +- resources/internal/resourcepaths.go | 107 + resources/kinds/kinds.go | 23 +- resources/kinds/kinds_test.go | 2 +- resources/page/page.go | 47 +- .../page_generate/generate_page_wrappers.go | 96 +- resources/page/page_lazy_contentprovider.go | 4 +- resources/page/page_marshaljson.autogen.go | 180 +- resources/page/page_matcher.go | 4 +- resources/page/page_nop.go | 42 +- resources/page/page_paths.go | 383 ++- resources/page/page_paths_test.go | 295 -- resources/page/pagegroup.go | 4 +- resources/page/pagemeta/page_frontmatter.go | 12 +- .../page/pagemeta/page_frontmatter_test.go | 69 +- resources/page/pages.go | 4 +- resources/page/pages_related.go | 3 +- resources/page/pages_sort.go | 26 +- resources/page/pages_sort_test.go | 3 +- resources/page/permalinks.go | 14 +- resources/page/permalinks_integration_test.go | 5 +- resources/page/permalinks_test.go | 5 +- resources/page/site.go | 50 +- resources/page/siteidentities/identities.go | 34 + resources/page/taxonomy.go | 2 +- resources/page/testhelpers_page_test.go | 38 - resources/page/testhelpers_test.go | 40 +- resources/page/zero_file.autogen.go | 72 - resources/postpub/postpub.go | 2 +- resources/resource.go | 634 ++-- resources/resource/dates.go | 4 + resources/resource/resources.go | 25 +- resources/resource/resourcetypes.go | 74 +- resources/resource_cache.go | 242 +- resources/resource_cache_test.go | 58 - .../resource_factories/bundler/bundler.go | 10 +- resources/resource_factories/create/create.go | 70 +- .../create/integration_test.go | 7 +- resources/resource_factories/create/remote.go | 12 +- resources/resource_metadata.go | 208 +- resources/resource_spec.go | 295 +- resources/resource_spec_test.go | 48 + resources/resource_test.go | 48 - .../resource_transformers/babel/babel.go | 4 +- .../htesting/testhelpers.go | 45 +- .../integrity/integrity.go | 3 +- .../integrity/integrity_test.go | 10 +- resources/resource_transformers/js/build.go | 5 +- .../js/integration_test.go | 8 +- resources/resource_transformers/js/options.go | 10 +- .../resource_transformers/js/options_test.go | 24 +- .../minifier/minify_test.go | 9 +- .../postcss/integration_test.go | 10 +- .../resource_transformers/postcss/postcss.go | 42 +- .../postcss/postcss_test.go | 7 +- .../templates/execute_as_template.go | 3 +- .../tocss/dartsass/client.go | 7 +- .../tocss/dartsass/transform.go | 18 +- .../tocss/internal/sass/helpers.go | 3 +- .../tocss/internal/sass/helpers_test.go | 3 +- .../tocss/scss/client.go | 5 +- .../resource_transformers/tocss/scss/tocss.go | 3 +- resources/testhelpers_test.go | 63 +- resources/transform.go | 159 +- resources/transform_integration_test.go 
| 50 + resources/transform_test.go | 21 +- scripts/fork_go_templates/main.go | 3 + source/content_directory_test.go | 2 +- source/fileInfo.go | 258 +- source/fileInfo_test.go | 58 - source/filesystem.go | 59 +- source/filesystem_test.go | 78 +- source/sourceSpec.go | 35 +- .../commands/hugo__processingstats.txt | 15 + .../commands/hugo__processingstats2.txt | 16 + testscripts/commands/mod_npm.txt | 48 +- testscripts/commands/mod_npm_withexisting.txt | 57 + .../commands/new_content_archetypedir.txt | 40 + testscripts/commands/server.txt | 1 + tpl/collections/apply.go | 2 +- tpl/collections/apply_test.go | 5 + tpl/collections/collections.go | 7 +- tpl/collections/collections_test.go | 4 +- tpl/collections/integration_test.go | 7 +- tpl/collections/where.go | 4 +- tpl/data/data.go | 4 +- tpl/data/resources.go | 3 +- tpl/data/resources_test.go | 7 +- tpl/debug/integration_test.go | 4 +- tpl/diagrams/diagrams.go | 2 +- tpl/diagrams/goat.go | 2 +- tpl/diagrams/init.go | 2 +- tpl/fmt/integration_test.go | 3 +- tpl/images/integration_test.go | 2 +- tpl/internal/go_templates/staticcheck.conf | 1 + .../texttemplate/hugo_template.go | 19 +- .../texttemplate/hugo_template_test.go | 4 +- tpl/internal/templatefuncsRegistry.go | 2 +- tpl/js/js.go | 1 - tpl/lang/lang_test.go | 9 +- tpl/math/math_test.go | 4 +- tpl/openapi/openapi3/integration_test.go | 2 +- tpl/openapi/openapi3/openapi3.go | 28 +- tpl/os/integration_test.go | 2 +- tpl/page/init.go | 4 +- tpl/page/integration_test.go | 31 +- tpl/partials/integration_test.go | 5 +- tpl/partials/partials.go | 31 +- tpl/reflect/reflect_test.go | 2 - tpl/resources/integration_test.go | 7 +- tpl/resources/resources.go | 16 +- tpl/safe/init.go | 5 - tpl/safe/safe.go | 7 - tpl/safe/safe_test.go | 27 - tpl/site/init.go | 2 +- tpl/strings/strings.go | 12 +- tpl/template.go | 76 +- tpl/template_info.go | 14 - tpl/template_test.go | 2 - tpl/templates/integration_test.go | 2 +- tpl/time/init.go | 2 +- tpl/time/time.go | 42 +- tpl/tplimpl/template.go | 136 +- tpl/tplimpl/template_ast_transformers.go | 47 +- tpl/tplimpl/template_ast_transformers_test.go | 1 + tpl/tplimpl/template_errors.go | 23 +- tpl/tplimpl/template_funcs.go | 70 +- tpl/transform/integration_test.go | 4 +- tpl/transform/transform.go | 22 +- tpl/transform/unmarshal.go | 43 +- tpl/transform/unmarshal_test.go | 9 +- .../livereloadinject/livereloadinject.go | 2 +- .../livereloadinject/livereloadinject_test.go | 4 +- unused.sh | 1 - watcher/filenotify/poller_test.go | 5 +- 437 files changed, 19304 insertions(+), 18384 deletions(-) create mode 100644 cache/dynacache/dynacache.go create mode 100644 cache/dynacache/dynacache_test.go delete mode 100644 cache/namedmemcache/named_cache.go delete mode 100644 cache/namedmemcache/named_cache_test.go create mode 100644 common/hcontext/context.go create mode 100644 common/maps/cache.go create mode 100644 common/paths/pathparser.go create mode 100644 common/paths/pathparser_test.go create mode 100644 common/paths/paths_integration_test.go create mode 100644 common/paths/pathtype_string.go create mode 100644 common/predicate/predicate.go create mode 100644 common/predicate/predicate_test.go create mode 100644 common/rungroup/rungroup.go create mode 100644 common/rungroup/rungroup_test.go create mode 100644 hugofs/component_fs.go rename hugofs/{language_merge.go => dirsmerger.go} (55%) delete mode 100644 hugofs/filter_fs.go delete mode 100644 hugofs/filter_fs_test.go delete mode 100644 hugofs/nosymlink_fs.go delete mode 100644 hugofs/nosymlink_test.go create mode 100644 
hugofs/openfiles_fs.go delete mode 100644 hugofs/slice_fs.go create mode 100644 hugolib/doctree/dimensions.go create mode 100644 hugolib/doctree/dimensions_test.go create mode 100644 hugolib/doctree/nodeshiftree_test.go create mode 100644 hugolib/doctree/nodeshifttree.go create mode 100644 hugolib/doctree/simpletree.go create mode 100644 hugolib/doctree/support.go create mode 100644 hugolib/doctree/treeshifttree.go create mode 100644 hugolib/doctree/treeshifttree_test.go delete mode 100644 hugolib/hugo_sites_rebuild_test.go create mode 100644 hugolib/hugo_sites_test.go delete mode 100644 hugolib/pages_capture_test.go delete mode 100644 hugolib/pages_process.go create mode 100644 hugolib/rebuild_test.go delete mode 100644 hugolib/translations.go create mode 100644 identity/finder.go create mode 100644 identity/finder_test.go create mode 100644 identity/identitytesting/identitytesting.go create mode 100644 identity/predicate_identity.go create mode 100644 identity/predicate_identity_test.go create mode 100644 identity/question.go create mode 100644 identity/question_test.go delete mode 100644 markup/goldmark/links/integration_test.go delete mode 100644 markup/goldmark/links/transform.go create mode 100644 resources/internal/resourcepaths.go delete mode 100644 resources/page/page_paths_test.go create mode 100644 resources/page/siteidentities/identities.go delete mode 100644 resources/page/testhelpers_page_test.go delete mode 100644 resources/resource_cache_test.go create mode 100644 resources/resource_spec_test.go delete mode 100644 resources/resource_test.go create mode 100644 resources/transform_integration_test.go delete mode 100644 source/fileInfo_test.go create mode 100644 testscripts/commands/hugo__processingstats2.txt create mode 100644 testscripts/commands/mod_npm_withexisting.txt create mode 100644 testscripts/commands/new_content_archetypedir.txt create mode 100644 tpl/internal/go_templates/staticcheck.conf delete mode 100755 unused.sh diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 3a76b768c..1fd70492e 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -87,6 +87,12 @@ jobs: curl -LJO "https://github.com/sass/dart-sass/releases/download/${env:SASS_VERSION}/dart-sass-${env:SASS_VERSION}-windows-x64.zip"; Expand-Archive -Path "dart-sass-${env:SASS_VERSION}-windows-x64.zip" -DestinationPath .; echo "$env:GITHUB_WORKSPACE/dart-sass/" | Out-File -FilePath $Env:GITHUB_PATH -Encoding utf-8 -Append + - if: matrix.os == 'ubuntu-latest' + name: Install staticcheck + run: go install honnef.co/go/tools/cmd/staticcheck@latest + - if: matrix.os == 'ubuntu-latest' + name: Run staticcheck + run: staticcheck ./... - if: matrix.os != 'windows-latest' name: Check run: | diff --git a/cache/dynacache/dynacache.go b/cache/dynacache/dynacache.go new file mode 100644 index 000000000..bb3f7b098 --- /dev/null +++ b/cache/dynacache/dynacache.go @@ -0,0 +1,550 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package dynacache + +import ( + "context" + "fmt" + "math" + "path" + "regexp" + "runtime" + "sync" + "time" + + "github.com/bep/lazycache" + "github.com/bep/logg" + "github.com/gohugoio/hugo/common/herrors" + "github.com/gohugoio/hugo/common/loggers" + "github.com/gohugoio/hugo/common/paths" + "github.com/gohugoio/hugo/common/rungroup" + "github.com/gohugoio/hugo/config" + "github.com/gohugoio/hugo/helpers" + "github.com/gohugoio/hugo/identity" + "github.com/gohugoio/hugo/resources/resource" +) + +const minMaxSize = 10 + +// New creates a new cache. +func New(opts Options) *Cache { + if opts.CheckInterval == 0 { + opts.CheckInterval = time.Second * 2 + } + + if opts.MaxSize == 0 { + opts.MaxSize = 100000 + } + if opts.Log == nil { + panic("nil Log") + } + + if opts.MinMaxSize == 0 { + opts.MinMaxSize = 30 + } + + stats := &stats{ + opts: opts, + adjustmentFactor: 1.0, + currentMaxSize: opts.MaxSize, + availableMemory: config.GetMemoryLimit(), + } + + infol := opts.Log.InfoCommand("dynacache") + + c := &Cache{ + partitions: make(map[string]PartitionManager), + opts: opts, + stats: stats, + infol: infol, + } + + c.stop = c.start() + + return c +} + +// Options for the cache. +type Options struct { + Log loggers.Logger + CheckInterval time.Duration + MaxSize int + MinMaxSize int + Running bool +} + +// Options for a partition. +type OptionsPartition struct { + // When to clear the this partition. + ClearWhen ClearWhen + + // Weight is a number between 1 and 100 that indicates how, in general, how big this partition may get. + Weight int +} + +func (o OptionsPartition) WeightFraction() float64 { + return float64(o.Weight) / 100 +} + +func (o OptionsPartition) CalculateMaxSize(maxSizePerPartition int) int { + return int(math.Floor(float64(maxSizePerPartition) * o.WeightFraction())) +} + +// A dynamic partitioned cache. +type Cache struct { + mu sync.RWMutex + + partitions map[string]PartitionManager + opts Options + infol logg.LevelLogger + + stats *stats + stopOnce sync.Once + stop func() +} + +// ClearMatching clears all partition for which the predicate returns true. +func (c *Cache) ClearMatching(predicate func(k, v any) bool) { + g := rungroup.Run[PartitionManager](context.Background(), rungroup.Config[PartitionManager]{ + NumWorkers: len(c.partitions), + Handle: func(ctx context.Context, partition PartitionManager) error { + partition.clearMatching(predicate) + return nil + }, + }) + + for _, p := range c.partitions { + g.Enqueue(p) + } + + g.Wait() +} + +// ClearOnRebuild prepares the cache for a new rebuild taking the given changeset into account. +func (c *Cache) ClearOnRebuild(changeset ...identity.Identity) { + g := rungroup.Run[PartitionManager](context.Background(), rungroup.Config[PartitionManager]{ + NumWorkers: len(c.partitions), + Handle: func(ctx context.Context, partition PartitionManager) error { + partition.clearOnRebuild(changeset...) + return nil + }, + }) + + for _, p := range c.partitions { + g.Enqueue(p) + } + + g.Wait() + + // Clear any entries marked as stale above. 
+ g = rungroup.Run[PartitionManager](context.Background(), rungroup.Config[PartitionManager]{ + NumWorkers: len(c.partitions), + Handle: func(ctx context.Context, partition PartitionManager) error { + partition.clearStale() + return nil + }, + }) + + for _, p := range c.partitions { + g.Enqueue(p) + } + + g.Wait() +} + +type keysProvider interface { + Keys() []string +} + +// Keys returns a list of keys in all partitions. +func (c *Cache) Keys(predicate func(s string) bool) []string { + if predicate == nil { + predicate = func(s string) bool { return true } + } + var keys []string + for pn, g := range c.partitions { + pkeys := g.(keysProvider).Keys() + for _, k := range pkeys { + p := path.Join(pn, k) + if predicate(p) { + keys = append(keys, p) + } + } + + } + return keys +} + +func calculateMaxSizePerPartition(maxItemsTotal, totalWeightQuantity, numPartitions int) int { + if numPartitions == 0 { + panic("numPartitions must be > 0") + } + if totalWeightQuantity == 0 { + panic("totalWeightQuantity must be > 0") + } + + avgWeight := float64(totalWeightQuantity) / float64(numPartitions) + return int(math.Floor(float64(maxItemsTotal) / float64(numPartitions) * (100.0 / avgWeight))) +} + +// Stop stops the cache. +func (c *Cache) Stop() { + c.stopOnce.Do(func() { + c.stop() + }) +} + +func (c *Cache) adjustCurrentMaxSize() { + c.mu.RLock() + defer c.mu.RUnlock() + + if len(c.partitions) == 0 { + return + } + var m runtime.MemStats + runtime.ReadMemStats(&m) + s := c.stats + s.memstatsCurrent = m + // fmt.Printf("\n\nAvailable = %v\nAlloc = %v\nTotalAlloc = %v\nSys = %v\nNumGC = %v\nMaxSize = %d\nAdjustmentFactor=%f\n\n", helpers.FormatByteCount(s.availableMemory), helpers.FormatByteCount(m.Alloc), helpers.FormatByteCount(m.TotalAlloc), helpers.FormatByteCount(m.Sys), m.NumGC, c.stats.currentMaxSize, s.adjustmentFactor) + + if s.availableMemory >= s.memstatsCurrent.Alloc { + if s.adjustmentFactor <= 1.0 { + s.adjustmentFactor += 0.2 + } + } else { + // We're low on memory. + s.adjustmentFactor -= 0.4 + } + + if s.adjustmentFactor <= 0 { + s.adjustmentFactor = 0.05 + } + + if !s.adjustCurrentMaxSize() { + return + } + + totalWeight := 0 + for _, pm := range c.partitions { + totalWeight += pm.getOptions().Weight + } + + maxSizePerPartition := calculateMaxSizePerPartition(c.stats.currentMaxSize, totalWeight, len(c.partitions)) + + evicted := 0 + for _, p := range c.partitions { + evicted += p.adjustMaxSize(p.getOptions().CalculateMaxSize(maxSizePerPartition)) + } + + if evicted > 0 { + c.infol. + WithFields( + logg.Fields{ + {Name: "evicted", Value: evicted}, + {Name: "numGC", Value: m.NumGC}, + {Name: "limit", Value: helpers.FormatByteCount(c.stats.availableMemory)}, + {Name: "alloc", Value: helpers.FormatByteCount(m.Alloc)}, + {Name: "totalAlloc", Value: helpers.FormatByteCount(m.TotalAlloc)}, + }, + ).Logf("adjusted partitions' max size") + } +} + +func (c *Cache) start() func() { + ticker := time.NewTicker(c.opts.CheckInterval) + quit := make(chan struct{}) + + go func() { + for { + select { + case <-ticker.C: + c.adjustCurrentMaxSize() + case <-quit: + ticker.Stop() + return + } + } + }() + + return func() { + close(quit) + } +} + +var partitionNameRe = regexp.MustCompile(`^\/[a-zA-Z0-9]{4}(\/[a-zA-Z0-9]+)?(\/[a-zA-Z0-9]+)?`) + +// GetOrCreatePartition gets or creates a partition with the given name. 
+func GetOrCreatePartition[K comparable, V any](c *Cache, name string, opts OptionsPartition) *Partition[K, V] { + if c == nil { + panic("nil Cache") + } + if opts.Weight < 1 || opts.Weight > 100 { + panic("invalid Weight, must be between 1 and 100") + } + + if partitionNameRe.FindString(name) != name { + panic(fmt.Sprintf("invalid partition name %q", name)) + } + + c.mu.RLock() + p, found := c.partitions[name] + c.mu.RUnlock() + if found { + return p.(*Partition[K, V]) + } + + c.mu.Lock() + defer c.mu.Unlock() + + // Double check. + p, found = c.partitions[name] + if found { + return p.(*Partition[K, V]) + } + + // At this point, we don't know the the number of partitions or their configuration, but + // this will be re-adjusted later. + const numberOfPartitionsEstimate = 10 + maxSize := opts.CalculateMaxSize(c.opts.MaxSize / numberOfPartitionsEstimate) + + // Create a new partition and cache it. + partition := &Partition[K, V]{ + c: lazycache.New(lazycache.Options[K, V]{MaxEntries: maxSize}), + maxSize: maxSize, + trace: c.opts.Log.Logger().WithLevel(logg.LevelTrace).WithField("partition", name), + opts: opts, + } + c.partitions[name] = partition + + return partition +} + +// Partition is a partition in the cache. +type Partition[K comparable, V any] struct { + c *lazycache.Cache[K, V] + + zero V + + trace logg.LevelLogger + opts OptionsPartition + + maxSize int +} + +// GetOrCreate gets or creates a value for the given key. +func (p *Partition[K, V]) GetOrCreate(key K, create func(key K) (V, error)) (V, error) { + v, _, err := p.c.GetOrCreate(key, create) + return v, err +} + +// GetOrCreateWitTimeout gets or creates a value for the given key and times out if the create function +// takes too long. +func (p *Partition[K, V]) GetOrCreateWitTimeout(key K, duration time.Duration, create func(key K) (V, error)) (V, error) { + resultch := make(chan V, 1) + errch := make(chan error, 1) + + go func() { + v, _, err := p.c.GetOrCreate(key, create) + if err != nil { + errch <- err + return + } + resultch <- v + }() + + select { + case v := <-resultch: + return v, nil + case err := <-errch: + return p.zero, err + case <-time.After(duration): + return p.zero, &herrors.TimeoutError{ + Duration: duration, + } + } +} + +func (p *Partition[K, V]) clearMatching(predicate func(k, v any) bool) { + p.c.DeleteFunc(func(key K, v V) bool { + if predicate(key, v) { + p.trace.Log( + logg.StringFunc( + func() string { + return fmt.Sprintf("clearing cache key %v", key) + }, + ), + ) + return true + } + return false + }) +} + +func (p *Partition[K, V]) clearOnRebuild(changeset ...identity.Identity) { + opts := p.getOptions() + if opts.ClearWhen == ClearNever { + return + } + + if opts.ClearWhen == ClearOnRebuild { + // Clear all. + p.Clear() + return + } + + depsFinder := identity.NewFinder(identity.FinderConfig{}) + + shouldDelete := func(key K, v V) bool { + // We always clear elements marked as stale. + if resource.IsStaleAny(v) { + return true + } + + // Now check if this entry has changed based on the changeset + // based on filesystem events. + if len(changeset) == 0 { + // Nothing changed. + return false + } + + var probablyDependent bool + identity.WalkIdentitiesShallow(v, func(level int, id2 identity.Identity) bool { + for _, id := range changeset { + if r := depsFinder.Contains(id, id2, -1); r > 0 { + // It's probably dependent, evict from cache. + probablyDependent = true + return true + } + } + return false + }) + + return probablyDependent + } + + // First pass. 
+ // Second pass needs to be done in a separate loop to catch any + // elements marked as stale in the other partitions. + p.c.DeleteFunc(func(key K, v V) bool { + if shouldDelete(key, v) { + p.trace.Log( + logg.StringFunc( + func() string { + return fmt.Sprintf("first pass: clearing cache key %v", key) + }, + ), + ) + resource.MarkStale(v) + return true + } + return false + }) +} + +func (p *Partition[K, V]) Keys() []K { + var keys []K + p.c.DeleteFunc(func(key K, v V) bool { + keys = append(keys, key) + return false + }) + return keys +} + +func (p *Partition[K, V]) clearStale() { + p.c.DeleteFunc(func(key K, v V) bool { + isStale := resource.IsStaleAny(v) + if isStale { + p.trace.Log( + logg.StringFunc( + func() string { + return fmt.Sprintf("second pass: clearing cache key %v", key) + }, + ), + ) + } + + return isStale + }) +} + +// adjustMaxSize adjusts the max size of the and returns the number of items evicted. +func (p *Partition[K, V]) adjustMaxSize(newMaxSize int) int { + if newMaxSize < minMaxSize { + newMaxSize = minMaxSize + } + p.maxSize = newMaxSize + // fmt.Println("Adjusting max size of partition from", oldMaxSize, "to", newMaxSize) + return p.c.Resize(newMaxSize) +} + +func (p *Partition[K, V]) getMaxSize() int { + return p.maxSize +} + +func (p *Partition[K, V]) getOptions() OptionsPartition { + return p.opts +} + +func (p *Partition[K, V]) Clear() { + p.c.DeleteFunc(func(key K, v V) bool { + return true + }) +} + +func (p *Partition[K, V]) Get(ctx context.Context, key K) (V, bool) { + return p.c.Get(key) +} + +type PartitionManager interface { + adjustMaxSize(addend int) int + getMaxSize() int + getOptions() OptionsPartition + clearOnRebuild(changeset ...identity.Identity) + clearMatching(predicate func(k, v any) bool) + clearStale() +} + +const ( + ClearOnRebuild ClearWhen = iota + 1 + ClearOnChange + ClearNever +) + +type ClearWhen int + +type stats struct { + opts Options + memstatsCurrent runtime.MemStats + currentMaxSize int + availableMemory uint64 + + adjustmentFactor float64 +} + +func (s *stats) adjustCurrentMaxSize() bool { + newCurrentMaxSize := int(math.Floor(float64(s.opts.MaxSize) * s.adjustmentFactor)) + + if newCurrentMaxSize < s.opts.MaxSize { + newCurrentMaxSize = int(s.opts.MinMaxSize) + } + changed := newCurrentMaxSize != s.currentMaxSize + s.currentMaxSize = newCurrentMaxSize + return changed +} + +// CleanKey turns s into a format suitable for a cache key for this package. +// The key will be a Unix-styled path with a leading slash but no trailing slash. +func CleanKey(s string) string { + return path.Clean(paths.ToSlashPreserveLeading(s)) +} diff --git a/cache/dynacache/dynacache_test.go b/cache/dynacache/dynacache_test.go new file mode 100644 index 000000000..53de2385e --- /dev/null +++ b/cache/dynacache/dynacache_test.go @@ -0,0 +1,175 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package dynacache + +import ( + "path/filepath" + "testing" + + qt "github.com/frankban/quicktest" + "github.com/gohugoio/hugo/common/loggers" + "github.com/gohugoio/hugo/identity" + "github.com/gohugoio/hugo/resources/resource" +) + +var ( + _ resource.StaleInfo = (*testItem)(nil) + _ identity.Identity = (*testItem)(nil) +) + +type testItem struct { + name string + isStale bool +} + +func (t testItem) IsStale() bool { + return t.isStale +} + +func (t testItem) IdentifierBase() string { + return t.name +} + +func TestCache(t *testing.T) { + t.Parallel() + c := qt.New(t) + + cache := New(Options{ + Log: loggers.NewDefault(), + }) + + c.Cleanup(func() { + cache.Stop() + }) + + opts := OptionsPartition{Weight: 30} + + c.Assert(cache, qt.Not(qt.IsNil)) + + p1 := GetOrCreatePartition[string, testItem](cache, "/aaaa/bbbb", opts) + c.Assert(p1, qt.Not(qt.IsNil)) + + p2 := GetOrCreatePartition[string, testItem](cache, "/aaaa/bbbb", opts) + + c.Assert(func() { GetOrCreatePartition[string, testItem](cache, "foo bar", opts) }, qt.PanicMatches, ".*invalid partition name.*") + c.Assert(func() { GetOrCreatePartition[string, testItem](cache, "/aaaa/cccc", OptionsPartition{Weight: 1234}) }, qt.PanicMatches, ".*invalid Weight.*") + + c.Assert(p2, qt.Equals, p1) + + p3 := GetOrCreatePartition[string, testItem](cache, "/aaaa/cccc", opts) + c.Assert(p3, qt.Not(qt.IsNil)) + c.Assert(p3, qt.Not(qt.Equals), p1) + + c.Assert(func() { New(Options{}) }, qt.PanicMatches, ".*nil Log.*") +} + +func TestCalculateMaxSizePerPartition(t *testing.T) { + t.Parallel() + c := qt.New(t) + + c.Assert(calculateMaxSizePerPartition(1000, 500, 5), qt.Equals, 200) + c.Assert(calculateMaxSizePerPartition(1000, 250, 5), qt.Equals, 400) + c.Assert(func() { calculateMaxSizePerPartition(1000, 250, 0) }, qt.PanicMatches, ".*must be > 0.*") + c.Assert(func() { calculateMaxSizePerPartition(1000, 0, 1) }, qt.PanicMatches, ".*must be > 0.*") +} + +func TestCleanKey(t *testing.T) { + c := qt.New(t) + + c.Assert(CleanKey("a/b/c"), qt.Equals, "/a/b/c") + c.Assert(CleanKey("/a/b/c"), qt.Equals, "/a/b/c") + c.Assert(CleanKey("a/b/c/"), qt.Equals, "/a/b/c") + c.Assert(CleanKey(filepath.FromSlash("/a/b/c/")), qt.Equals, "/a/b/c") +} + +func newTestCache(t *testing.T) *Cache { + cache := New( + Options{ + Log: loggers.NewDefault(), + }, + ) + + p1 := GetOrCreatePartition[string, testItem](cache, "/aaaa/bbbb", OptionsPartition{Weight: 30, ClearWhen: ClearOnRebuild}) + p2 := GetOrCreatePartition[string, testItem](cache, "/aaaa/cccc", OptionsPartition{Weight: 30, ClearWhen: ClearOnChange}) + + p1.GetOrCreate("clearOnRebuild", func(string) (testItem, error) { + return testItem{}, nil + }) + + p2.GetOrCreate("clearBecauseStale", func(string) (testItem, error) { + return testItem{ + isStale: true, + }, nil + }) + + p2.GetOrCreate("clearBecauseIdentityChanged", func(string) (testItem, error) { + return testItem{ + name: "changed", + }, nil + }) + + p2.GetOrCreate("clearNever", func(string) (testItem, error) { + return testItem{ + isStale: false, + }, nil + }) + + t.Cleanup(func() { + cache.Stop() + }) + + return cache +} + +func TestClear(t *testing.T) { + t.Parallel() + c := qt.New(t) + + predicateAll := func(string) bool { + return true + } + + cache := newTestCache(t) + + c.Assert(cache.Keys(predicateAll), qt.HasLen, 4) + + cache.ClearOnRebuild() + + // Stale items are always cleared. 
+ c.Assert(cache.Keys(predicateAll), qt.HasLen, 2) + + cache = newTestCache(t) + cache.ClearOnRebuild(identity.StringIdentity("changed")) + + c.Assert(cache.Keys(nil), qt.HasLen, 1) + + cache = newTestCache(t) + + cache.ClearMatching(func(k, v any) bool { + return k.(string) == "clearOnRebuild" + }) + + c.Assert(cache.Keys(predicateAll), qt.HasLen, 3) + + cache.adjustCurrentMaxSize() +} + +func TestAdjustCurrentMaxSize(t *testing.T) { + t.Parallel() + c := qt.New(t) + cache := newTestCache(t) + alloc := cache.stats.memstatsCurrent.Alloc + cache.adjustCurrentMaxSize() + c.Assert(cache.stats.memstatsCurrent.Alloc, qt.Not(qt.Equals), alloc) +} diff --git a/cache/filecache/filecache.go b/cache/filecache/filecache.go index 414478ee2..093d2941c 100644 --- a/cache/filecache/filecache.go +++ b/cache/filecache/filecache.go @@ -24,6 +24,7 @@ import ( "time" "github.com/gohugoio/hugo/common/hugio" + "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/helpers" @@ -109,7 +110,7 @@ func (l *lockedFile) Close() error { func (c *Cache) init() error { c.initOnce.Do(func() { // Create the base dir if it does not exist. - if err := c.Fs.MkdirAll("", 0777); err != nil && !os.IsExist(err) { + if err := c.Fs.MkdirAll("", 0o777); err != nil && !os.IsExist(err) { c.initErr = err } }) @@ -146,7 +147,8 @@ func (c *Cache) WriteCloser(id string) (ItemInfo, io.WriteCloser, error) { // it when done. func (c *Cache) ReadOrCreate(id string, read func(info ItemInfo, r io.ReadSeeker) error, - create func(info ItemInfo, w io.WriteCloser) error) (info ItemInfo, err error) { + create func(info ItemInfo, w io.WriteCloser) error, +) (info ItemInfo, err error) { if err := c.init(); err != nil { return ItemInfo{}, err } @@ -380,7 +382,7 @@ func NewCaches(p *helpers.PathSpec) (Caches, error) { baseDir := v.DirCompiled - bfs := afero.NewBasePathFs(cfs, baseDir) + bfs := hugofs.NewBasePathFs(cfs, baseDir) var pruneAllRootDir string if k == CacheKeyModules { diff --git a/cache/filecache/filecache_test.go b/cache/filecache/filecache_test.go index 61f9eda64..59fb09276 100644 --- a/cache/filecache/filecache_test.go +++ b/cache/filecache/filecache_test.go @@ -1,4 +1,4 @@ -// Copyright 2018 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -17,7 +17,6 @@ import ( "errors" "fmt" "io" - "path/filepath" "strings" "sync" "testing" @@ -86,17 +85,8 @@ dir = ":cacheDir/c" cache := caches.Get("GetJSON") c.Assert(cache, qt.Not(qt.IsNil)) - bfs, ok := cache.Fs.(*afero.BasePathFs) - c.Assert(ok, qt.Equals, true) - filename, err := bfs.RealPath("key") - c.Assert(err, qt.IsNil) - cache = caches.Get("Images") c.Assert(cache, qt.Not(qt.IsNil)) - bfs, ok = cache.Fs.(*afero.BasePathFs) - c.Assert(ok, qt.Equals, true) - filename, _ = bfs.RealPath("key") - c.Assert(filename, qt.Equals, filepath.FromSlash("_gen/images/key")) rf := func(s string) func() (io.ReadCloser, error) { return func() (io.ReadCloser, error) { diff --git a/cache/filecache/integration_test.go b/cache/filecache/integration_test.go index a8a45988e..1e920c29f 100644 --- a/cache/filecache/integration_test.go +++ b/cache/filecache/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -15,7 +15,6 @@ package filecache_test import ( "path/filepath" - "testing" "time" @@ -47,7 +46,6 @@ title: "Home" _, err := b.H.BaseFs.ResourcesCache.Stat(filepath.Join("_gen", "images")) b.Assert(err, qt.IsNil) - } func TestPruneImages(t *testing.T) { @@ -55,6 +53,7 @@ func TestPruneImages(t *testing.T) { // TODO(bep) t.Skip("skip flaky test on CI server") } + t.Skip("skip flaky test") files := ` -- hugo.toml -- baseURL = "https://example.com" @@ -92,7 +91,7 @@ iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAA // TODO(bep) we need a way to test full rebuilds. // For now, just sleep a little so the cache elements expires. - time.Sleep(300 * time.Millisecond) + time.Sleep(500 * time.Millisecond) b.RenameFile("assets/a/pixel.png", "assets/b/pixel2.png").Build() @@ -104,5 +103,4 @@ iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAA b.Assert(err, qt.Not(qt.IsNil)) _, err = b.H.BaseFs.ResourcesCache.Stat(imagesCacheDir) b.Assert(err, qt.IsNil) - } diff --git a/cache/namedmemcache/named_cache.go b/cache/namedmemcache/named_cache.go deleted file mode 100644 index 7fb4fe8ed..000000000 --- a/cache/namedmemcache/named_cache.go +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright 2018 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package namedmemcache provides a memory cache with a named lock. This is suitable -// for situations where creating the cached resource can be time consuming or otherwise -// resource hungry, or in situations where a "once only per key" is a requirement. -package namedmemcache - -import ( - "sync" - - "github.com/BurntSushi/locker" -) - -// Cache holds the cached values. -type Cache struct { - nlocker *locker.Locker - cache map[string]cacheEntry - mu sync.RWMutex -} - -type cacheEntry struct { - value any - err error -} - -// New creates a new cache. -func New() *Cache { - return &Cache{ - nlocker: locker.NewLocker(), - cache: make(map[string]cacheEntry), - } -} - -// Clear clears the cache state. -func (c *Cache) Clear() { - c.mu.Lock() - defer c.mu.Unlock() - - c.cache = make(map[string]cacheEntry) - c.nlocker = locker.NewLocker() -} - -// GetOrCreate tries to get the value with the given cache key, if not found -// create will be called and cached. -// This method is thread safe. It also guarantees that the create func for a given -// key is invoked only once for this cache. -func (c *Cache) GetOrCreate(key string, create func() (any, error)) (any, error) { - c.mu.RLock() - entry, found := c.cache[key] - c.mu.RUnlock() - - if found { - return entry.value, entry.err - } - - c.nlocker.Lock(key) - defer c.nlocker.Unlock(key) - - // Create it. 
- value, err := create() - - c.mu.Lock() - c.cache[key] = cacheEntry{value: value, err: err} - c.mu.Unlock() - - return value, err -} diff --git a/cache/namedmemcache/named_cache_test.go b/cache/namedmemcache/named_cache_test.go deleted file mode 100644 index 2db923d76..000000000 --- a/cache/namedmemcache/named_cache_test.go +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright 2018 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package namedmemcache - -import ( - "fmt" - "sync" - "testing" - - qt "github.com/frankban/quicktest" -) - -func TestNamedCache(t *testing.T) { - t.Parallel() - c := qt.New(t) - - cache := New() - - counter := 0 - create := func() (any, error) { - counter++ - return counter, nil - } - - for i := 0; i < 5; i++ { - v1, err := cache.GetOrCreate("a1", create) - c.Assert(err, qt.IsNil) - c.Assert(v1, qt.Equals, 1) - v2, err := cache.GetOrCreate("a2", create) - c.Assert(err, qt.IsNil) - c.Assert(v2, qt.Equals, 2) - } - - cache.Clear() - - v3, err := cache.GetOrCreate("a2", create) - c.Assert(err, qt.IsNil) - c.Assert(v3, qt.Equals, 3) -} - -func TestNamedCacheConcurrent(t *testing.T) { - t.Parallel() - - c := qt.New(t) - - var wg sync.WaitGroup - - cache := New() - - create := func(i int) func() (any, error) { - return func() (any, error) { - return i, nil - } - } - - for i := 0; i < 10; i++ { - wg.Add(1) - go func() { - defer wg.Done() - for j := 0; j < 100; j++ { - id := fmt.Sprintf("id%d", j) - v, err := cache.GetOrCreate(id, create(j)) - c.Assert(err, qt.IsNil) - c.Assert(v, qt.Equals, j) - } - }() - } - wg.Wait() -} diff --git a/commands/commandeer.go b/commands/commandeer.go index 5d414b04a..1aac08c42 100644 --- a/commands/commandeer.go +++ b/commands/commandeer.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -259,7 +259,7 @@ func (r *rootCommand) ConfigFromProvider(key int32, cfg config.Provider) (*commo publishDirStatic := cfg.GetString("publishDirStatic") workingDir := cfg.GetString("workingDir") absPublishDirStatic := paths.AbsPathify(workingDir, publishDirStatic) - staticFs := afero.NewBasePathFs(afero.NewOsFs(), absPublishDirStatic) + staticFs := hugofs.NewBasePathFs(afero.NewOsFs(), absPublishDirStatic) // Serve from both the static and dynamic fs, // the first will take priority. @@ -405,8 +405,14 @@ func (r *rootCommand) PreRun(cd, runner *simplecobra.Commandeer) error { return err } - r.commonConfigs = lazycache.New[int32, *commonConfig](lazycache.Options{MaxEntries: 5}) - r.hugoSites = lazycache.New[int32, *hugolib.HugoSites](lazycache.Options{MaxEntries: 5}) + r.commonConfigs = lazycache.New(lazycache.Options[int32, *commonConfig]{MaxEntries: 5}) + // We don't want to keep stale HugoSites in memory longer than needed. 
+ r.hugoSites = lazycache.New(lazycache.Options[int32, *hugolib.HugoSites]{ + MaxEntries: 1, + OnEvict: func(key int32, value *hugolib.HugoSites) { + value.Close() + }, + }) return nil } diff --git a/commands/commands.go b/commands/commands.go index 9d707b841..e21d743ab 100644 --- a/commands/commands.go +++ b/commands/commands.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -37,5 +37,4 @@ func newExec() (*simplecobra.Exec, error) { } return simplecobra.New(rootCmd) - } diff --git a/commands/config.go b/commands/config.go index 63ee4f7c8..dfe54cba2 100644 --- a/commands/config.go +++ b/commands/config.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -37,7 +37,6 @@ func newConfigCommand() *configCommand { &configMountsCommand{}, }, } - } type configCommand struct { @@ -190,7 +189,6 @@ func (m *configModMounts) MarshalJSON() ([]byte, error) { Dir: m.m.Dir(), Mounts: mounts, }) - } type configMountsCommand struct { diff --git a/commands/convert.go b/commands/convert.go index 702c9227f..c81ec792a 100644 --- a/commands/convert.go +++ b/commands/convert.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -134,7 +134,7 @@ func (c *convertCommand) convertAndSavePage(p page.Page, site *hugolib.Site, tar } } - if p.File().IsZero() { + if p.File() == nil { // No content file. return nil } @@ -209,7 +209,7 @@ func (c *convertCommand) convertContents(format metadecoders.Format) error { var pagesBackedByFile page.Pages for _, p := range site.AllPages() { - if p.File().IsZero() { + if p.File() == nil { continue } pagesBackedByFile = append(pagesBackedByFile, p) diff --git a/commands/deploy.go b/commands/deploy.go index ce1af9546..ca6e4d60e 100644 --- a/commands/deploy.go +++ b/commands/deploy.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -14,7 +14,7 @@ //go:build !nodeploy // +build !nodeploy -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -38,7 +38,6 @@ import ( ) func newDeployCommand() simplecobra.Commander { - return &simpleCommand{ name: "deploy", short: "Deploy your site to a Cloud provider.", diff --git a/commands/deploy_off.go b/commands/deploy_off.go index 3150dba16..8a481bd96 100644 --- a/commands/deploy_off.go +++ b/commands/deploy_off.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -14,7 +14,7 @@ //go:build nodeploy // +build nodeploy -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/commands/env.go b/commands/env.go index 0652deb87..8e4f03c55 100644 --- a/commands/env.go +++ b/commands/env.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/commands/gen.go b/commands/gen.go index 534eb0df5..11c32d778 100644 --- a/commands/gen.go +++ b/commands/gen.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -101,7 +101,7 @@ See https://xyproto.github.io/splash/docs/all.html for a preview of the availabl } if found, _ := helpers.Exists(genmandir, hugofs.Os); !found { r.Println("Directory", genmandir, "does not exist, creating...") - if err := hugofs.Os.MkdirAll(genmandir, 0777); err != nil { + if err := hugofs.Os.MkdirAll(genmandir, 0o777); err != nil { return err } } @@ -150,7 +150,7 @@ url: %s } if found, _ := helpers.Exists(gendocdir, hugofs.Os); !found { r.Println("Directory", gendocdir, "does not exist, creating...") - if err := hugofs.Os.MkdirAll(gendocdir, 0777); err != nil { + if err := hugofs.Os.MkdirAll(gendocdir, 0o777); err != nil { return err } } @@ -177,7 +177,6 @@ url: %s cmd.PersistentFlags().SetAnnotation("dir", cobra.BashCompSubdirsInDir, []string{}) }, } - } var docsHelperTarget string @@ -241,7 +240,6 @@ url: %s newDocsHelper(), }, } - } type genCommand struct { diff --git a/commands/helpers.go b/commands/helpers.go index 3b0c50159..a13bdebc2 100644 --- a/commands/helpers.go +++ b/commands/helpers.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -110,12 +110,11 @@ func flagsToCfgWithAdditionalConfigBase(cd *simplecobra.Commandeer, cfg config.P }) return cfg - } func mkdir(x ...string) { p := filepath.Join(x...) - err := os.MkdirAll(p, 0777) // before umask + err := os.MkdirAll(p, 0o777) // before umask if err != nil { log.Fatal(err) } diff --git a/commands/hugo_windows.go b/commands/hugo_windows.go index 169c6288f..c354e889d 100644 --- a/commands/hugo_windows.go +++ b/commands/hugo_windows.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/commands/hugobuilder.go b/commands/hugobuilder.go index d2b43cc77..41f42ae6d 100644 --- a/commands/hugobuilder.go +++ b/commands/hugobuilder.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. 
All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -24,6 +24,7 @@ import ( "runtime/trace" "strings" "sync" + "sync/atomic" "time" "github.com/bep/logg" @@ -34,6 +35,7 @@ import ( "github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/common/maps" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/common/terminal" "github.com/gohugoio/hugo/common/types" "github.com/gohugoio/hugo/config" @@ -83,7 +85,6 @@ func (c *hugoBuilder) withConf(fn func(conf *commonConfig)) { c.confmu.Lock() defer c.confmu.Unlock() fn(c.conf) - } type hugoBuilderErrState struct { @@ -135,46 +136,12 @@ func (c *hugoBuilder) errCount() int { // getDirList provides NewWatcher() with a list of directories to watch for changes. func (c *hugoBuilder) getDirList() ([]string, error) { - var filenames []string - - walkFn := func(path string, fi hugofs.FileMetaInfo, err error) error { - if err != nil { - c.r.logger.Errorln("walker: ", err) - return nil - } - - if fi.IsDir() { - if fi.Name() == ".git" || - fi.Name() == "node_modules" || fi.Name() == "bower_components" { - return filepath.SkipDir - } - - filenames = append(filenames, fi.Meta().Filename) - } - - return nil - } - h, err := c.hugo() if err != nil { return nil, err } - watchFiles := h.PathSpec.BaseFs.WatchDirs() - for _, fi := range watchFiles { - if !fi.IsDir() { - filenames = append(filenames, fi.Meta().Filename) - continue - } - w := hugofs.NewWalkway(hugofs.WalkwayConfig{Logger: c.r.logger, Info: fi, WalkFn: walkFn}) - if err := w.Walk(); err != nil { - c.r.logger.Errorln("walker: ", err) - } - } - - filenames = helpers.UniqueStringsSorted(filenames) - - return filenames, nil + return helpers.UniqueStringsSorted(h.PathSpec.BaseFs.WatchFilenames()), nil } func (c *hugoBuilder) initCPUProfile() (func(), error) { @@ -441,7 +408,7 @@ func (c *hugoBuilder) copyStatic() (map[string]uint64, error) { } func (c *hugoBuilder) copyStaticTo(sourceFs *filesystems.SourceFilesystem) (uint64, error) { - infol := c.r.logger.InfoCommand("copy static") + infol := c.r.logger.InfoCommand("static") publishDir := helpers.FilePathSeparator if sourceFs.PublishFolder != "" { @@ -467,11 +434,11 @@ func (c *hugoBuilder) copyStaticTo(sourceFs *filesystems.SourceFilesystem) (uint if syncer.Delete { infol.Logf("removing all files from destination that don't exist in static dirs") - syncer.DeleteFilter = func(f os.FileInfo) bool { + syncer.DeleteFilter = func(f fsync.FileInfo) bool { return f.IsDir() && strings.HasPrefix(f.Name(), ".") } } - infol.Logf("syncing static files to %s", publishDir) + start := time.Now() // because we are using a baseFs (to get the union right). // set sync src to root @@ -479,9 +446,10 @@ func (c *hugoBuilder) copyStaticTo(sourceFs *filesystems.SourceFilesystem) (uint if err != nil { return 0, err } + loggers.TimeTrackf(infol, start, nil, "syncing static files to %s", publishDir) - // Sync runs Stat 3 times for every source file (which sounds much) - numFiles := fs.statCounter / 3 + // Sync runs Stat 2 times for every source file. 
+ numFiles := fs.statCounter / 2 return numFiles, err } @@ -652,13 +620,31 @@ func (c *hugoBuilder) handleBuildErr(err error, msg string) { func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher, staticSyncer *staticSyncer, evs []fsnotify.Event, - configSet map[string]bool) { + configSet map[string]bool, +) { defer func() { c.errState.setWasErr(false) }() var isHandled bool + // Filter out ghost events (from deleted, renamed directories). + // This seems to be a bug in fsnotify, or possibly MacOS. + var n int + for _, ev := range evs { + keep := true + if ev.Has(fsnotify.Create) || ev.Has(fsnotify.Write) { + if _, err := os.Stat(ev.Name); err != nil { + keep = false + } + } + if keep { + evs[n] = ev + n++ + } + } + evs = evs[:n] + for _, ev := range evs { isConfig := configSet[ev.Name] configChangeType := configChangeConfig @@ -726,48 +712,25 @@ func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher, return } - c.r.logger.Infoln("Received System Events:", evs) + c.r.logger.Debugln("Received System Events:", evs) staticEvents := []fsnotify.Event{} dynamicEvents := []fsnotify.Event{} - filtered := []fsnotify.Event{} h, err := c.hugo() if err != nil { c.r.logger.Errorln("Error getting the Hugo object:", err) return } + n = 0 for _, ev := range evs { if h.ShouldSkipFileChangeEvent(ev) { continue } - // Check the most specific first, i.e. files. - contentMapped := h.ContentChanges.GetSymbolicLinkMappings(ev.Name) - if len(contentMapped) > 0 { - for _, mapped := range contentMapped { - filtered = append(filtered, fsnotify.Event{Name: mapped, Op: ev.Op}) - } - continue - } - - // Check for any symbolic directory mapping. - - dir, name := filepath.Split(ev.Name) - - contentMapped = h.ContentChanges.GetSymbolicLinkMappings(dir) - - if len(contentMapped) == 0 { - filtered = append(filtered, ev) - continue - } - - for _, mapped := range contentMapped { - mappedFilename := filepath.Join(mapped, name) - filtered = append(filtered, fsnotify.Event{Name: mappedFilename, Op: ev.Op}) - } + evs[n] = ev + n++ } - - evs = filtered + evs = evs[:n] for _, ev := range evs { ext := filepath.Ext(ev.Name) @@ -788,6 +751,7 @@ func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher, if istemp { continue } + if h.Deps.SourceSpec.IgnoreFile(ev.Name) { continue } @@ -811,7 +775,7 @@ func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher, continue } - walkAdder := func(path string, f hugofs.FileMetaInfo, err error) error { + walkAdder := func(path string, f hugofs.FileMetaInfo) error { if f.IsDir() { c.r.logger.Println("adding created directory to watchlist", path) if err := watcher.Add(path); err != nil { @@ -827,11 +791,10 @@ func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher, } // recursively add new directories to watch list - // When mkdir -p is used, only the top directory triggers an event (at least on OSX) - if ev.Op&fsnotify.Create == fsnotify.Create { + if ev.Has(fsnotify.Create) || ev.Has(fsnotify.Rename) { c.withConf(func(conf *commonConfig) { if s, err := conf.fs.Source.Stat(ev.Name); err == nil && s.Mode().IsDir() { - _ = helpers.SymbolicWalk(conf.fs.Source, ev.Name, walkAdder) + _ = helpers.Walk(conf.fs.Source, ev.Name, walkAdder) } }) } @@ -872,7 +835,7 @@ func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher, return } path := h.BaseFs.SourceFilesystems.MakeStaticPathRelative(ev.Name) - path = h.RelURL(helpers.ToSlashTrimLeading(path), false) + path = h.RelURL(paths.ToSlashTrimLeading(path), false) livereload.RefreshPath(path) } else { @@ -909,7 +872,7 @@ func (c 
*hugoBuilder) handleEvents(watcher *watcher.Batcher, // Nothing has changed. return } else if len(changed) == 1 { - pathToRefresh := h.PathSpec.RelURL(helpers.ToSlashTrimLeading(changed[0]), false) + pathToRefresh := h.PathSpec.RelURL(paths.ToSlashTrimLeading(changed[0]), false) livereload.RefreshPath(pathToRefresh) } else { livereload.ForceRefresh() @@ -944,7 +907,6 @@ func (c *hugoBuilder) hugo() (*hugolib.HugoSites, error) { var err error h, err = c.r.HugFromConfig(conf) return err - }); err != nil { return nil, err } @@ -1000,6 +962,7 @@ func (c *hugoBuilder) loadConfig(cd *simplecobra.Commandeer, running bool) error } if len(conf.configs.LoadingInfo.ConfigFiles) == 0 { + //lint:ignore ST1005 end user message. return errors.New("Unable to locate config file or config directory. Perhaps you need to create a new site.\nRun `hugo help new` for details.") } @@ -1011,15 +974,16 @@ func (c *hugoBuilder) loadConfig(cd *simplecobra.Commandeer, running bool) error } return nil - } +var rebuildCounter atomic.Uint64 + func (c *hugoBuilder) printChangeDetected(typ string) { msg := "\nChange" if typ != "" { msg += " of " + typ } - msg += " detected, rebuilding site." + msg += fmt.Sprintf(" detected, rebuilding site (#%d).", rebuildCounter.Add(1)) c.r.logger.Println(msg) const layout = "2006-01-02 15:04:05.000 -0700" @@ -1034,25 +998,12 @@ func (c *hugoBuilder) rebuildSites(events []fsnotify.Event) error { } } c.errState.setBuildErr(nil) - visited := c.visitedURLs.PeekAllSet() h, err := c.hugo() if err != nil { return err } - if c.fastRenderMode { - c.withConf(func(conf *commonConfig) { - // Make sure we always render the home pages - for _, l := range conf.configs.ConfigLangs() { - langPath := l.LanguagePrefix() - if langPath != "" { - langPath = langPath + "/" - } - home := h.PrependBasePath("/"+langPath, false) - visited[home] = true - } - }) - } - return h.Build(hugolib.BuildCfg{NoBuildLock: true, RecentlyVisited: visited, ErrRecovery: c.errState.wasErr()}, events...) + + return h.Build(hugolib.BuildCfg{NoBuildLock: true, RecentlyVisited: c.visitedURLs, ErrRecovery: c.errState.wasErr()}, events...) } func (c *hugoBuilder) reloadConfig() error { diff --git a/commands/import.go b/commands/import.go index 18ed7b328..947b6d11f 100644 --- a/commands/import.go +++ b/commands/import.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -23,7 +23,6 @@ import ( "os" "path/filepath" "regexp" - "strconv" "strings" "time" @@ -66,7 +65,6 @@ Import from Jekyll requires two paths, e.g. 
` + "`hugo import jekyll jekyll_root } return c - } type importCommand struct { @@ -312,7 +310,7 @@ func (c *importCommand) convertJekyllPost(path, relPath, targetDir string, draft targetFile := filepath.Join(targetDir, relPath) targetParentDir := filepath.Dir(targetFile) - os.MkdirAll(targetParentDir, 0777) + os.MkdirAll(targetParentDir, 0o777) contentBytes, err := os.ReadFile(path) if err != nil { @@ -398,7 +396,6 @@ func (c *importCommand) copyJekyllFilesAndFolders(jekyllRoot, dest string, jekyl } func (c *importCommand) importFromJekyll(args []string) error { - jekyllRoot, err := filepath.Abs(filepath.Clean(args[0])) if err != nil { return newUserError("path error:", args[0]) @@ -429,11 +426,7 @@ func (c *importCommand) importFromJekyll(args []string) error { c.r.Println("Importing...") fileCount := 0 - callback := func(path string, fi hugofs.FileMetaInfo, err error) error { - if err != nil { - return err - } - + callback := func(path string, fi hugofs.FileMetaInfo) error { if fi.IsDir() { return nil } @@ -462,7 +455,7 @@ func (c *importCommand) importFromJekyll(args []string) error { for jekyllPostDir, hasAnyPostInDir := range jekyllPostDirs { if hasAnyPostInDir { - if err = helpers.SymbolicWalk(hugofs.Os, filepath.Join(jekyllRoot, jekyllPostDir), callback); err != nil { + if err = helpers.Walk(hugofs.Os, filepath.Join(jekyllRoot, jekyllPostDir), callback); err != nil { return err } } diff --git a/commands/list.go b/commands/list.go index 6690ea9ee..41a45e402 100644 --- a/commands/list.go +++ b/commands/list.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -31,7 +31,6 @@ import ( // newListCommand creates a new list command and its subcommands. 
func newListCommand() *listCommand { - createRecord := func(workingDir string, p page.Page) []string { return []string{ filepath.ToSlash(strings.TrimPrefix(p.File().Filename(), workingDir+string(os.PathSeparator))), @@ -83,7 +82,6 @@ func newListCommand() *listCommand { } return nil - } return &listCommand{ @@ -94,11 +92,10 @@ func newListCommand() *listCommand { long: `List all of the drafts in your content directory.`, run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error { shouldInclude := func(p page.Page) bool { - if !p.Draft() || p.File().IsZero() { + if !p.Draft() || p.File() == nil { return false } return true - } return list(cd, r, shouldInclude, "buildDrafts", true, @@ -113,11 +110,10 @@ func newListCommand() *listCommand { long: `List all of the posts in your content directory which will be posted in the future.`, run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error { shouldInclude := func(p page.Page) bool { - if !resource.IsFuture(p) || p.File().IsZero() { + if !resource.IsFuture(p) || p.File() == nil { return false } return true - } return list(cd, r, shouldInclude, "buildFuture", true, @@ -131,7 +127,7 @@ func newListCommand() *listCommand { long: `List all of the posts in your content directory which has already expired.`, run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error { shouldInclude := func(p page.Page) bool { - if !resource.IsExpired(p) || p.File().IsZero() { + if !resource.IsExpired(p) || p.File() == nil { return false } return true @@ -148,14 +144,13 @@ func newListCommand() *listCommand { long: `List all of the posts in your content directory, include drafts, future and expired pages.`, run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error { shouldInclude := func(p page.Page) bool { - return !p.File().IsZero() + return p.File() != nil } return list(cd, r, shouldInclude, "buildDrafts", true, "buildFuture", true, "buildExpired", true) }, }, }, } - } type listCommand struct { diff --git a/commands/mod.go b/commands/mod.go index 20b9d3960..d64d2a983 100644 --- a/commands/mod.go +++ b/commands/mod.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -69,7 +69,7 @@ so this may/will change in future versions of Hugo. if err != nil { return err } - return npm.Pack(h.BaseFs.SourceFs, h.BaseFs.Assets.Dirs) + return npm.Pack(h.BaseFs.ProjectSourceFs, h.BaseFs.AssetsWithDuplicatesPreserved.Fs) }, }, }, diff --git a/commands/new.go b/commands/new.go index 8e348366d..79d2c9e7e 100644 --- a/commands/new.go +++ b/commands/new.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -64,7 +64,6 @@ Ensure you run this within the root directory of your site.`, cmd.Flags().String("editor", "", "edit new content with this editor, if provided") cmd.Flags().BoolVarP(&force, "force", "f", false, "overwrite file if it already exists") applyLocalFlagsBuildConfig(cmd, r) - }, }, &simpleCommand{ @@ -143,7 +142,6 @@ according to your needs.`, } return c - } type newCommand struct { diff --git a/commands/release.go b/commands/release.go index 54cf936e8..1d1aaad53 100644 --- a/commands/release.go +++ b/commands/release.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -24,7 +24,6 @@ import ( // Note: This is a command only meant for internal use and must be run // via "go run -tags release main.go release" on the actual code base that is in the release. func newReleaseCommand() simplecobra.Commander { - var ( step int skipPush bool diff --git a/commands/server.go b/commands/server.go index 63c09fccd..97cf405b7 100644 --- a/commands/server.go +++ b/commands/server.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -27,20 +27,19 @@ import ( "net/http" "net/url" "os" - "sync" - "sync/atomic" - - "github.com/bep/mclib" - "os/signal" "path" "path/filepath" "regexp" "strconv" "strings" + "sync" + "sync/atomic" "syscall" "time" + "github.com/bep/mclib" + "github.com/bep/debounce" "github.com/bep/simplecobra" "github.com/fsnotify/fsnotify" @@ -83,10 +82,14 @@ const ( ) func newHugoBuilder(r *rootCommand, s *serverCommand, onConfigLoaded ...func(reloaded bool) error) *hugoBuilder { + var visitedURLs *types.EvictingStringQueue + if s != nil && !s.disableFastRender { + visitedURLs = types.NewEvictingStringQueue(20) + } return &hugoBuilder{ r: r, s: s, - visitedURLs: types.NewEvictingStringQueue(100), + visitedURLs: visitedURLs, fullRebuildSem: semaphore.NewWeighted(1), debounce: debounce.New(4 * time.Second), onConfigLoaded: func(reloaded bool) error { @@ -120,7 +123,6 @@ func newServerCommand() *serverCommand { }, withc: func(cmd *cobra.Command, r *rootCommand) { cmd.Flags().BoolVar(&uninstall, "uninstall", false, "Uninstall the local CA (but do not delete it).") - }, }, }, @@ -219,7 +221,7 @@ func (f *fileChangeDetector) filterIrrelevant(in []string) []string { } type fileServer struct { - baseURLs []string + baseURLs []urls.BaseURL roots []string errorTemplate func(err any) (io.Reader, error) c *serverCommand @@ -255,12 +257,6 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, net.Listener, string r.Println("Running in Fast Render Mode. 
For full rebuilds on change: hugo server --disableFastRender") } - // We're only interested in the path - u, err := url.Parse(baseURL) - if err != nil { - return nil, nil, "", "", fmt.Errorf("invalid baseURL: %w", err) - } - decorate := func(h http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if f.c.showErrorInBrowser { @@ -280,7 +276,7 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, net.Listener, string port = lrport } }) - lr := *u + lr := baseURL.URL() lr.Host = fmt.Sprintf("%s:%d", lr.Hostname(), port) fmt.Fprint(w, injectLiveReloadScript(r, lr)) @@ -311,7 +307,7 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, net.Listener, string // This matches Netlify's behaviour and is needed for SPA behaviour. // See https://docs.netlify.com/routing/redirects/rewrites-proxies/ if !redirect.Force { - path := filepath.Clean(strings.TrimPrefix(requestURI, u.Path)) + path := filepath.Clean(strings.TrimPrefix(requestURI, baseURL.Path())) if root != "" { path = filepath.Join(root, path) } @@ -338,7 +334,7 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, net.Listener, string switch redirect.Status { case 404: w.WriteHeader(404) - file, err := fs.Open(strings.TrimPrefix(redirect.To, u.Path)) + file, err := fs.Open(strings.TrimPrefix(redirect.To, baseURL.Path())) if err == nil { defer file.Close() io.Copy(w, file) @@ -347,7 +343,7 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, net.Listener, string } return case 200: - if r2 := f.rewriteRequest(r, strings.TrimPrefix(redirect.To, u.Path)); r2 != nil { + if r2 := f.rewriteRequest(r, strings.TrimPrefix(redirect.To, baseURL.Path())); r2 != nil { requestURI = redirect.To r = r2 } @@ -385,10 +381,10 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, net.Listener, string fileserver := decorate(http.FileServer(fs)) mu := http.NewServeMux() - if u.Path == "" || u.Path == "/" { + if baseURL.Path() == "" || baseURL.Path() == "/" { mu.Handle("/", fileserver) } else { - mu.Handle(u.Path, http.StripPrefix(u.Path, fileserver)) + mu.Handle(baseURL.Path(), http.StripPrefix(baseURL.Path(), fileserver)) } if r.IsTestRun() { var shutDownOnce sync.Once @@ -401,7 +397,7 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, net.Listener, string endpoint := net.JoinHostPort(f.c.serverInterface, strconv.Itoa(port)) - return mu, listener, u.String(), endpoint, nil + return mu, listener, baseURL.String(), endpoint, nil } func (f *fileServer) rewriteRequest(r *http.Request, toPath string) *http.Request { @@ -469,7 +465,6 @@ func (c *serverCommand) Name() string { } func (c *serverCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, args []string) error { - // Watch runs its own server as part of the routine if c.serverWatch { @@ -676,7 +671,7 @@ func (c *serverCommand) createCertificates(conf *commonConfig) error { // Create the directory if it doesn't exist. if _, err := os.Stat(keyDir); os.IsNotExist(err) { - if err := os.MkdirAll(keyDir, 0777); err != nil { + if err := os.MkdirAll(keyDir, 0o777); err != nil { return err } } @@ -701,7 +696,6 @@ func (c *serverCommand) createCertificates(conf *commonConfig) error { // Yes, this is unfortunate, but it's currently the only way to use Mkcert as a library. 
os.Args = []string{"-cert-file", c.tlsCertFile, "-key-file", c.tlsKeyFile, hostname} return mclib.RunMain() - } func (c *serverCommand) verifyCert(rootPEM, certPEM []byte, name string) error { @@ -831,9 +825,9 @@ func (c *serverCommand) partialReRender(urls ...string) error { c.errState.setWasErr(false) }() c.errState.setBuildErr(nil) - visited := make(map[string]bool) + visited := types.NewEvictingStringQueue(len(urls)) for _, url := range urls { - visited[url] = true + visited.Add(url) } h, err := c.hugo() @@ -846,7 +840,7 @@ func (c *serverCommand) partialReRender(urls ...string) error { func (c *serverCommand) serve() error { var ( - baseURLs []string + baseURLs []urls.BaseURL roots []string h *hugolib.HugoSites ) @@ -863,18 +857,17 @@ func (c *serverCommand) serve() error { if isMultiHost { for _, l := range conf.configs.ConfigLangs() { - baseURLs = append(baseURLs, l.BaseURL().String()) + baseURLs = append(baseURLs, l.BaseURL()) roots = append(roots, l.Language().Lang) } } else { l := conf.configs.GetFirstLanguageConfig() - baseURLs = []string{l.BaseURL().String()} + baseURLs = []urls.BaseURL{l.BaseURL()} roots = []string{""} } return nil }) - if err != nil { return err } @@ -946,13 +939,9 @@ func (c *serverCommand) serve() error { servers = append(servers, srv) if doLiveReload { - u, err := url.Parse(helpers.SanitizeURL(baseURLs[i])) - if err != nil { - return err - } - - mu.HandleFunc(u.Path+"/livereload.js", livereload.ServeJS) - mu.HandleFunc(u.Path+"/livereload", livereload.Handler) + baseURL := baseURLs[i] + mu.HandleFunc(baseURL.Path()+"livereload.js", livereload.ServeJS) + mu.HandleFunc(baseURL.Path()+"livereload", livereload.Handler) } c.r.Printf("Web Server is available at %s (bind address %s) %s\n", serverURL, c.serverInterface, roots[i]) wg1.Go(func() error { @@ -971,8 +960,12 @@ func (c *serverCommand) serve() error { if c.r.IsTestRun() { // Write a .ready file to disk to signal ready status. // This is where the test is run from. + var baseURLs []string + for _, baseURL := range srv.baseURLs { + baseURLs = append(baseURLs, baseURL.String()) + } testInfo := map[string]any{ - "baseURLs": srv.baseURLs, + "baseURLs": baseURLs, } dir := os.Getenv("WORK") @@ -983,7 +976,7 @@ func (c *serverCommand) serve() error { if err != nil { return err } - err = os.WriteFile(readyFile, b, 0777) + err = os.WriteFile(readyFile, b, 0o777) if err != nil { return err } @@ -1167,7 +1160,7 @@ func cleanErrorLog(content string) string { return strings.Join(keep, ": ") } -func injectLiveReloadScript(src io.Reader, baseURL url.URL) string { +func injectLiveReloadScript(src io.Reader, baseURL *url.URL) string { var b bytes.Buffer chain := transform.Chain{livereloadinject.New(baseURL)} chain.Apply(&b, src) diff --git a/common/constants/constants.go b/common/constants/constants.go index 6afb9e283..e4f5a63a2 100644 --- a/common/constants/constants.go +++ b/common/constants/constants.go @@ -20,3 +20,24 @@ const ( ErrRemoteGetJSON = "error-remote-getjson" ErrRemoteGetCSV = "error-remote-getcsv" ) + +// Field/method names with special meaning. +const ( + FieldRelPermalink = "RelPermalink" + FieldPermalink = "Permalink" +) + +// IsFieldRelOrPermalink returns whether the given name is a RelPermalink or Permalink. +func IsFieldRelOrPermalink(name string) bool { + return name == FieldRelPermalink || name == FieldPermalink +} + +// Resource transformations. 
+const ( + ResourceTransformationFingerprint = "fingerprint" +) + +// IsResourceTransformationLinkChange returns whether the given name is a resource transformation that changes the permalink based on the content. +func IsResourceTransformationPermalinkHash(name string) bool { + return name == ResourceTransformationFingerprint +} diff --git a/common/hcontext/context.go b/common/hcontext/context.go new file mode 100644 index 000000000..9524ef284 --- /dev/null +++ b/common/hcontext/context.go @@ -0,0 +1,46 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package hcontext + +import "context" + +// ContextDispatcher is a generic interface for setting and getting values from a context. +type ContextDispatcher[T any] interface { + Set(ctx context.Context, value T) context.Context + Get(ctx context.Context) T +} + +// NewContextDispatcher creates a new ContextDispatcher with the given key. +func NewContextDispatcher[T any, R comparable](key R) ContextDispatcher[T] { + return keyInContext[T, R]{ + id: key, + } +} + +type keyInContext[T any, R comparable] struct { + zero T + id R +} + +func (f keyInContext[T, R]) Get(ctx context.Context) T { + v := ctx.Value(f.id) + if v == nil { + return f.zero + } + return v.(T) +} + +func (f keyInContext[T, R]) Set(ctx context.Context, value T) context.Context { + return context.WithValue(ctx, f.id, value) +} diff --git a/common/herrors/error_locator.go b/common/herrors/error_locator.go index b880fe045..1ece0cca4 100644 --- a/common/herrors/error_locator.go +++ b/common/herrors/error_locator.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -74,7 +74,6 @@ func ContainsMatcher(text string) func(m LineMatcher) int { // ErrorContext contains contextual information about an error. This will // typically be the lines surrounding some problem in a file. type ErrorContext struct { - // If a match will contain the matched line and up to 2 lines before and after. // Will be empty if no match. Lines []string diff --git a/common/herrors/error_locator_test.go b/common/herrors/error_locator_test.go index 6135657d8..62f15213d 100644 --- a/common/herrors/error_locator_test.go +++ b/common/herrors/error_locator_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/common/herrors/errors.go b/common/herrors/errors.go index 8e62b2c99..59739a86a 100644 --- a/common/herrors/errors.go +++ b/common/herrors/errors.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. 
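
The new common/hcontext package above adds a small generic helper for stashing typed values in a context.Context. A minimal usage sketch (the key type and values here are made up for illustration, they are not part of the patch):

    package main

    import (
        "context"
        "fmt"

        "github.com/gohugoio/hugo/common/hcontext"
    )

    type ctxKey string

    // One dispatcher per value type; the key only needs to be comparable.
    var pageDispatcher = hcontext.NewContextDispatcher[string](ctxKey("currentPagePath"))

    func main() {
        ctx := pageDispatcher.Set(context.Background(), "/posts/first/")
        fmt.Println(pageDispatcher.Get(ctx))                  // "/posts/first/"
        fmt.Println(pageDispatcher.Get(context.Background())) // "" (zero value when unset)
    }
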
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -21,6 +21,7 @@ import ( "os" "runtime" "runtime/debug" + "time" ) // PrintStackTrace prints the current stacktrace to w. @@ -47,6 +48,24 @@ func Recover(args ...any) { } } +// IsTimeoutError returns true if the given error is or contains a TimeoutError. +func IsTimeoutError(err error) bool { + return errors.Is(err, &TimeoutError{}) +} + +type TimeoutError struct { + Duration time.Duration +} + +func (e *TimeoutError) Error() string { + return fmt.Sprintf("timeout after %s", e.Duration) +} + +func (e *TimeoutError) Is(target error) bool { + _, ok := target.(*TimeoutError) + return ok +} + // IsFeatureNotAvailableError returns true if the given error is or contains a FeatureNotAvailableError. func IsFeatureNotAvailableError(err error) bool { return errors.Is(err, &FeatureNotAvailableError{}) diff --git a/common/herrors/errors_test.go b/common/herrors/errors_test.go index 223782e23..2f53a1e89 100644 --- a/common/herrors/errors_test.go +++ b/common/herrors/errors_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -42,5 +42,4 @@ func TestIsFeatureNotAvailableError(t *testing.T) { c.Assert(IsFeatureNotAvailableError(ErrFeatureNotAvailable), qt.Equals, true) c.Assert(IsFeatureNotAvailableError(&FeatureNotAvailableError{}), qt.Equals, true) c.Assert(IsFeatureNotAvailableError(errors.New("asdf")), qt.Equals, false) - } diff --git a/common/herrors/file_error.go b/common/herrors/file_error.go index f8bcecd34..32a6f0081 100644 --- a/common/herrors/file_error.go +++ b/common/herrors/file_error.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -15,13 +15,13 @@ package herrors import ( "encoding/json" - - godartsassv1 "github.com/bep/godartsass" - + "errors" "fmt" "io" "path/filepath" + godartsassv1 "github.com/bep/godartsass" + "github.com/bep/godartsass/v2" "github.com/bep/golibsass/libsass/libsasserrors" "github.com/gohugoio/hugo/common/paths" @@ -29,8 +29,6 @@ import ( "github.com/pelletier/go-toml/v2" "github.com/spf13/afero" "github.com/tdewolff/parse/v2" - - "errors" ) // FileError represents an error when handling a file: Parsing a config file, @@ -48,6 +46,9 @@ type FileError interface { // UpdateContent updates the error with a new ErrorContext from the content of the file. UpdateContent(r io.Reader, linematcher LineMatcherFn) FileError + + // SetFilename sets the filename of the error. + SetFilename(filename string) FileError } // Unwrapper can unwrap errors created with fmt.Errorf. 
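
The TimeoutError added to common/herrors above carries the configured duration but matches via errors.Is regardless of that duration. A small sketch of detecting it through wrapped errors (the wrapping shown is illustrative):

    package main

    import (
        "errors"
        "fmt"
        "time"

        "github.com/gohugoio/hugo/common/herrors"
    )

    func main() {
        // Something deep in the build times out and the error gets wrapped.
        err := fmt.Errorf("rendering pages: %w", &herrors.TimeoutError{Duration: 30 * time.Second})

        // Both checks unwrap and match on the type, ignoring the Duration field.
        fmt.Println(herrors.IsTimeoutError(err))             // true
        fmt.Println(errors.Is(err, &herrors.TimeoutError{})) // true
    }
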
@@ -60,6 +61,11 @@ var ( _ Unwrapper = (*fileError)(nil) ) +func (fe *fileError) SetFilename(filename string) FileError { + fe.position.Filename = filename + return fe +} + func (fe *fileError) UpdatePosition(pos text.Position) FileError { oldFilename := fe.Position().Filename if pos.Filename != "" && fe.fileType == "" { @@ -115,7 +121,6 @@ func (fe *fileError) UpdateContent(r io.Reader, linematcher LineMatcherFn) FileE } return fe - } type fileError struct { @@ -181,7 +186,6 @@ func NewFileErrorFromName(err error, name string) FileError { } return &fileError{cause: err, fileType: fileType, position: pos} - } // NewFileErrorFromPos will use the filename and line number from pos to create a new FileError, wrapping err. @@ -192,7 +196,6 @@ func NewFileErrorFromPos(err error, pos text.Position) FileError { _, fileType = paths.FileAndExtNoDelimiter(filepath.Clean(pos.Filename)) } return &fileError{cause: err, fileType: fileType, position: pos} - } func NewFileErrorFromFileInErr(err error, fs afero.Fs, linematcher LineMatcherFn) FileError { @@ -249,7 +252,6 @@ func openFile(filename string, fs afero.Fs) (afero.File, string, error) { }); ok { realFilename = s.Filename() } - } f, err2 := fs.Open(filename) diff --git a/common/herrors/file_error_test.go b/common/herrors/file_error_test.go index 0b260a255..7aca08405 100644 --- a/common/herrors/file_error_test.go +++ b/common/herrors/file_error_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -14,12 +14,11 @@ package herrors import ( + "errors" "fmt" "strings" "testing" - "errors" - "github.com/gohugoio/hugo/common/text" qt "github.com/frankban/quicktest" @@ -48,7 +47,6 @@ func TestNewFileError(t *testing.T) { c.Assert(errorContext.Lines, qt.DeepEquals, []string{"line 30", "line 31", "line 32", "line 33", "line 34"}) c.Assert(errorContext.LinesPos, qt.Equals, 2) c.Assert(errorContext.ChromaLexer, qt.Equals, "go-html-template") - } func TestNewFileErrorExtractFromMessage(t *testing.T) { diff --git a/common/hreflect/helpers.go b/common/hreflect/helpers.go index 17afbf912..b5a8bacc9 100644 --- a/common/hreflect/helpers.go +++ b/common/hreflect/helpers.go @@ -1,4 +1,4 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // Some functions in this file (see comments) is based on the Go source code, // copyright The Go Authors and governed by a BSD-style license. // @@ -23,6 +23,7 @@ import ( "time" "github.com/gohugoio/hugo/common/htime" + "github.com/gohugoio/hugo/common/maps" "github.com/gohugoio/hugo/common/types" ) @@ -188,6 +189,20 @@ func IsTime(tp reflect.Type) bool { return false } +// IsValid returns whether v is not nil and a valid value. +func IsValid(v reflect.Value) bool { + if !v.IsValid() { + return false + } + + switch v.Kind() { + case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: + return !v.IsNil() + } + + return true +} + // AsTime returns v as a time.Time if possible. // The given location is only used if the value implements AsTimeProvider (e.g. go-toml local). // A zero Time and false is returned if this isn't possible. 
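
The new hreflect.IsValid above extends reflect.Value.IsValid with a nil check for the kinds that can be nil, which is usually what call sites actually want. A quick sketch of the difference (values chosen for illustration):

    package main

    import (
        "fmt"
        "reflect"

        "github.com/gohugoio/hugo/common/hreflect"
    )

    func main() {
        var nilMap map[string]int

        fmt.Println(hreflect.IsValid(reflect.ValueOf(nilMap)))           // false: nil map
        fmt.Println(reflect.ValueOf(nilMap).IsValid())                   // true: reflect only rejects the zero Value
        fmt.Println(hreflect.IsValid(reflect.ValueOf(map[string]int{}))) // true
        fmt.Println(hreflect.IsValid(reflect.Value{}))                   // false: zero reflect.Value
    }
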
@@ -217,7 +232,7 @@ func CallMethodByName(cxt context.Context, name string, v reflect.Value) []refle panic("not supported") } first := tp.In(0) - if first.Implements(ContextInterface) { + if IsContextType(first) { args = append(args, reflect.ValueOf(cxt)) } } @@ -236,4 +251,24 @@ func indirectInterface(v reflect.Value) reflect.Value { return v.Elem() } -var ContextInterface = reflect.TypeOf((*context.Context)(nil)).Elem() +var contextInterface = reflect.TypeOf((*context.Context)(nil)).Elem() + +var isContextCache = maps.NewCache[reflect.Type, bool]() + +type k string + +var contextTypeValue = reflect.TypeOf(context.WithValue(context.Background(), k("key"), 32)) + +// IsContextType returns whether tp is a context.Context type. +func IsContextType(tp reflect.Type) bool { + if tp == contextTypeValue { + return true + } + if tp == contextInterface { + return true + } + + return isContextCache.GetOrCreate(tp, func() bool { + return tp.Implements(contextInterface) + }) +} diff --git a/common/hreflect/helpers_test.go b/common/hreflect/helpers_test.go index d16b9b9b3..27b774337 100644 --- a/common/hreflect/helpers_test.go +++ b/common/hreflect/helpers_test.go @@ -14,6 +14,7 @@ package hreflect import ( + "context" "reflect" "testing" "time" @@ -40,6 +41,42 @@ func TestGetMethodByName(t *testing.T) { c.Assert(GetMethodIndexByName(tp, "Foo"), qt.Equals, -1) } +func TestIsContextType(t *testing.T) { + c := qt.New(t) + type k string + ctx := context.Background() + valueCtx := context.WithValue(ctx, k("key"), 32) + c.Assert(IsContextType(reflect.TypeOf(ctx)), qt.IsTrue) + c.Assert(IsContextType(reflect.TypeOf(valueCtx)), qt.IsTrue) +} + +func BenchmarkIsContextType(b *testing.B) { + type k string + b.Run("value", func(b *testing.B) { + ctx := context.Background() + ctxs := make([]reflect.Type, b.N) + for i := 0; i < b.N; i++ { + ctxs[i] = reflect.TypeOf(context.WithValue(ctx, k("key"), i)) + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + if !IsContextType(ctxs[i]) { + b.Fatal("not context") + } + } + }) + + b.Run("background", func(b *testing.B) { + var ctxt reflect.Type = reflect.TypeOf(context.Background()) + for i := 0; i < b.N; i++ { + if !IsContextType(ctxt) { + b.Fatal("not context") + } + } + }) +} + func BenchmarkIsTruthFul(b *testing.B) { v := reflect.ValueOf("Hugo") diff --git a/common/hstrings/strings.go b/common/hstrings/strings.go index 88df97607..d9426ab5d 100644 --- a/common/hstrings/strings.go +++ b/common/hstrings/strings.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -122,3 +122,8 @@ func InSlicEqualFold(arr []string, el string) bool { } return false } + +type Tuple struct { + First string + Second string +} diff --git a/common/hstrings/strings_test.go b/common/hstrings/strings_test.go index 85068bdf9..d8e9e204a 100644 --- a/common/hstrings/strings_test.go +++ b/common/hstrings/strings_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
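
The isContextCache above is a first user of the new generic maps.Cache added later in this patch (common/maps/cache.go); GetOrCreate memoizes the Implements check per type. The same pattern in isolation looks like this (the expensive computation here is a stand-in, not from the patch):

    package main

    import (
        "fmt"
        "strings"

        "github.com/gohugoio/hugo/common/maps"
    )

    var upperCache = maps.NewCache[string, string]()

    // toUpperOnce stands in for an expensive computation; each key is
    // computed at most once, later calls return the cached value.
    func toUpperOnce(s string) string {
        return upperCache.GetOrCreate(s, func() string {
            fmt.Println("computing", s)
            return strings.ToUpper(s)
        })
    }

    func main() {
        fmt.Println(toUpperOnce("hugo")) // prints "computing hugo", then "HUGO"
        fmt.Println(toUpperOnce("hugo")) // cached: prints only "HUGO"
    }
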
@@ -33,7 +33,6 @@ func TestStringEqualFold(t *testing.T) { c.Assert(StringEqualFold(s1).EqualFold("b"), qt.Equals, false) c.Assert(StringEqualFold(s1).Eq(s2), qt.Equals, true) c.Assert(StringEqualFold(s1).Eq("b"), qt.Equals, false) - } func TestGetOrCompileRegexp(t *testing.T) { @@ -42,7 +41,6 @@ func TestGetOrCompileRegexp(t *testing.T) { re, err := GetOrCompileRegexp(`\d+`) c.Assert(err, qt.IsNil) c.Assert(re.MatchString("123"), qt.Equals, true) - } func BenchmarkGetOrCompileRegexp(b *testing.B) { diff --git a/common/htime/integration_test.go b/common/htime/integration_test.go index e72c216d9..983fff1f7 100644 --- a/common/htime/integration_test.go +++ b/common/htime/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/common/hugio/copy.go b/common/hugio/copy.go index 8dbadc48c..31d679dfc 100644 --- a/common/hugio/copy.go +++ b/common/hugio/copy.go @@ -16,6 +16,7 @@ package hugio import ( "fmt" "io" + iofs "io/fs" "path/filepath" "github.com/spf13/afero" @@ -60,12 +61,16 @@ func CopyDir(fs afero.Fs, from, to string, shouldCopy func(filename string) bool return fmt.Errorf("%q is not a directory", from) } - err = fs.MkdirAll(to, 0777) // before umask + err = fs.MkdirAll(to, 0o777) // before umask if err != nil { return err } - entries, _ := afero.ReadDir(fs, from) + d, err := fs.Open(from) + if err != nil { + return err + } + entries, _ := d.(iofs.ReadDirFile).ReadDir(-1) for _, entry := range entries { fromFilename := filepath.Join(from, entry.Name()) toFilename := filepath.Join(to, entry.Name()) diff --git a/common/hugio/hasBytesWriter.go b/common/hugio/hasBytesWriter.go index 7b7d7a5d7..5148c82f9 100644 --- a/common/hugio/hasBytesWriter.go +++ b/common/hugio/hasBytesWriter.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/common/hugio/hasBytesWriter_test.go b/common/hugio/hasBytesWriter_test.go index b1b8011d5..af53fa5dd 100644 --- a/common/hugio/hasBytesWriter_test.go +++ b/common/hugio/hasBytesWriter_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/common/hugio/readers.go b/common/hugio/readers.go index 60bd97992..feb1b1412 100644 --- a/common/hugio/readers.go +++ b/common/hugio/readers.go @@ -14,6 +14,7 @@ package hugio import ( + "bytes" "io" "strings" ) @@ -57,3 +58,22 @@ func NewReadSeekerNoOpCloser(r ReadSeeker) ReadSeekerNoOpCloser { func NewReadSeekerNoOpCloserFromString(content string) ReadSeekerNoOpCloser { return ReadSeekerNoOpCloser{strings.NewReader(content)} } + +// NewReadSeekerNoOpCloserFromString uses strings.NewReader to create a new ReadSeekerNoOpCloser +// from the given bytes slice. +func NewReadSeekerNoOpCloserFromBytes(content []byte) ReadSeekerNoOpCloser { + return ReadSeekerNoOpCloser{bytes.NewReader(content)} +} + +// NewReadSeekCloser creates a new ReadSeekCloser from the given ReadSeeker. 
+// The ReadSeeker will be seeked to the beginning before returned. +func NewOpenReadSeekCloser(r ReadSeekCloser) OpenReadSeekCloser { + return func() (ReadSeekCloser, error) { + r.Seek(0, io.SeekStart) + return r, nil + } +} + +// OpenReadSeekCloser allows setting some other way (than reading from a filesystem) +// to open or create a ReadSeekCloser. +type OpenReadSeekCloser func() (ReadSeekCloser, error) diff --git a/common/hugo/hugo.go b/common/hugo/hugo.go index 67d52f6c8..be43e2a38 100644 --- a/common/hugo/hugo.go +++ b/common/hugo/hugo.go @@ -35,6 +35,8 @@ import ( "github.com/spf13/afero" + iofs "io/fs" + "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/hugofs" ) @@ -159,7 +161,12 @@ func GetExecEnviron(workDir string, cfg config.AllProvider, fs afero.Fs) []strin config.SetEnvVars(&env, "HUGO_PUBLISHDIR", filepath.Join(workDir, cfg.BaseConfig().PublishDir)) if fs != nil { - fis, err := afero.ReadDir(fs, files.FolderJSConfig) + var fis []iofs.DirEntry + d, err := fs.Open(files.FolderJSConfig) + if err == nil { + fis, err = d.(iofs.ReadDirFile).ReadDir(-1) + } + if err == nil { for _, fi := range fis { key := fmt.Sprintf("HUGO_FILE_%s", strings.ReplaceAll(strings.ToUpper(fi.Name()), ".", "_")) diff --git a/common/loggers/handlerdefault.go b/common/loggers/handlerdefault.go index bb48895bc..bc3c7eec2 100644 --- a/common/loggers/handlerdefault.go +++ b/common/loggers/handlerdefault.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // Some functions in this file (see comments) is based on the Go source code, // copyright The Go Authors and governed by a BSD-style license. // @@ -27,10 +27,9 @@ import ( "github.com/fatih/color" ) -var bold = color.New(color.Bold) - // levelColor mapping. var levelColor = [...]*color.Color{ + logg.LevelTrace: color.New(color.FgWhite), logg.LevelDebug: color.New(color.FgWhite), logg.LevelInfo: color.New(color.FgBlue), logg.LevelWarn: color.New(color.FgYellow), @@ -39,6 +38,7 @@ var levelColor = [...]*color.Color{ // levelString mapping. var levelString = [...]string{ + logg.LevelTrace: "TRACE", logg.LevelDebug: "DEBUG", logg.LevelInfo: "INFO ", logg.LevelWarn: "WARN ", diff --git a/common/loggers/handlersmisc.go b/common/loggers/handlersmisc.go index 5c9d6c091..55bf8b940 100644 --- a/common/loggers/handlersmisc.go +++ b/common/loggers/handlersmisc.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // Some functions in this file (see comments) is based on the Go source code, // copyright The Go Authors and governed by a BSD-style license. 
// @@ -69,7 +69,7 @@ func (h *logLevelCounter) HandleLog(e *logg.Entry) error { return nil } -var stopError = fmt.Errorf("stop") +var errStop = fmt.Errorf("stop") type logOnceHandler struct { threshold logg.Level @@ -87,7 +87,7 @@ func (h *logOnceHandler) HandleLog(e *logg.Entry) error { defer h.mu.Unlock() hash := identity.HashUint64(e.Level, e.Message, e.Fields) if h.seen[hash] { - return stopError + return errStop } h.seen[hash] = true return nil @@ -107,7 +107,7 @@ type stopHandler struct { func (h *stopHandler) HandleLog(e *logg.Entry) error { for _, handler := range h.handlers { if err := handler.HandleLog(e); err != nil { - if err == stopError { + if err == errStop { return nil } return err @@ -124,26 +124,13 @@ func (h *suppressStatementsHandler) HandleLog(e *logg.Entry) error { for _, field := range e.Fields { if field.Name == FieldNameStatementID { if h.statements[field.Value.(string)] { - return stopError + return errStop } } } return nil } -// replacer creates a new log handler that does string replacement in log messages. -func replacer(repl *strings.Replacer) logg.Handler { - return logg.HandlerFunc(func(e *logg.Entry) error { - e.Message = repl.Replace(e.Message) - for i, field := range e.Fields { - if s, ok := field.Value.(string); ok { - e.Fields[i].Value = repl.Replace(s) - } - } - return nil - }) -} - // whiteSpaceTrimmer creates a new log handler that trims whitespace from log messages and string fields. func whiteSpaceTrimmer() logg.Handler { return logg.HandlerFunc(func(e *logg.Entry) error { diff --git a/common/loggers/handlerterminal.go b/common/loggers/handlerterminal.go index e3d377bbf..53f6e41da 100644 --- a/common/loggers/handlerterminal.go +++ b/common/loggers/handlerterminal.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // Some functions in this file (see comments) is based on the Go source code, // copyright The Go Authors and governed by a BSD-style license. // @@ -81,7 +81,7 @@ func (h *noColoursHandler) HandleLog(e *logg.Entry) error { if strings.HasPrefix(field.Name, reservedFieldNamePrefix) { continue } - fmt.Fprintf(w, " %s %q", field.Name, field.Value) + fmt.Fprintf(w, " %s %v", field.Name, field.Value) } fmt.Fprintln(w) diff --git a/common/loggers/logger.go b/common/loggers/logger.go index bc64ae0e5..c4d81fb83 100644 --- a/common/loggers/logger.go +++ b/common/loggers/logger.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // Some functions in this file (see comments) is based on the Go source code, // copyright The Go Authors and governed by a BSD-style license. // @@ -68,11 +68,24 @@ func New(opts Options) Logger { errorsw := &strings.Builder{} logCounters := newLogLevelCounter() handlers := []logg.Handler{ - whiteSpaceTrimmer(), - logHandler, logCounters, } + if opts.Level == logg.LevelTrace { + // Trace is used during development only, and it's useful to + // only see the trace messages. 
+ handlers = append(handlers, + logg.HandlerFunc(func(e *logg.Entry) error { + if e.Level != logg.LevelTrace { + return logg.ErrStopLogEntry + } + return nil + }), + ) + } + + handlers = append(handlers, whiteSpaceTrimmer(), logHandler) + if opts.HandlerPost != nil { var hookHandler logg.HandlerFunc = func(e *logg.Entry) error { opts.HandlerPost(e) @@ -127,6 +140,7 @@ func New(opts Options) Logger { out: opts.Stdout, level: opts.Level, logger: logger, + tracel: l.WithLevel(logg.LevelTrace), debugl: l.WithLevel(logg.LevelDebug), infol: l.WithLevel(logg.LevelInfo), warnl: l.WithLevel(logg.LevelWarn), @@ -145,11 +159,22 @@ func NewDefault() Logger { return New(opts) } +func NewTrace() Logger { + opts := Options{ + DistinctLevel: logg.LevelWarn, + Level: logg.LevelTrace, + Stdout: os.Stdout, + Stderr: os.Stdout, + } + return New(opts) +} + func LevelLoggerToWriter(l logg.LevelLogger) io.Writer { return logWriter{l: l} } type Logger interface { + Debug() logg.LevelLogger Debugf(format string, v ...any) Debugln(v ...any) Error() logg.LevelLogger @@ -174,6 +199,7 @@ type Logger interface { Warnf(format string, v ...any) Warnln(v ...any) Deprecatef(fail bool, format string, v ...any) + Trace(s logg.StringFunc) } type logAdapter struct { @@ -183,12 +209,17 @@ type logAdapter struct { out io.Writer level logg.Level logger logg.Logger + tracel logg.LevelLogger debugl logg.LevelLogger infol logg.LevelLogger warnl logg.LevelLogger errorl logg.LevelLogger } +func (l *logAdapter) Debug() logg.LevelLogger { + return l.debugl +} + func (l *logAdapter) Debugf(format string, v ...any) { l.debugl.Logf(format, v...) } @@ -294,6 +325,10 @@ func (l *logAdapter) Errorsf(id, format string, v ...any) { l.errorl.WithField(FieldNameStatementID, id).Logf(format, v...) } +func (l *logAdapter) Trace(s logg.StringFunc) { + l.tracel.Log(s) +} + func (l *logAdapter) sprint(v ...any) string { return strings.TrimRight(fmt.Sprintln(v...), "\n") } @@ -315,3 +350,19 @@ func (w logWriter) Write(p []byte) (n int, err error) { w.l.Log(logg.String(string(p))) return len(p), nil } + +func TimeTrackf(l logg.LevelLogger, start time.Time, fields logg.Fields, format string, a ...any) { + elapsed := time.Since(start) + if fields != nil { + l = l.WithFields(fields) + } + l.WithField("duration", elapsed).Logf(format, a...) +} + +func TimeTrackfn(fn func() (logg.LevelLogger, error)) error { + start := time.Now() + l, err := fn() + elapsed := time.Since(start) + l.WithField("duration", elapsed).Logf("") + return err +} diff --git a/common/loggers/logger_test.go b/common/loggers/logger_test.go index 6f589aafe..dcf94b123 100644 --- a/common/loggers/logger_test.go +++ b/common/loggers/logger_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // Some functions in this file (see comments) is based on the Go source code, // copyright The Go Authors and governed by a BSD-style license. // diff --git a/common/loggers/loggerglobal.go b/common/loggers/loggerglobal.go index 6fd474a69..c3e2970d0 100644 --- a/common/loggers/loggerglobal.go +++ b/common/loggers/loggerglobal.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // Some functions in this file (see comments) is based on the Go source code, // copyright The Go Authors and governed by a BSD-style license. 
// diff --git a/common/maps/cache.go b/common/maps/cache.go new file mode 100644 index 000000000..7e23a2662 --- /dev/null +++ b/common/maps/cache.go @@ -0,0 +1,90 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package maps + +import "sync" + +// Cache is a simple thread safe cache backed by a map. +type Cache[K comparable, T any] struct { + m map[K]T + sync.RWMutex +} + +// NewCache creates a new Cache. +func NewCache[K comparable, T any]() *Cache[K, T] { + return &Cache[K, T]{m: make(map[K]T)} +} + +// Delete deletes the given key from the cache. +func (c *Cache[K, T]) Get(key K) (T, bool) { + c.RLock() + v, found := c.m[key] + c.RUnlock() + return v, found +} + +// GetOrCreate gets the value for the given key if it exists, or creates it if not. +func (c *Cache[K, T]) GetOrCreate(key K, create func() T) T { + c.RLock() + v, found := c.m[key] + c.RUnlock() + if found { + return v + } + c.Lock() + defer c.Unlock() + v, found = c.m[key] + if found { + return v + } + v = create() + c.m[key] = v + return v +} + +// Set sets the given key to the given value. +func (c *Cache[K, T]) Set(key K, value T) { + c.Lock() + c.m[key] = value + c.Unlock() +} + +// SliceCache is a simple thread safe cache backed by a map. +type SliceCache[T any] struct { + m map[string][]T + sync.RWMutex +} + +func NewSliceCache[T any]() *SliceCache[T] { + return &SliceCache[T]{m: make(map[string][]T)} +} + +func (c *SliceCache[T]) Get(key string) ([]T, bool) { + c.RLock() + v, found := c.m[key] + c.RUnlock() + return v, found +} + +func (c *SliceCache[T]) Append(key string, values ...T) { + c.Lock() + c.m[key] = append(c.m[key], values...) 
+ c.Unlock() +} + +func (c *SliceCache[T]) Reset() { + c.Lock() + c.m = make(map[string][]T) + c.Unlock() +} diff --git a/common/maps/maps.go b/common/maps/maps.go index f0fd3d5ce..2686baad6 100644 --- a/common/maps/maps.go +++ b/common/maps/maps.go @@ -29,7 +29,7 @@ func ToStringMapE(in any) (map[string]any, error) { case Params: return vv, nil case map[string]string: - var m = map[string]any{} + m := map[string]any{} for k, v := range vv { m[k] = v } @@ -192,21 +192,20 @@ func (KeyRenamer) keyPath(k1, k2 string) string { } func (r KeyRenamer) renamePath(parentKeyPath string, m map[string]any) { - for key, val := range m { - keyPath := r.keyPath(parentKeyPath, key) - switch val.(type) { + for k, v := range m { + keyPath := r.keyPath(parentKeyPath, k) + switch vv := v.(type) { case map[any]any: - val = cast.ToStringMap(val) - r.renamePath(keyPath, val.(map[string]any)) + r.renamePath(keyPath, cast.ToStringMap(vv)) case map[string]any: - r.renamePath(keyPath, val.(map[string]any)) + r.renamePath(keyPath, vv) } newKey := r.getNewKey(keyPath) if newKey != "" { - delete(m, key) - m[newKey] = val + delete(m, k) + m[newKey] = v } } } diff --git a/common/maps/params.go b/common/maps/params.go index d94d16f9d..a8cbba555 100644 --- a/common/maps/params.go +++ b/common/maps/params.go @@ -61,7 +61,7 @@ func SetParams(dst, src Params) { // IsZero returns true if p is considered empty. func (p Params) IsZero() bool { - if p == nil || len(p) == 0 { + if len(p) == 0 { return true } @@ -74,7 +74,6 @@ func (p Params) IsZero() bool { } return false - } // MergeParamsWithStrategy transfers values from src to dst for new keys using the merge strategy given. @@ -93,7 +92,7 @@ func MergeParams(dst, src Params) { func (p Params) merge(ps ParamsMergeStrategy, pp Params) { ns, found := p.GetMergeStrategy() - var ms = ns + ms := ns if !found && ps != "" { ms = ps } @@ -248,7 +247,7 @@ const ( // CleanConfigStringMapString removes any processing instructions from m, // m will never be modified. func CleanConfigStringMapString(m map[string]string) map[string]string { - if m == nil || len(m) == 0 { + if len(m) == 0 { return m } if _, found := m[MergeStrategyKey]; !found { @@ -267,7 +266,7 @@ func CleanConfigStringMapString(m map[string]string) map[string]string { // CleanConfigStringMap is the same as CleanConfigStringMapString but for // map[string]any. func CleanConfigStringMap(m map[string]any) map[string]any { - if m == nil || len(m) == 0 { + if len(m) == 0 { return m } if _, found := m[MergeStrategyKey]; !found { @@ -291,7 +290,6 @@ func CleanConfigStringMap(m map[string]any) map[string]any { } return m2 - } func toMergeStrategy(v any) ParamsMergeStrategy { diff --git a/common/paths/path.go b/common/paths/path.go index 5d211c5e0..da99b16ac 100644 --- a/common/paths/path.go +++ b/common/paths/path.go @@ -16,14 +16,18 @@ package paths import ( "errors" "fmt" + "net/url" "path" "path/filepath" - "regexp" "strings" + "unicode" ) // FilePathSeparator as defined by os.Separator. -const FilePathSeparator = string(filepath.Separator) +const ( + FilePathSeparator = string(filepath.Separator) + slash = "/" +) // filepathPathBridge is a bridge for common functionality in filepath vs path type filepathPathBridge interface { @@ -72,6 +76,30 @@ func AbsPathify(workingDir, inPath string) string { return filepath.Join(workingDir, inPath) } +// AddTrailingSlash adds a trailing Unix styled slash (/) if not already +// there. 
+func AddTrailingSlash(path string) string { + if !strings.HasSuffix(path, "/") { + path += "/" + } + return path +} + +// AddLeadingSlash adds a leading Unix styled slash (/) if not already +// there. +func AddLeadingSlash(path string) string { + if !strings.HasPrefix(path, "/") { + path = "/" + path + } + return path +} + +// AddTrailingAndLeadingSlash adds a leading and trailing Unix styled slash (/) if not already +// there. +func AddLeadingAndTrailingSlash(path string) string { + return AddTrailingSlash(AddLeadingSlash(path)) +} + // MakeTitle converts the path given to a suitable title, trimming whitespace // and replacing hyphens with whitespace. func MakeTitle(inpath string) string { @@ -94,43 +122,6 @@ func makePathRelative(inPath string, possibleDirectories ...string) (string, err return inPath, errors.New("can't extract relative path, unknown prefix") } -// Should be good enough for Hugo. -var isFileRe = regexp.MustCompile(`.*\..{1,6}$`) - -// GetDottedRelativePath expects a relative path starting after the content directory. -// It returns a relative path with dots ("..") navigating up the path structure. -func GetDottedRelativePath(inPath string) string { - inPath = path.Clean(filepath.ToSlash(inPath)) - - if inPath == "." { - return "./" - } - - if !isFileRe.MatchString(inPath) && !strings.HasSuffix(inPath, "/") { - inPath += "/" - } - - if !strings.HasPrefix(inPath, "/") { - inPath = "/" + inPath - } - - dir, _ := filepath.Split(inPath) - - sectionCount := strings.Count(dir, "/") - - if sectionCount == 0 || dir == "/" { - return "./" - } - - var dottedPath string - - for i := 1; i < sectionCount; i++ { - dottedPath += "../" - } - - return dottedPath -} - // ExtNoDelimiter takes a path and returns the extension, excluding the delimiter, i.e. "md". func ExtNoDelimiter(in string) string { return strings.TrimPrefix(Ext(in), ".") @@ -167,12 +158,6 @@ func Filename(in string) (name string) { return } -// PathNoExt takes a path, strips out the extension, -// and returns the name of the file. -func PathNoExt(in string) string { - return strings.TrimSuffix(in, path.Ext(in)) -} - // FileAndExt returns the filename and any extension of a file path as // two separate strings. // @@ -252,16 +237,125 @@ func prettifyPath(in string, b filepathPathBridge) string { return b.Join(b.Dir(in), name, "index"+ext) } -type NamedSlice struct { - Name string - Slice []string +// CommonDir returns the common directory of the given paths. +func CommonDir(path1, path2 string) string { + if path1 == "" || path2 == "" { + return "" + } + + p1 := strings.Split(path1, "/") + p2 := strings.Split(path2, "/") + + var common []string + + for i := 0; i < len(p1) && i < len(p2); i++ { + if p1[i] == p2[i] { + common = append(common, p1[i]) + } else { + break + } + } + + return strings.Join(common, "/") } -func (n NamedSlice) String() string { - if len(n.Slice) == 0 { - return n.Name +// Sanitize sanitizes string to be used in Hugo's file paths and URLs, allowing only +// a predefined set of special Unicode characters. +// +// Spaces will be replaced with a single hyphen. +// +// This function is the core function used to normalize paths in Hugo. +// +// Note that this is the first common step for URL/path sanitation, +// the final URL/path may end up looking differently if the user has stricter rules defined (e.g. removePathAccents=true). 
+func Sanitize(s string) string { + var willChange bool + for i, r := range s { + willChange = !isAllowedPathCharacter(s, i, r) + if willChange { + break + } } - return fmt.Sprintf("%s%s{%s}", n.Name, FilePathSeparator, strings.Join(n.Slice, ",")) + + if !willChange { + // Prevent allocation when nothing changes. + return s + } + + target := make([]rune, 0, len(s)) + var ( + prependHyphen bool + wasHyphen bool + ) + + for i, r := range s { + isAllowed := isAllowedPathCharacter(s, i, r) + + if isAllowed { + // track explicit hyphen in input; no need to add a new hyphen if + // we just saw one. + wasHyphen = r == '-' + + if prependHyphen { + // if currently have a hyphen, don't prepend an extra one + if !wasHyphen { + target = append(target, '-') + } + prependHyphen = false + } + target = append(target, r) + } else if len(target) > 0 && !wasHyphen && unicode.IsSpace(r) { + prependHyphen = true + } + } + + return string(target) +} + +func isAllowedPathCharacter(s string, i int, r rune) bool { + if r == ' ' { + return false + } + // Check for the most likely first (faster). + isAllowed := unicode.IsLetter(r) || unicode.IsDigit(r) + isAllowed = isAllowed || r == '.' || r == '/' || r == '\\' || r == '_' || r == '#' || r == '+' || r == '~' || r == '-' || r == '@' + isAllowed = isAllowed || unicode.IsMark(r) + isAllowed = isAllowed || (r == '%' && i+2 < len(s) && ishex(s[i+1]) && ishex(s[i+2])) + return isAllowed +} + +// From https://golang.org/src/net/url/url.go +func ishex(c byte) bool { + switch { + case '0' <= c && c <= '9': + return true + case 'a' <= c && c <= 'f': + return true + case 'A' <= c && c <= 'F': + return true + } + return false +} + +var slashFunc = func(r rune) bool { + return r == '/' +} + +// Dir behaves like path.Dir without the path.Clean step. +// +// The returned path ends in a slash only if it is the root "/". +func Dir(s string) string { + dir, _ := path.Split(s) + if len(dir) > 1 && dir[len(dir)-1] == '/' { + return dir[:len(dir)-1] + } + return dir +} + +// FieldsSlash cuts s into fields separated with '/'. +func FieldsSlash(s string) []string { + f := strings.FieldsFunc(s, slashFunc) + return f } // DirFile holds the result from path.Split. @@ -274,3 +368,27 @@ type DirFile struct { func (df DirFile) String() string { return fmt.Sprintf("%s|%s", df.Dir, df.File) } + +// PathEscape escapes unicode letters in pth. +// Use URLEscape to escape full URLs including scheme, query etc. +// This is slightly faster for the common case. +// Note, there is a url.PathEscape function, but that also +// escapes /. +func PathEscape(pth string) string { + u, err := url.Parse(pth) + if err != nil { + panic(err) + } + return u.EscapedPath() +} + +// ToSlashTrimLeading is just a filepath.ToSlash with an added / prefix trimmer. +func ToSlashTrimLeading(s string) string { + return strings.TrimPrefix(filepath.ToSlash(s), "/") +} + +// ToSlashPreserveLeading converts the path given to a forward slash separated path +// and preserves the leading slash if present trimming any trailing slash. +func ToSlashPreserveLeading(s string) string { + return "/" + strings.Trim(filepath.ToSlash(s), "/") +} diff --git a/common/paths/path_test.go b/common/paths/path_test.go index 2400f16ab..3605bfc43 100644 --- a/common/paths/path_test.go +++ b/common/paths/path_test.go @@ -1,4 +1,4 @@ -// Copyright 2021 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -75,44 +75,6 @@ func TestMakePathRelative(t *testing.T) { } } -func TestGetDottedRelativePath(t *testing.T) { - // on Windows this will receive both kinds, both country and western ... - for _, f := range []func(string) string{filepath.FromSlash, func(s string) string { return s }} { - doTestGetDottedRelativePath(f, t) - } -} - -func doTestGetDottedRelativePath(urlFixer func(string) string, t *testing.T) { - type test struct { - input, expected string - } - data := []test{ - {"", "./"}, - {urlFixer("/"), "./"}, - {urlFixer("post"), "../"}, - {urlFixer("/post"), "../"}, - {urlFixer("post/"), "../"}, - {urlFixer("tags/foo.html"), "../"}, - {urlFixer("/tags/foo.html"), "../"}, - {urlFixer("/post/"), "../"}, - {urlFixer("////post/////"), "../"}, - {urlFixer("/foo/bar/index.html"), "../../"}, - {urlFixer("/foo/bar/foo/"), "../../../"}, - {urlFixer("/foo/bar/foo"), "../../../"}, - {urlFixer("foo/bar/foo/"), "../../../"}, - {urlFixer("foo/bar/foo/bar"), "../../../../"}, - {"404.html", "./"}, - {"404.xml", "./"}, - {"/404.html", "./"}, - } - for i, d := range data { - output := GetDottedRelativePath(d.input) - if d.expected != output { - t.Errorf("Test %d failed. Expected %q got %q", i, d.expected, output) - } - } -} - func TestMakeTitle(t *testing.T) { type test struct { input, expected string @@ -226,3 +188,77 @@ func TestFileAndExt(t *testing.T) { } } } + +func TestSanitize(t *testing.T) { + c := qt.New(t) + tests := []struct { + input string + expected string + }{ + {" Foo bar ", "Foo-bar"}, + {"Foo.Bar/foo_Bar-Foo", "Foo.Bar/foo_Bar-Foo"}, + {"fOO,bar:foobAR", "fOObarfoobAR"}, + {"FOo/BaR.html", "FOo/BaR.html"}, + {"FOo/Ba---R.html", "FOo/Ba---R.html"}, /// See #10104 + {"FOo/Ba R.html", "FOo/Ba-R.html"}, + {"трям/трям", "трям/трям"}, + {"은행", "은행"}, + {"Банковский кассир", "Банковский-кассир"}, + // Issue #1488 + {"संस्कृत", "संस्कृत"}, + {"a%C3%B1ame", "a%C3%B1ame"}, // Issue #1292 + {"this+is+a+test", "this+is+a+test"}, // Issue #1290 + {"~foo", "~foo"}, // Issue #2177 + + } + + for _, test := range tests { + c.Assert(Sanitize(test.input), qt.Equals, test.expected) + } +} + +func BenchmarkSanitize(b *testing.B) { + const ( + allAlowedPath = "foo/bar" + spacePath = "foo bar" + ) + + // This should not allocate any memory. + b.Run("All allowed", func(b *testing.B) { + for i := 0; i < b.N; i++ { + got := Sanitize(allAlowedPath) + if got != allAlowedPath { + b.Fatal(got) + } + } + }) + + // This will allocate some memory. 
+ b.Run("Spaces", func(b *testing.B) { + for i := 0; i < b.N; i++ { + got := Sanitize(spacePath) + if got != "foo-bar" { + b.Fatal(got) + } + } + }) +} + +func TestDir(t *testing.T) { + c := qt.New(t) + c.Assert(Dir("/a/b/c/d"), qt.Equals, "/a/b/c") + c.Assert(Dir("/a"), qt.Equals, "/") + c.Assert(Dir("/"), qt.Equals, "/") + c.Assert(Dir(""), qt.Equals, "") +} + +func TestFieldsSlash(t *testing.T) { + c := qt.New(t) + + c.Assert(FieldsSlash("a/b/c"), qt.DeepEquals, []string{"a", "b", "c"}) + c.Assert(FieldsSlash("/a/b/c"), qt.DeepEquals, []string{"a", "b", "c"}) + c.Assert(FieldsSlash("/a/b/c/"), qt.DeepEquals, []string{"a", "b", "c"}) + c.Assert(FieldsSlash("a/b/c/"), qt.DeepEquals, []string{"a", "b", "c"}) + c.Assert(FieldsSlash("/"), qt.DeepEquals, []string{}) + c.Assert(FieldsSlash(""), qt.DeepEquals, []string{}) +} diff --git a/common/paths/pathparser.go b/common/paths/pathparser.go new file mode 100644 index 000000000..842d9307b --- /dev/null +++ b/common/paths/pathparser.go @@ -0,0 +1,494 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package paths + +import ( + "path" + "path/filepath" + "runtime" + "strings" + + "github.com/gohugoio/hugo/common/types" + "github.com/gohugoio/hugo/hugofs/files" +) + +var defaultPathParser PathParser + +// PathParser parses a path into a Path. +type PathParser struct { + // Maps the language code to its index in the languages/sites slice. + LanguageIndex map[string]int +} + +// Parse parses component c with path s into Path using the default path parser. +func Parse(c, s string) *Path { + return defaultPathParser.Parse(c, s) +} + +// NormalizePathString returns a normalized path string using the very basic Hugo rules. +func NormalizePathStringBasic(s string) string { + // All lower case. + s = strings.ToLower(s) + + // Replace spaces with hyphens. + s = strings.ReplaceAll(s, " ", "-") + + return s +} + +// Parse parses component c with path s into Path using Hugo's content path rules. +func (parser PathParser) Parse(c, s string) *Path { + p, err := parser.parse(c, s) + if err != nil { + panic(err) + } + return p +} + +func (pp *PathParser) parse(component, s string) (*Path, error) { + ss := NormalizePathStringBasic(s) + + p, err := pp.doParse(component, ss) + if err != nil { + return nil, err + } + + if s != ss { + var err error + // Preserve the original case for titles etc. + p.unnormalized, err = pp.doParse(component, s) + + if err != nil { + return nil, err + } + } else { + p.unnormalized = p + } + + return p, nil +} + +func (pp *PathParser) doParse(component, s string) (*Path, error) { + p := &Path{ + component: component, + posContainerLow: -1, + posContainerHigh: -1, + posSectionHigh: -1, + posIdentifierLanguage: -1, + } + + hasLang := pp.LanguageIndex != nil + hasLang = hasLang && (component == files.ComponentFolderContent || component == files.ComponentFolderLayouts) + + if runtime.GOOS == "windows" { + s = path.Clean(filepath.ToSlash(s)) + if s == "." 
{ + s = "" + } + } + + if s == "" { + s = "/" + } + + // Leading slash, no trailing slash. + if !strings.HasPrefix(s, "/") { + s = "/" + s + } + + if s != "/" && s[len(s)-1] == '/' { + s = s[:len(s)-1] + } + + p.s = s + slashCount := 0 + + for i := len(s) - 1; i >= 0; i-- { + c := s[i] + + switch c { + case '.': + if p.posContainerHigh == -1 { + var high int + if len(p.identifiers) > 0 { + high = p.identifiers[len(p.identifiers)-1].Low - 1 + } else { + high = len(p.s) + } + id := types.LowHigh{Low: i + 1, High: high} + if len(p.identifiers) == 0 { + p.identifiers = append(p.identifiers, id) + } else if len(p.identifiers) == 1 { + // Check for a valid language. + s := p.s[id.Low:id.High] + + if hasLang { + if _, found := pp.LanguageIndex[s]; found { + p.posIdentifierLanguage = 1 + p.identifiers = append(p.identifiers, id) + } + } + } + } + case '/': + slashCount++ + if p.posContainerHigh == -1 { + p.posContainerHigh = i + 1 + } else if p.posContainerLow == -1 { + p.posContainerLow = i + 1 + } + if i > 0 { + p.posSectionHigh = i + } + } + } + + isContentComponent := p.component == files.ComponentFolderContent || p.component == files.ComponentFolderArchetypes + isContent := isContentComponent && files.IsContentExt(p.Ext()) + + if isContent { + id := p.identifiers[len(p.identifiers)-1] + b := p.s[p.posContainerHigh : id.Low-1] + switch b { + case "index": + p.bundleType = PathTypeLeaf + case "_index": + p.bundleType = PathTypeBranch + default: + p.bundleType = PathTypeContentSingle + } + + if slashCount == 2 && p.IsLeafBundle() { + p.posSectionHigh = 0 + } + } + + return p, nil +} + +func ModifyPathBundleTypeResource(p *Path) { + if p.IsContent() { + p.bundleType = PathTypeContentResource + } else { + p.bundleType = PathTypeFile + } +} + +type PathType int + +const ( + // A generic resource, e.g. a JSON file. + PathTypeFile PathType = iota + + // All below are content files. + // A resource of a content type with front matter. + PathTypeContentResource + + // E.g. /blog/my-post.md + PathTypeContentSingle + + // All bewlow are bundled content files. + + // Leaf bundles, e.g. /blog/my-post/index.md + PathTypeLeaf + + // Branch bundles, e.g. /blog/_index.md + PathTypeBranch +) + +type Path struct { + s string + + posContainerLow int + posContainerHigh int + posSectionHigh int + + component string + bundleType PathType + + identifiers []types.LowHigh + + posIdentifierLanguage int + + trimLeadingSlash bool + + unnormalized *Path +} + +// TrimLeadingSlash returns a copy of the Path with the leading slash removed. +func (p Path) TrimLeadingSlash() *Path { + p.trimLeadingSlash = true + return &p +} + +func (p *Path) norm(s string) string { + if p.trimLeadingSlash { + s = strings.TrimPrefix(s, "/") + } + return s +} + +// IdentifierBase satifies identity.Identity. +func (p *Path) IdentifierBase() string { + return p.Base()[1:] +} + +// Component returns the component for this path (e.g. "content"). +func (p *Path) Component() string { + return p.component +} + +// Container returns the base name of the container directory for this path. +func (p *Path) Container() string { + if p.posContainerLow == -1 { + return "" + } + return p.norm(p.s[p.posContainerLow : p.posContainerHigh-1]) +} + +// ContainerDir returns the container directory for this path. +// For content bundles this will be the parent directory. 
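// Illustrative behavior, mirroring the parser tests added in this patch:
// for the leaf bundle /a/b/index.md, Container is "b" and ContainerDir is "/a";
// for the regular content file /a/b/c.md, ContainerDir falls back to Dir, i.e. "/a/b".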
+func (p *Path) ContainerDir() string {
+	if p.posContainerLow == -1 || !p.IsBundle() {
+		return p.Dir()
+	}
+	return p.norm(p.s[:p.posContainerLow-1])
+}
+
+// Section returns the first path element (section).
+func (p *Path) Section() string {
+	if p.posSectionHigh <= 0 {
+		return ""
+	}
+	return p.norm(p.s[1:p.posSectionHigh])
+}
+
+// IsContent returns true if the path is a content file (e.g. mypost.md).
+// Note that this will also return true for content files in a bundle.
+func (p *Path) IsContent() bool {
+	return p.BundleType() >= PathTypeContentResource
+}
+
+// isContentPage returns true if the path is a content file (e.g. mypost.md),
+// but not if inside a leaf bundle.
+func (p *Path) isContentPage() bool {
+	return p.BundleType() >= PathTypeContentSingle
+}
+
+// Name returns the last element of path.
+func (p *Path) Name() string {
+	if p.posContainerHigh > 0 {
+		return p.s[p.posContainerHigh:]
+	}
+	return p.s
+}
+
+// NameNoExt returns the last element of path without any extension.
+func (p *Path) NameNoExt() string {
+	if i := p.identifierIndex(0); i != -1 {
+		return p.s[p.posContainerHigh : p.identifiers[i].Low-1]
+	}
+	return p.s[p.posContainerHigh:]
+}
+
+// NameNoLang returns the last element of path without any language identifier.
+func (p *Path) NameNoLang() string {
+	i := p.identifierIndex(p.posIdentifierLanguage)
+	if i == -1 {
+		return p.Name()
+	}
+
+	return p.s[p.posContainerHigh:p.identifiers[i].Low-1] + p.s[p.identifiers[i].High:]
+}
+
+// BaseNameNoIdentifier returns the logical base name for a resource without any identifier (e.g. no extension).
+// For bundles this will be the containing directory's name, e.g. "blog".
+func (p *Path) BaseNameNoIdentifier() string {
+	if p.IsBundle() {
+		return p.Container()
+	}
+	return p.NameNoIdentifier()
+}
+
+// NameNoIdentifier returns the last element of path without any identifier (e.g. no extension).
+func (p *Path) NameNoIdentifier() string {
+	if len(p.identifiers) > 0 {
+		return p.s[p.posContainerHigh : p.identifiers[len(p.identifiers)-1].Low-1]
+	}
+	return p.s[p.posContainerHigh:]
+}
+
+// Dir returns all but the last element of path, typically the path's directory.
+func (p *Path) Dir() (d string) {
+	if p.posContainerHigh > 0 {
+		d = p.s[:p.posContainerHigh-1]
+	}
+	if d == "" {
+		d = "/"
+	}
+	d = p.norm(d)
+	return
+}
+
+// Path returns the full path.
+func (p *Path) Path() (d string) {
+	return p.norm(p.s)
+}
+
+// Unmormalized returns the Path with the original case preserved.
+func (p *Path) Unmormalized() *Path {
+	return p.unnormalized
+}
+
+// PathNoLang returns the Path but with any language identifier removed.
+func (p *Path) PathNoLang() string {
+	return p.base(true, false)
+}
+
+// PathNoIdentifier returns the Path but with any identifier (ext, lang) removed.
+func (p *Path) PathNoIdentifier() string {
+	return p.base(false, false)
+}
+
+// PathRel returns the path relative to the given owner.
+func (p *Path) PathRel(owner *Path) string {
+	ob := owner.Base()
+	if !strings.HasSuffix(ob, "/") {
+		ob += "/"
+	}
+	return strings.TrimPrefix(p.Path(), ob)
+}
+
+// BaseRel returns the base path relative to the given owner.
+func (p *Path) BaseRel(owner *Path) string {
+	ob := owner.Base()
+	if ob == "/" {
+		ob = ""
+	}
+	return p.Base()[len(ob)+1:]
+}
+
+// For content files, Base returns the path without any identifiers (extension, language code etc.).
+// Any 'index' as the last path element is ignored.
+//
+// For other files (Resources), any extension is kept.
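// Illustrative behavior, mirroring the parser tests added in this patch:
//
//	/a/b/c.md         => /a/b/c
//	/a/b/index.no.md  => /a/b
//	/a/b/_index.no.md => /a/b
//	/a/b.txt          => /a/b.txt
//	/a/b.a.b.no.txt   => /a/b.a.b.txt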
+func (p *Path) Base() string { + return p.base(!p.isContentPage(), p.IsBundle()) +} + +// BaseNoLeadingSlash returns the base path without the leading slash. +func (p *Path) BaseNoLeadingSlash() string { + return p.Base()[1:] +} + +func (p *Path) base(preserveExt, isBundle bool) string { + if len(p.identifiers) == 0 { + return p.norm(p.s) + } + + if preserveExt && len(p.identifiers) == 1 { + // Preserve extension. + return p.norm(p.s) + } + + id := p.identifiers[len(p.identifiers)-1] + high := id.Low - 1 + + if isBundle { + high = p.posContainerHigh - 1 + } + + if high == 0 { + high++ + } + + if !preserveExt { + return p.norm(p.s[:high]) + } + + // For txt files etc. we want to preserve the extension. + id = p.identifiers[0] + + return p.norm(p.s[:high] + p.s[id.Low-1:id.High]) +} + +func (p *Path) Ext() string { + return p.identifierAsString(0) +} + +func (p *Path) Lang() string { + return p.identifierAsString(1) +} + +func (p *Path) Identifier(i int) string { + return p.identifierAsString(i) +} + +func (p *Path) Identifiers() []string { + ids := make([]string, len(p.identifiers)) + for i, id := range p.identifiers { + ids[i] = p.s[id.Low:id.High] + } + return ids +} + +func (p *Path) IsHTML() bool { + return files.IsHTML(p.Ext()) +} + +func (p *Path) BundleType() PathType { + return p.bundleType +} + +func (p *Path) IsBundle() bool { + return p.bundleType >= PathTypeLeaf +} + +func (p *Path) IsBranchBundle() bool { + return p.bundleType == PathTypeBranch +} + +func (p *Path) IsLeafBundle() bool { + return p.bundleType == PathTypeLeaf +} + +func (p *Path) identifierAsString(i int) string { + i = p.identifierIndex(i) + if i == -1 { + return "" + } + + id := p.identifiers[i] + return p.s[id.Low:id.High] +} + +func (p *Path) identifierIndex(i int) int { + if i < 0 || i >= len(p.identifiers) { + return -1 + } + return i +} + +// HasExt returns true if the Unix styled path has an extension. +func HasExt(p string) bool { + for i := len(p) - 1; i >= 0; i-- { + if p[i] == '.' { + return true + } + if p[i] == '/' { + return false + } + } + return false +} diff --git a/common/paths/pathparser_test.go b/common/paths/pathparser_test.go new file mode 100644 index 000000000..3546b6605 --- /dev/null +++ b/common/paths/pathparser_test.go @@ -0,0 +1,351 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
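A brief usage sketch of the new parser, before the table-driven tests below; the path and
language set are illustrative and mirror the testParser fixture that follows (assumes the
usual fmt import):

	parser := &PathParser{LanguageIndex: map[string]int{"no": 0, "en": 1}}
	p := parser.Parse(files.ComponentFolderContent, "/blog/My Post/index.no.md")

	// After the basic normalization (lower case, spaces to hyphens):
	fmt.Println(p.Section())      // "blog"
	fmt.Println(p.Lang())         // "no"
	fmt.Println(p.IsLeafBundle()) // true
	fmt.Println(p.Base())         // "/blog/my-post"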
+ +package paths + +import ( + "path/filepath" + "testing" + + "github.com/gohugoio/hugo/hugofs/files" + + qt "github.com/frankban/quicktest" +) + +var testParser = &PathParser{ + LanguageIndex: map[string]int{ + "no": 0, + "en": 1, + }, +} + +func TestParse(t *testing.T) { + c := qt.New(t) + + tests := []struct { + name string + path string + assert func(c *qt.C, p *Path) + }{ + { + "Basic text file", + "/a/b.txt", + func(c *qt.C, p *Path) { + c.Assert(p.Name(), qt.Equals, "b.txt") + c.Assert(p.Base(), qt.Equals, "/a/b.txt") + c.Assert(p.Container(), qt.Equals, "a") + c.Assert(p.Dir(), qt.Equals, "/a") + c.Assert(p.Ext(), qt.Equals, "txt") + c.Assert(p.IsContent(), qt.IsFalse) + }, + }, + { + "Basic text file, upper case", + "/A/B.txt", + func(c *qt.C, p *Path) { + c.Assert(p.Name(), qt.Equals, "b.txt") + c.Assert(p.NameNoExt(), qt.Equals, "b") + c.Assert(p.NameNoIdentifier(), qt.Equals, "b") + c.Assert(p.BaseNameNoIdentifier(), qt.Equals, "b") + c.Assert(p.Base(), qt.Equals, "/a/b.txt") + c.Assert(p.Ext(), qt.Equals, "txt") + }, + }, + { + "Basic text file, 1 space in dir", + "/a b/c.txt", + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/a-b/c.txt") + }, + }, + { + "Basic text file, 2 spaces in dir", + "/a b/c.txt", + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/a--b/c.txt") + }, + }, + { + "Basic text file, 1 space in filename", + "/a/b c.txt", + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/a/b-c.txt") + }, + }, + { + "Basic text file, 2 spaces in filename", + "/a/b c.txt", + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/a/b--c.txt") + }, + }, + { + "Basic text file, mixed case and spaces, unnormalized", + "/a/Foo BAR.txt", + func(c *qt.C, p *Path) { + pp := p.Unmormalized() + c.Assert(pp, qt.IsNotNil) + c.Assert(pp.BaseNameNoIdentifier(), qt.Equals, "Foo BAR") + }, + }, + { + "Basic Markdown file", + "/a/b/c.md", + func(c *qt.C, p *Path) { + c.Assert(p.IsContent(), qt.IsTrue) + c.Assert(p.IsLeafBundle(), qt.IsFalse) + c.Assert(p.Name(), qt.Equals, "c.md") + c.Assert(p.Base(), qt.Equals, "/a/b/c") + c.Assert(p.Section(), qt.Equals, "a") + c.Assert(p.BaseNameNoIdentifier(), qt.Equals, "c") + c.Assert(p.Path(), qt.Equals, "/a/b/c.md") + c.Assert(p.Dir(), qt.Equals, "/a/b") + c.Assert(p.Container(), qt.Equals, "b") + c.Assert(p.ContainerDir(), qt.Equals, "/a/b") + c.Assert(p.Ext(), qt.Equals, "md") + }, + }, + { + "Content resource", + "/a/b.md", + func(c *qt.C, p *Path) { + c.Assert(p.Name(), qt.Equals, "b.md") + c.Assert(p.Base(), qt.Equals, "/a/b") + c.Assert(p.BaseNoLeadingSlash(), qt.Equals, "a/b") + c.Assert(p.Section(), qt.Equals, "a") + c.Assert(p.BaseNameNoIdentifier(), qt.Equals, "b") + + // Reclassify it as a content resource. 
+ ModifyPathBundleTypeResource(p) + c.Assert(p.BundleType(), qt.Equals, PathTypeContentResource) + c.Assert(p.IsContent(), qt.IsTrue) + c.Assert(p.Name(), qt.Equals, "b.md") + c.Assert(p.Base(), qt.Equals, "/a/b.md") + }, + }, + { + "No ext", + "/a/b", + func(c *qt.C, p *Path) { + c.Assert(p.Name(), qt.Equals, "b") + c.Assert(p.NameNoExt(), qt.Equals, "b") + c.Assert(p.Base(), qt.Equals, "/a/b") + c.Assert(p.Ext(), qt.Equals, "") + }, + }, + { + "No ext, trailing slash", + "/a/b/", + func(c *qt.C, p *Path) { + c.Assert(p.Name(), qt.Equals, "b") + c.Assert(p.Base(), qt.Equals, "/a/b") + c.Assert(p.Ext(), qt.Equals, "") + }, + }, + { + "Identifiers", + "/a/b.a.b.no.txt", + func(c *qt.C, p *Path) { + c.Assert(p.Name(), qt.Equals, "b.a.b.no.txt") + c.Assert(p.NameNoIdentifier(), qt.Equals, "b.a.b") + c.Assert(p.NameNoLang(), qt.Equals, "b.a.b.txt") + c.Assert(p.Identifiers(), qt.DeepEquals, []string{"txt", "no"}) + c.Assert(p.Base(), qt.Equals, "/a/b.a.b.txt") + c.Assert(p.BaseNoLeadingSlash(), qt.Equals, "a/b.a.b.txt") + c.Assert(p.PathNoLang(), qt.Equals, "/a/b.a.b.txt") + c.Assert(p.Ext(), qt.Equals, "txt") + c.Assert(p.PathNoIdentifier(), qt.Equals, "/a/b.a.b") + }, + }, + { + "Home branch cundle", + "/_index.md", + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/") + c.Assert(p.Path(), qt.Equals, "/_index.md") + c.Assert(p.Container(), qt.Equals, "") + c.Assert(p.ContainerDir(), qt.Equals, "/") + }, + }, + { + "Index content file in root", + "/a/index.md", + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/a") + c.Assert(p.BaseNameNoIdentifier(), qt.Equals, "a") + c.Assert(p.Container(), qt.Equals, "a") + c.Assert(p.Container(), qt.Equals, "a") + c.Assert(p.ContainerDir(), qt.Equals, "") + c.Assert(p.Dir(), qt.Equals, "/a") + c.Assert(p.Ext(), qt.Equals, "md") + c.Assert(p.Identifiers(), qt.DeepEquals, []string{"md"}) + c.Assert(p.IsBranchBundle(), qt.IsFalse) + c.Assert(p.IsBundle(), qt.IsTrue) + c.Assert(p.IsLeafBundle(), qt.IsTrue) + c.Assert(p.Lang(), qt.Equals, "") + c.Assert(p.NameNoExt(), qt.Equals, "index") + c.Assert(p.NameNoIdentifier(), qt.Equals, "index") + c.Assert(p.NameNoLang(), qt.Equals, "index.md") + c.Assert(p.Section(), qt.Equals, "") + }, + }, + { + "Index content file with lang", + "/a/b/index.no.md", + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/a/b") + c.Assert(p.BaseNameNoIdentifier(), qt.Equals, "b") + c.Assert(p.Container(), qt.Equals, "b") + c.Assert(p.ContainerDir(), qt.Equals, "/a") + c.Assert(p.Dir(), qt.Equals, "/a/b") + c.Assert(p.Ext(), qt.Equals, "md") + c.Assert(p.Identifiers(), qt.DeepEquals, []string{"md", "no"}) + c.Assert(p.IsBranchBundle(), qt.IsFalse) + c.Assert(p.IsBundle(), qt.IsTrue) + c.Assert(p.IsLeafBundle(), qt.IsTrue) + c.Assert(p.Lang(), qt.Equals, "no") + c.Assert(p.NameNoExt(), qt.Equals, "index.no") + c.Assert(p.NameNoIdentifier(), qt.Equals, "index") + c.Assert(p.NameNoLang(), qt.Equals, "index.md") + c.Assert(p.PathNoLang(), qt.Equals, "/a/b/index.md") + c.Assert(p.Section(), qt.Equals, "a") + }, + }, + { + "Index branch content file", + "/a/b/_index.no.md", + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/a/b") + c.Assert(p.BaseNameNoIdentifier(), qt.Equals, "b") + c.Assert(p.Container(), qt.Equals, "b") + c.Assert(p.ContainerDir(), qt.Equals, "/a") + c.Assert(p.Ext(), qt.Equals, "md") + c.Assert(p.Identifiers(), qt.DeepEquals, []string{"md", "no"}) + c.Assert(p.IsBranchBundle(), qt.IsTrue) + c.Assert(p.IsBundle(), qt.IsTrue) + c.Assert(p.IsLeafBundle(), qt.IsFalse) + 
c.Assert(p.NameNoExt(), qt.Equals, "_index.no") + c.Assert(p.NameNoLang(), qt.Equals, "_index.md") + }, + }, + { + "Index root no slash", + "_index.md", + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/") + c.Assert(p.Ext(), qt.Equals, "md") + c.Assert(p.Name(), qt.Equals, "_index.md") + }, + }, + { + "Index root", + "/_index.md", + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/") + c.Assert(p.Ext(), qt.Equals, "md") + c.Assert(p.Name(), qt.Equals, "_index.md") + }, + }, + { + "Index first", + "/a/_index.md", + func(c *qt.C, p *Path) { + c.Assert(p.Section(), qt.Equals, "a") + }, + }, + { + "Index text file", + "/a/b/index.no.txt", + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/a/b/index.txt") + c.Assert(p.Ext(), qt.Equals, "txt") + c.Assert(p.Identifiers(), qt.DeepEquals, []string{"txt", "no"}) + c.Assert(p.IsLeafBundle(), qt.IsFalse) + c.Assert(p.PathNoIdentifier(), qt.Equals, "/a/b/index") + }, + }, + { + "Empty", + "", + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/") + c.Assert(p.Ext(), qt.Equals, "") + c.Assert(p.Name(), qt.Equals, "") + c.Assert(p.Path(), qt.Equals, "/") + }, + }, + { + "Slash", + "/", + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/") + c.Assert(p.Ext(), qt.Equals, "") + c.Assert(p.Name(), qt.Equals, "") + }, + }, + { + "Trim Leading Slash bundle", + "foo/bar/index.no.md", + func(c *qt.C, p *Path) { + c.Assert(p.Path(), qt.Equals, "/foo/bar/index.no.md") + pp := p.TrimLeadingSlash() + c.Assert(pp.Path(), qt.Equals, "foo/bar/index.no.md") + c.Assert(pp.PathNoLang(), qt.Equals, "foo/bar/index.md") + c.Assert(pp.Base(), qt.Equals, "foo/bar") + c.Assert(pp.Dir(), qt.Equals, "foo/bar") + c.Assert(pp.ContainerDir(), qt.Equals, "foo") + c.Assert(pp.Container(), qt.Equals, "bar") + c.Assert(pp.BaseNameNoIdentifier(), qt.Equals, "bar") + }, + }, + { + "Trim Leading Slash file", + "foo/bar.txt", + func(c *qt.C, p *Path) { + c.Assert(p.Path(), qt.Equals, "/foo/bar.txt") + pp := p.TrimLeadingSlash() + c.Assert(pp.Path(), qt.Equals, "foo/bar.txt") + c.Assert(pp.PathNoLang(), qt.Equals, "foo/bar.txt") + c.Assert(pp.Base(), qt.Equals, "foo/bar.txt") + c.Assert(pp.Dir(), qt.Equals, "foo") + c.Assert(pp.ContainerDir(), qt.Equals, "foo") + c.Assert(pp.Container(), qt.Equals, "foo") + c.Assert(pp.BaseNameNoIdentifier(), qt.Equals, "bar") + }, + }, + { + "File separator", + filepath.FromSlash("/a/b/c.txt"), + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/a/b/c.txt") + c.Assert(p.Ext(), qt.Equals, "txt") + c.Assert(p.Name(), qt.Equals, "c.txt") + c.Assert(p.Path(), qt.Equals, "/a/b/c.txt") + }, + }, + } + for _, test := range tests { + c.Run(test.name, func(c *qt.C) { + test.assert(c, testParser.Parse(files.ComponentFolderContent, test.path)) + }) + } +} + +func TestHasExt(t *testing.T) { + c := qt.New(t) + + c.Assert(HasExt("/a/b/c.txt"), qt.IsTrue) + c.Assert(HasExt("/a/b.c/d.txt"), qt.IsTrue) + c.Assert(HasExt("/a/b/c"), qt.IsFalse) + c.Assert(HasExt("/a/b.c/d"), qt.IsFalse) +} diff --git a/common/paths/paths_integration_test.go b/common/paths/paths_integration_test.go new file mode 100644 index 000000000..62d40f527 --- /dev/null +++ b/common/paths/paths_integration_test.go @@ -0,0 +1,80 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package paths_test + +import ( + "testing" + + "github.com/gohugoio/hugo/hugolib" +) + +func TestRemovePathAccents(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +disableKinds = ["taxonomy", "term"] +defaultContentLanguage = "en" +defaultContentLanguageInSubdir = true +[languages] +[languages.en] +weight = 1 +[languages.fr] +weight = 2 +removePathAccents = true +-- content/διακριτικός.md -- +-- content/διακριτικός.fr.md -- +-- layouts/_default/single.html -- +{{ .Language.Lang }}|Single. +-- layouts/_default/list.html -- +List +` + b := hugolib.Test(t, files) + + b.AssertFileContent("public/en/διακριτικός/index.html", "en|Single") + b.AssertFileContent("public/fr/διακριτικος/index.html", "fr|Single") +} + +func TestDisablePathToLower(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +disableKinds = ["taxonomy", "term"] +defaultContentLanguage = "en" +defaultContentLanguageInSubdir = true +[languages] +[languages.en] +weight = 1 +[languages.fr] +weight = 2 +disablePathToLower = true +-- content/MySection/MyPage.md -- +-- content/MySection/MyPage.fr.md -- +-- content/MySection/MyBundle/index.md -- +-- content/MySection/MyBundle/index.fr.md -- +-- layouts/_default/single.html -- +{{ .Language.Lang }}|Single. +-- layouts/_default/list.html -- +{{ .Language.Lang }}|List. +` + b := hugolib.Test(t, files) + + b.AssertFileContent("public/en/mysection/index.html", "en|List") + b.AssertFileContent("public/en/mysection/mypage/index.html", "en|Single") + b.AssertFileContent("public/fr/MySection/index.html", "fr|List") + b.AssertFileContent("public/fr/MySection/MyPage/index.html", "fr|Single") + b.AssertFileContent("public/en/mysection/mybundle/index.html", "en|Single") + b.AssertFileContent("public/fr/MySection/MyBundle/index.html", "fr|Single") +} diff --git a/common/paths/pathtype_string.go b/common/paths/pathtype_string.go new file mode 100644 index 000000000..7a99f8a03 --- /dev/null +++ b/common/paths/pathtype_string.go @@ -0,0 +1,27 @@ +// Code generated by "stringer -type=PathType"; DO NOT EDIT. + +package paths + +import "strconv" + +func _() { + // An "invalid array index" compiler error signifies that the constant values have changed. + // Re-run the stringer command to generate them again. + var x [1]struct{} + _ = x[PathTypeFile-0] + _ = x[PathTypeContentResource-1] + _ = x[PathTypeContentSingle-2] + _ = x[PathTypeLeaf-3] + _ = x[PathTypeBranch-4] +} + +const _PathType_name = "PathTypeFilePathTypeContentResourcePathTypeContentSinglePathTypeLeafPathTypeBranch" + +var _PathType_index = [...]uint8{0, 12, 35, 56, 68, 82} + +func (i PathType) String() string { + if i < 0 || i >= PathType(len(_PathType_index)-1) { + return "PathType(" + strconv.FormatInt(int64(i), 10) + ")" + } + return _PathType_name[_PathType_index[i]:_PathType_index[i+1]] +} diff --git a/common/paths/url.go b/common/paths/url.go index 093ba9ff7..4c4a7f2dc 100644 --- a/common/paths/url.go +++ b/common/paths/url.go @@ -184,3 +184,13 @@ func UrlToFilename(s string) (string, bool) { return p, true } + +// URLEscape escapes unicode letters. 
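// Illustrative example (the URL below is made up):
//
//	URLEscape("https://example.com/fr/crème") => "https://example.com/fr/cr%C3%A8me"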
+func URLEscape(uri string) string { + // escape unicode letters + u, err := url.Parse(uri) + if err != nil { + panic(err) + } + return u.String() +} diff --git a/common/predicate/predicate.go b/common/predicate/predicate.go new file mode 100644 index 000000000..f9cb1bb2b --- /dev/null +++ b/common/predicate/predicate.go @@ -0,0 +1,72 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package predicate + +// P is a predicate function that tests whether a value of type T satisfies some condition. +type P[T any] func(T) bool + +// And returns a predicate that is a short-circuiting logical AND of this and the given predicates. +func (p P[T]) And(ps ...P[T]) P[T] { + return func(v T) bool { + for _, pp := range ps { + if !pp(v) { + return false + } + } + return p(v) + } +} + +// Or returns a predicate that is a short-circuiting logical OR of this and the given predicates. +func (p P[T]) Or(ps ...P[T]) P[T] { + return func(v T) bool { + for _, pp := range ps { + if pp(v) { + return true + } + } + return p(v) + } +} + +// Negate returns a predicate that is a logical negation of this predicate. +func (p P[T]) Negate() P[T] { + return func(v T) bool { + return !p(v) + } +} + +// Filter returns a new slice holding only the elements of s that satisfy p. +// Filter modifies the contents of the slice s and returns the modified slice, which may have a smaller length. +func (p P[T]) Filter(s []T) []T { + var n int + for _, v := range s { + if p(v) { + s[n] = v + n++ + } + } + return s[:n] +} + +// FilterCopy returns a new slice holding only the elements of s that satisfy p. +func (p P[T]) FilterCopy(s []T) []T { + var result []T + for _, v := range s { + if p(v) { + result = append(result, v) + } + } + return result +} diff --git a/common/predicate/predicate_test.go b/common/predicate/predicate_test.go new file mode 100644 index 000000000..1e1ec004b --- /dev/null +++ b/common/predicate/predicate_test.go @@ -0,0 +1,83 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
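A quick illustration of how these predicates compose; the predicates and values below are
made up for the example (assumes the usual fmt import):

	var isPositive predicate.P[int] = func(i int) bool { return i > 0 }
	var isEven predicate.P[int] = func(i int) bool { return i%2 == 0 }

	nums := []int{-2, -1, 0, 1, 2, 3, 4}

	fmt.Println(isPositive.And(isEven).FilterCopy(nums)) // [2 4]
	fmt.Println(isPositive.Negate()(0))                  // true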
+ +package predicate_test + +import ( + "testing" + + qt "github.com/frankban/quicktest" + "github.com/gohugoio/hugo/common/predicate" +) + +func TestAdd(t *testing.T) { + c := qt.New(t) + + var p predicate.P[int] = intP1 + + c.Assert(p(1), qt.IsTrue) + c.Assert(p(2), qt.IsFalse) + + neg := p.Negate() + c.Assert(neg(1), qt.IsFalse) + c.Assert(neg(2), qt.IsTrue) + + and := p.And(intP2) + c.Assert(and(1), qt.IsFalse) + c.Assert(and(2), qt.IsFalse) + c.Assert(and(10), qt.IsTrue) + + or := p.Or(intP2) + c.Assert(or(1), qt.IsTrue) + c.Assert(or(2), qt.IsTrue) + c.Assert(or(10), qt.IsTrue) + c.Assert(or(11), qt.IsFalse) +} + +func TestFilter(t *testing.T) { + c := qt.New(t) + + var p predicate.P[int] = intP1 + p = p.Or(intP2) + + ints := []int{1, 2, 3, 4, 1, 6, 7, 8, 2} + + c.Assert(p.Filter(ints), qt.DeepEquals, []int{1, 2, 1, 2}) + c.Assert(ints, qt.DeepEquals, []int{1, 2, 1, 2, 1, 6, 7, 8, 2}) +} + +func TestFilterCopy(t *testing.T) { + c := qt.New(t) + + var p predicate.P[int] = intP1 + p = p.Or(intP2) + + ints := []int{1, 2, 3, 4, 1, 6, 7, 8, 2} + + c.Assert(p.FilterCopy(ints), qt.DeepEquals, []int{1, 2, 1, 2}) + c.Assert(ints, qt.DeepEquals, []int{1, 2, 3, 4, 1, 6, 7, 8, 2}) +} + +var intP1 = func(i int) bool { + if i == 10 { + return true + } + return i == 1 +} + +var intP2 = func(i int) bool { + if i == 10 { + return true + } + return i == 2 +} diff --git a/common/rungroup/rungroup.go b/common/rungroup/rungroup.go new file mode 100644 index 000000000..96ec57883 --- /dev/null +++ b/common/rungroup/rungroup.go @@ -0,0 +1,93 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package rungroup + +import ( + "context" + + "golang.org/x/sync/errgroup" +) + +// Group is a group of workers that can be used to enqueue work and wait for +// them to finish. +type Group[T any] interface { + Enqueue(T) error + Wait() error +} + +type runGroup[T any] struct { + ctx context.Context + g *errgroup.Group + ch chan T +} + +// Config is the configuration for a new Group. +type Config[T any] struct { + NumWorkers int + Handle func(context.Context, T) error +} + +// Run creates a new Group with the given configuration. +func Run[T any](ctx context.Context, cfg Config[T]) Group[T] { + if cfg.NumWorkers <= 0 { + cfg.NumWorkers = 1 + } + if cfg.Handle == nil { + panic("Handle must be set") + } + + g, ctx := errgroup.WithContext(ctx) + // Buffered for performance. + ch := make(chan T, cfg.NumWorkers) + + for i := 0; i < cfg.NumWorkers; i++ { + g.Go(func() error { + for { + select { + case <-ctx.Done(): + return nil + case v, ok := <-ch: + if !ok { + return nil + } + if err := cfg.Handle(ctx, v); err != nil { + return err + } + } + } + }) + } + + return &runGroup[T]{ + ctx: ctx, + g: g, + ch: ch, + } +} + +// Enqueue enqueues a new item to be handled by the workers. 
+func (r *runGroup[T]) Enqueue(t T) error { + select { + case <-r.ctx.Done(): + return nil + case r.ch <- t: + } + return nil +} + +// Wait waits for all workers to finish and returns the first error. +func (r *runGroup[T]) Wait() error { + close(r.ch) + return r.g.Wait() +} diff --git a/common/rungroup/rungroup_test.go b/common/rungroup/rungroup_test.go new file mode 100644 index 000000000..ac902079e --- /dev/null +++ b/common/rungroup/rungroup_test.go @@ -0,0 +1,44 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package rungroup + +import ( + "context" + "testing" + + qt "github.com/frankban/quicktest" +) + +func TestNew(t *testing.T) { + c := qt.New(t) + + var result int + adder := func(ctx context.Context, i int) error { + result += i + return nil + } + + g := Run[int]( + context.Background(), + Config[int]{ + Handle: adder, + }, + ) + + c.Assert(g, qt.IsNotNil) + g.Enqueue(32) + g.Enqueue(33) + c.Assert(g.Wait(), qt.IsNil) + c.Assert(result, qt.Equals, 65) +} diff --git a/common/terminal/colors.go b/common/terminal/colors.go index c4a78291e..8aa0e1af2 100644 --- a/common/terminal/colors.go +++ b/common/terminal/colors.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/common/types/css/csstypes.go b/common/types/css/csstypes.go index a31df00e7..061acfe64 100644 --- a/common/types/css/csstypes.go +++ b/common/types/css/csstypes.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/common/types/evictingqueue.go b/common/types/evictingqueue.go index 884762426..88add59d5 100644 --- a/common/types/evictingqueue.go +++ b/common/types/evictingqueue.go @@ -35,11 +35,11 @@ func NewEvictingStringQueue(size int) *EvictingStringQueue { } // Add adds a new string to the tail of the queue if it's not already there. -func (q *EvictingStringQueue) Add(v string) { +func (q *EvictingStringQueue) Add(v string) *EvictingStringQueue { q.mu.Lock() if q.set[v] { q.mu.Unlock() - return + return q } if len(q.set) == q.size { @@ -50,6 +50,17 @@ func (q *EvictingStringQueue) Add(v string) { q.set[v] = true q.vals = append(q.vals, v) q.mu.Unlock() + + return q +} + +func (q *EvictingStringQueue) Len() int { + if q == nil { + return 0 + } + q.mu.Lock() + defer q.mu.Unlock() + return len(q.vals) } // Contains returns whether the queue contains v. 
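A minimal sketch of how the rungroup package introduced above is meant to be used; the
worker count and file names are illustrative:

	g := rungroup.Run[string](context.Background(), rungroup.Config[string]{
		NumWorkers: 4,
		Handle: func(ctx context.Context, filename string) error {
			// Process one item; returning an error cancels the group.
			fmt.Println("handling", filename)
			return nil
		},
	})

	for _, f := range []string{"a.md", "b.md", "c.md"} {
		g.Enqueue(f)
	}

	if err := g.Wait(); err != nil {
		// Handle the first error returned by any worker.
	}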
diff --git a/common/types/hstring/stringtypes.go b/common/types/hstring/stringtypes.go index 601218e0e..5e8e3a23d 100644 --- a/common/types/hstring/stringtypes.go +++ b/common/types/hstring/stringtypes.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/common/types/hstring/stringtypes_test.go b/common/types/hstring/stringtypes_test.go index 8fa1c9760..2f1f865c8 100644 --- a/common/types/hstring/stringtypes_test.go +++ b/common/types/hstring/stringtypes_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/common/types/types.go b/common/types/types.go index c36c51b3e..11683c196 100644 --- a/common/types/types.go +++ b/common/types/types.go @@ -92,5 +92,18 @@ type DevMarker interface { DevOnly() } +// Unwrapper is implemented by types that can unwrap themselves. +type Unwrapper interface { + // Unwrapv is for internal use only. + // It got its slightly odd name to prevent collisions with user types. + Unwrapv() any +} + +// LowHigh is typically used to represent a slice boundary. +type LowHigh struct { + Low int + High int +} + // This is only used for debugging purposes. var InvocationCounter atomic.Int64 diff --git a/common/urls/baseURL.go b/common/urls/baseURL.go index df26730ec..2958a2a04 100644 --- a/common/urls/baseURL.go +++ b/common/urls/baseURL.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -23,10 +23,12 @@ import ( // A BaseURL in Hugo is normally on the form scheme://path, but the // form scheme: is also valid (mailto:hugo@rules.com). type BaseURL struct { - url *url.URL - WithPath string - WithoutPath string - BasePath string + url *url.URL + WithPath string + WithPathNoTrailingSlash string + WithoutPath string + BasePath string + BasePathNoTrailingSlash string } func (b BaseURL) String() string { @@ -92,19 +94,19 @@ func NewBaseURLFromString(b string) (BaseURL, error) { return BaseURL{}, err } return newBaseURLFromURL(u) - } func newBaseURLFromURL(u *url.URL) (BaseURL, error) { - baseURL := BaseURL{url: u, WithPath: u.String()} - var baseURLNoPath = baseURL.URL() + // A baseURL should always have a trailing slash, see #11669. + if !strings.HasSuffix(u.Path, "/") { + u.Path += "/" + } + baseURL := BaseURL{url: u, WithPath: u.String(), WithPathNoTrailingSlash: strings.TrimSuffix(u.String(), "/")} + baseURLNoPath := baseURL.URL() baseURLNoPath.Path = "" baseURL.WithoutPath = baseURLNoPath.String() - - basePath := u.Path - if basePath != "" && basePath != "/" { - baseURL.BasePath = basePath - } + baseURL.BasePath = u.Path + baseURL.BasePathNoTrailingSlash = strings.TrimSuffix(u.Path, "/") return baseURL, nil } diff --git a/common/urls/baseURL_test.go b/common/urls/baseURL_test.go index 95dc73339..ba337aac8 100644 --- a/common/urls/baseURL_test.go +++ b/common/urls/baseURL_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. 
All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -21,17 +21,24 @@ import ( func TestBaseURL(t *testing.T) { c := qt.New(t) - b, err := NewBaseURLFromString("http://example.com") + + b, err := NewBaseURLFromString("http://example.com/") c.Assert(err, qt.IsNil) - c.Assert(b.String(), qt.Equals, "http://example.com") + c.Assert(b.String(), qt.Equals, "http://example.com/") + + b, err = NewBaseURLFromString("http://example.com") + c.Assert(err, qt.IsNil) + c.Assert(b.String(), qt.Equals, "http://example.com/") + c.Assert(b.WithPathNoTrailingSlash, qt.Equals, "http://example.com") + c.Assert(b.BasePath, qt.Equals, "/") p, err := b.WithProtocol("webcal://") c.Assert(err, qt.IsNil) - c.Assert(p.String(), qt.Equals, "webcal://example.com") + c.Assert(p.String(), qt.Equals, "webcal://example.com/") p, err = b.WithProtocol("webcal") c.Assert(err, qt.IsNil) - c.Assert(p.String(), qt.Equals, "webcal://example.com") + c.Assert(p.String(), qt.Equals, "webcal://example.com/") _, err = b.WithProtocol("mailto:") c.Assert(err, qt.Not(qt.IsNil)) @@ -57,11 +64,18 @@ func TestBaseURL(t *testing.T) { b, err = NewBaseURLFromString("") c.Assert(err, qt.IsNil) - c.Assert(b.String(), qt.Equals, "") + c.Assert(b.String(), qt.Equals, "/") // BaseURL with sub path b, err = NewBaseURLFromString("http://example.com/sub") c.Assert(err, qt.IsNil) - c.Assert(b.String(), qt.Equals, "http://example.com/sub") + c.Assert(b.String(), qt.Equals, "http://example.com/sub/") + c.Assert(b.WithPathNoTrailingSlash, qt.Equals, "http://example.com/sub") + c.Assert(b.BasePath, qt.Equals, "/sub/") + c.Assert(b.BasePathNoTrailingSlash, qt.Equals, "/sub") + + b, err = NewBaseURLFromString("http://example.com/sub/") + c.Assert(err, qt.IsNil) + c.Assert(b.String(), qt.Equals, "http://example.com/sub/") c.Assert(b.HostURL(), qt.Equals, "http://example.com") } diff --git a/compare/compare.go b/compare/compare.go index 67bb1c125..fd15bd087 100644 --- a/compare/compare.go +++ b/compare/compare.go @@ -52,3 +52,16 @@ func Eq(v1, v2 any) bool { return v1 == v2 } + +// ProbablyEq returns whether v1 is probably equal to v2. +func ProbablyEq(v1, v2 any) bool { + if Eq(v1, v2) { + return true + } + + if peqer, ok := v1.(ProbablyEqer); ok { + return peqer.ProbablyEq(v2) + } + + return false +} diff --git a/config/allconfig/allconfig.go b/config/allconfig/allconfig.go index 9f0d73ecd..5788e792b 100644 --- a/config/allconfig/allconfig.go +++ b/config/allconfig/allconfig.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
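The net effect of the baseURL change above (see #11669) is that the path part always carries
a trailing slash, with the NoTrailingSlash variants available when needed. For example, with
an illustrative URL (matching the assertions in the test above):

	b, _ := urls.NewBaseURLFromString("https://example.com/docs")

	fmt.Println(b.String())                // "https://example.com/docs/"
	fmt.Println(b.WithPathNoTrailingSlash) // "https://example.com/docs"
	fmt.Println(b.BasePath)                // "/docs/"
	fmt.Println(b.BasePathNoTrailingSlash) // "/docs"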
@@ -30,6 +30,7 @@ import ( "github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/common/maps" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/common/urls" "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/config/privacy" @@ -283,12 +284,13 @@ func (c *Config) CompileConfig(logger loggers.Logger) error { disabledLangs := make(map[string]bool) for _, lang := range c.DisableLanguages { - if lang == c.DefaultContentLanguage { - return fmt.Errorf("cannot disable default content language %q", lang) - } disabledLangs[lang] = true } for lang, language := range c.Languages { + if !language.Disabled && disabledLangs[lang] { + language.Disabled = true + c.Languages[lang] = language + } if language.Disabled { disabledLangs[lang] = true if lang == c.DefaultContentLanguage { @@ -408,15 +410,19 @@ type ConfigCompiled struct { } // This may be set after the config is compiled. -func (c *ConfigCompiled) SetMainSectionsIfNotSet(sections []string) { +func (c *ConfigCompiled) SetMainSections(sections []string) { c.mu.Lock() defer c.mu.Unlock() - if c.MainSections != nil { - return - } c.MainSections = sections } +// IsMainSectionsSet returns whether the main sections have been set. +func (c *ConfigCompiled) IsMainSectionsSet() bool { + c.mu.Lock() + defer c.mu.Unlock() + return c.MainSections != nil +} + // This is set after the config is compiled by the server command. func (c *ConfigCompiled) SetBaseURL(baseURL, baseURLLiveReload urls.BaseURL) { c.BaseURL = baseURL @@ -425,7 +431,6 @@ func (c *ConfigCompiled) SetBaseURL(baseURL, baseURLLiveReload urls.BaseURL) { // RootConfig holds all the top-level configuration options in Hugo type RootConfig struct { - // The base URL of the site. // Note that the default value is empty, but Hugo requires a valid URL (e.g. "https://example.com/") to work properly. // {"identifiers": ["URL"] } @@ -648,13 +653,16 @@ type Configs struct { LanguageConfigMap map[string]*Config LanguageConfigSlice []*Config - IsMultihost bool - Languages langs.Languages - LanguagesDefaultFirst langs.Languages + IsMultihost bool Modules modules.Modules ModulesClient *modules.Client + // All below is set in Init. + Languages langs.Languages + LanguagesDefaultFirst langs.Languages + ContentPathParser paths.PathParser + configLangs []config.AllProvider } @@ -674,6 +682,58 @@ func (c *Configs) IsZero() bool { } func (c *Configs) Init() error { + var languages langs.Languages + defaultContentLanguage := c.Base.DefaultContentLanguage + for k, v := range c.LanguageConfigMap { + c.LanguageConfigSlice = append(c.LanguageConfigSlice, v) + languageConf := v.Languages[k] + language, err := langs.NewLanguage(k, defaultContentLanguage, v.TimeZone, languageConf) + if err != nil { + return err + } + languages = append(languages, language) + } + + // Sort the sites by language weight (if set) or lang. + sort.Slice(languages, func(i, j int) bool { + li := languages[i] + lj := languages[j] + if li.Weight != lj.Weight { + return li.Weight < lj.Weight + } + return li.Lang < lj.Lang + }) + + for _, l := range languages { + c.LanguageConfigSlice = append(c.LanguageConfigSlice, c.LanguageConfigMap[l.Lang]) + } + + // Filter out disabled languages. 
+ var n int + for _, l := range languages { + if !l.Disabled { + languages[n] = l + n++ + } + } + languages = languages[:n] + + var languagesDefaultFirst langs.Languages + for _, l := range languages { + if l.Lang == defaultContentLanguage { + languagesDefaultFirst = append(languagesDefaultFirst, l) + } + } + for _, l := range languages { + if l.Lang != defaultContentLanguage { + languagesDefaultFirst = append(languagesDefaultFirst, l) + } + } + + c.Languages = languages + c.LanguagesDefaultFirst = languagesDefaultFirst + c.ContentPathParser = paths.PathParser{LanguageIndex: languagesDefaultFirst.AsIndexSet()} + c.configLangs = make([]config.AllProvider, len(c.Languages)) for i, l := range c.LanguagesDefaultFirst { c.configLangs[i] = ConfigLanguage{ @@ -751,7 +811,6 @@ func fromLoadConfigResult(fs afero.Fs, logger loggers.Logger, res config.LoadCon } langConfigMap := make(map[string]*Config) - var langConfigs []*Config languagesConfig := cfg.GetStringMap("languages") var isMultiHost bool @@ -848,65 +907,24 @@ func fromLoadConfigResult(fs afero.Fs, logger loggers.Logger, res config.LoadCon } } - var languages langs.Languages - defaultContentLanguage := all.DefaultContentLanguage - for k, v := range langConfigMap { - languageConf := v.Languages[k] - language, err := langs.NewLanguage(k, defaultContentLanguage, v.TimeZone, languageConf) - if err != nil { - return nil, err - } - languages = append(languages, language) - } - - // Sort the sites by language weight (if set) or lang. - sort.Slice(languages, func(i, j int) bool { - li := languages[i] - lj := languages[j] - if li.Weight != lj.Weight { - return li.Weight < lj.Weight - } - return li.Lang < lj.Lang - }) - - for _, l := range languages { - langConfigs = append(langConfigs, langConfigMap[l.Lang]) - } - - var languagesDefaultFirst langs.Languages - for _, l := range languages { - if l.Lang == defaultContentLanguage { - languagesDefaultFirst = append(languagesDefaultFirst, l) - } - } - for _, l := range languages { - if l.Lang != defaultContentLanguage { - languagesDefaultFirst = append(languagesDefaultFirst, l) - } - } - bcfg.PublishDir = all.PublishDir res.BaseConfig = bcfg all.CommonDirs.CacheDir = bcfg.CacheDir - for _, l := range langConfigs { + for _, l := range langConfigMap { l.CommonDirs.CacheDir = bcfg.CacheDir } cm := &Configs{ - Base: all, - LanguageConfigMap: langConfigMap, - LanguageConfigSlice: langConfigs, - LoadingInfo: res, - IsMultihost: isMultiHost, - Languages: languages, - LanguagesDefaultFirst: languagesDefaultFirst, + Base: all, + LanguageConfigMap: langConfigMap, + LoadingInfo: res, + IsMultihost: isMultiHost, } return cm, nil } func decodeConfigFromParams(fs afero.Fs, logger loggers.Logger, bcfg config.BaseConfig, p config.Provider, target *Config, keys []string) error { - var decoderSetups []decodeWeight if len(keys) == 0 { diff --git a/config/allconfig/alldecoders.go b/config/allconfig/alldecoders.go index dc58882f3..f96c19cfc 100644 --- a/config/allconfig/alldecoders.go +++ b/config/allconfig/alldecoders.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/config/allconfig/configlanguage.go b/config/allconfig/configlanguage.go index 2c5a116f4..71bd232de 100644 --- a/config/allconfig/configlanguage.go +++ b/config/allconfig/configlanguage.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -16,6 +16,7 @@ package allconfig import ( "time" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/common/urls" "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/langs" @@ -41,10 +42,15 @@ func (c ConfigLanguage) LanguagesDefaultFirst() langs.Languages { return c.m.LanguagesDefaultFirst } +func (c ConfigLanguage) PathParser() paths.PathParser { + return c.m.ContentPathParser +} + func (c ConfigLanguage) LanguagePrefix() string { if c.DefaultContentLanguageInSubdir() && c.DefaultContentLanguage() == c.Language().Lang { return c.Language().Lang } + if !c.IsMultiLingual() || c.DefaultContentLanguage() == c.Language().Lang { return "" } @@ -119,6 +125,10 @@ func (c ConfigLanguage) Quiet() bool { return c.m.Base.Internal.Quiet } +func (c ConfigLanguage) Watching() bool { + return c.m.Base.Internal.Watch +} + // GetConfigSection is mostly used in tests. The switch statement isn't complete, but what's in use. func (c ConfigLanguage) GetConfigSection(s string) any { switch s { diff --git a/config/allconfig/docshelper.go b/config/allconfig/docshelper.go index 48a09de51..1a5fb6153 100644 --- a/config/allconfig/docshelper.go +++ b/config/allconfig/docshelper.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -22,7 +22,6 @@ import ( // This is is just some helpers used to create some JSON used in the Hugo docs. 
func init() { docsProvider := func() docshelper.DocProvider { - cfg := config.New() for configRoot, v := range allDecoderSetups { if v.internalOrDeprecated { diff --git a/config/allconfig/integration_test.go b/config/allconfig/integration_test.go index fcb92e71d..4f2f1a06e 100644 --- a/config/allconfig/integration_test.go +++ b/config/allconfig/integration_test.go @@ -10,7 +10,6 @@ import ( ) func TestDirsMount(t *testing.T) { - files := ` -- hugo.toml -- baseURL = "https://example.com" @@ -44,7 +43,7 @@ Title: {{ .Title }} hugolib.IntegrationTestConfig{T: t, TxtarString: files}, ).Build() - //b.AssertFileContent("public/p1/index.html", "Title: p1") + // b.AssertFileContent("public/p1/index.html", "Title: p1") sites := b.H.Sites b.Assert(len(sites), qt.Equals, 2) @@ -58,7 +57,7 @@ Title: {{ .Title }} enConcp := sites[0].Conf enConf := enConcp.GetConfig().(*allconfig.Config) - b.Assert(enConcp.BaseURL().String(), qt.Equals, "https://example.com") + b.Assert(enConcp.BaseURL().String(), qt.Equals, "https://example.com/") modConf := enConf.Module b.Assert(modConf.Mounts, qt.HasLen, 8) b.Assert(modConf.Mounts[0].Source, qt.Equals, filepath.FromSlash("content/en")) @@ -67,11 +66,9 @@ Title: {{ .Title }} b.Assert(modConf.Mounts[1].Source, qt.Equals, filepath.FromSlash("content/sv")) b.Assert(modConf.Mounts[1].Target, qt.Equals, "content") b.Assert(modConf.Mounts[1].Lang, qt.Equals, "sv") - } func TestConfigAliases(t *testing.T) { - files := ` -- hugo.toml -- baseURL = "https://example.com" diff --git a/config/allconfig/load.go b/config/allconfig/load.go index 7d706c7e3..eceed31f4 100644 --- a/config/allconfig/load.go +++ b/config/allconfig/load.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -37,6 +37,7 @@ import ( "github.com/spf13/afero" ) +//lint:ignore ST1005 end user message. var ErrNoConfigFile = errors.New("Unable to locate config file or config directory. Perhaps you need to create a new site.\n Run `hugo help new` for details.\n") func LoadConfig(d ConfigSourceDescriptor) (*Configs, error) { @@ -566,15 +567,6 @@ func (l configLoader) deleteMergeStrategies() { }) } -func (l configLoader) loadModulesConfig() (modules.Config, error) { - modConfig, err := modules.DecodeConfig(l.cfg) - if err != nil { - return modules.Config{}, err - } - - return modConfig, nil -} - func (l configLoader) wrapFileError(err error, filename string) error { fe := herrors.UnwrapFileError(err) if fe != nil { diff --git a/config/commonConfig.go b/config/commonConfig.go index ef9d47553..6ca061093 100644 --- a/config/commonConfig.go +++ b/config/commonConfig.go @@ -85,29 +85,22 @@ var defaultBuild = BuildConfig{ BuildStats: BuildStats{}, CacheBusters: []CacheBuster{ - { - Source: `assets/.*\.(js|ts|jsx|tsx)`, - Target: `(js|scripts|javascript)`, - }, - { - Source: `assets/.*\.(css|sass|scss)$`, - Target: cssTargetCachebusterRe, - }, { Source: `(postcss|tailwind)\.config\.js`, Target: cssTargetCachebusterRe, }, - // This is deliberately coarse grained; it will cache bust resources with "json" in the cache key when js files changes, which is good. - { - Source: `assets/.*\.(.*)$`, - Target: `$1`, - }, }, } // BuildConfig holds some build related configuration. type BuildConfig struct { - UseResourceCacheWhen string // never, fallback, always. 
Default is fallback + // When to use the resource file cache. + // One of never, fallback, always. Default is fallback + UseResourceCacheWhen string + + // When enabled, will duplicate bundled resource files across languages that + // doesn't have a translated version. + DuplicateResourceFiles bool // When enabled, will collect and write a hugo_stats.json with some build // related aggregated data (e.g. CSS class names). @@ -373,7 +366,6 @@ func (c *CacheBuster) CompileConfig(logger loggers.Logger) error { return match } - } return compileErr } @@ -416,7 +408,6 @@ func DecodeServer(cfg Provider) (Server, error) { Status: 404, }, } - } return *s, nil diff --git a/config/commonConfig_test.go b/config/commonConfig_test.go index 8aa1318dd..425d3e970 100644 --- a/config/commonConfig_test.go +++ b/config/commonConfig_test.go @@ -148,21 +148,13 @@ func TestBuildConfigCacheBusters(t *testing.T) { l := loggers.NewDefault() c.Assert(conf.CompileConfig(l), qt.IsNil) - m, err := conf.MatchCacheBuster(l, "assets/foo/main.js") - c.Assert(err, qt.IsNil) + m, _ := conf.MatchCacheBuster(l, "tailwind.config.js") c.Assert(m, qt.IsNotNil) - c.Assert(m("scripts"), qt.IsTrue) - c.Assert(m("asdf"), qt.IsFalse) - - m, _ = conf.MatchCacheBuster(l, "tailwind.config.js") c.Assert(m("css"), qt.IsTrue) c.Assert(m("js"), qt.IsFalse) - m, err = conf.MatchCacheBuster(l, "assets/foo.json") - c.Assert(err, qt.IsNil) - c.Assert(m, qt.IsNotNil) - c.Assert(m("json"), qt.IsTrue) - + m, _ = conf.MatchCacheBuster(l, "foo.bar") + c.Assert(m, qt.IsNil) } func TestBuildConfigCacheBusterstTailwindSetup(t *testing.T) { diff --git a/config/configProvider.go b/config/configProvider.go index 11099e407..2536639ea 100644 --- a/config/configProvider.go +++ b/config/configProvider.go @@ -17,6 +17,7 @@ import ( "time" "github.com/gohugoio/hugo/common/maps" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/common/types" "github.com/gohugoio/hugo/common/urls" "github.com/gohugoio/hugo/langs" @@ -30,6 +31,7 @@ type AllProvider interface { LanguagePrefix() string BaseURL() urls.BaseURL BaseURLLiveReload() urls.BaseURL + PathParser() paths.PathParser Environment() string IsMultihost() bool IsMultiLingual() bool @@ -54,6 +56,7 @@ type AllProvider interface { BuildFuture() bool BuildDrafts() bool Running() bool + Watching() bool PrintUnusedTemplates() bool EnableMissingTranslationPlaceholders() bool TemplateMetrics() bool diff --git a/config/env.go b/config/env.go index 1e9266b17..0ad5ecaea 100644 --- a/config/env.go +++ b/config/env.go @@ -18,6 +18,12 @@ import ( "runtime" "strconv" "strings" + + "github.com/pbnjay/memory" +) + +const ( + gigabyte = 1 << 30 ) // GetNumWorkerMultiplier returns the base value used to calculate the number @@ -33,6 +39,37 @@ func GetNumWorkerMultiplier() int { return runtime.NumCPU() } +// GetMemoryLimit returns the upper memory limit in bytes for Hugo's in-memory caches. +// Note that this does not represent "all of the memory" that Hugo will use, +// so it needs to be set to a lower number than the available system memory. +// It will read from the HUGO_MEMORYLIMIT (in Gigabytes) environment variable. +// If that is not set, it will set aside a quarter of the total system memory. 
+func GetMemoryLimit() uint64 { + if mem := os.Getenv("HUGO_MEMORYLIMIT"); mem != "" { + if v := stringToGibabyte(mem); v > 0 { + return v + } + + } + + // There is a FreeMemory function, but as the kernel in most situations + // will take whatever memory that is left and use for caching etc., + // that value is not something that we can use. + m := memory.TotalMemory() + if m != 0 { + return uint64(m / 4) + } + + return 2 * gigabyte +} + +func stringToGibabyte(f string) uint64 { + if v, err := strconv.ParseFloat(f, 32); err == nil && v > 0 { + return uint64(v * gigabyte) + } + return 0 +} + // SetEnvVars sets vars on the form key=value in the oldVars slice. func SetEnvVars(oldVars *[]string, keyValues ...string) { for i := 0; i < len(keyValues); i += 2 { diff --git a/config/namespace.go b/config/namespace.go index 3ecd01014..b518c6c01 100644 --- a/config/namespace.go +++ b/config/namespace.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -20,7 +20,6 @@ import ( ) func DecodeNamespace[S, C any](configSource any, buildConfig func(any) (C, any, error)) (*ConfigNamespace[S, C], error) { - // Calculate the hash of the input (not including any defaults applied later). // This allows us to introduce new config options without breaking the hash. h := identity.HashString(configSource) diff --git a/config/namespace_test.go b/config/namespace_test.go index 008237c13..9bd23e08e 100644 --- a/config/namespace_test.go +++ b/config/namespace_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -26,7 +26,7 @@ func TestNamespace(t *testing.T) { c := qt.New(t) c.Assert(true, qt.Equals, true) - //ns, err := config.DecodeNamespace[map[string]DocsMediaTypeConfig](in, defaultMediaTypesConfig, buildConfig) + // ns, err := config.DecodeNamespace[map[string]DocsMediaTypeConfig](in, defaultMediaTypesConfig, buildConfig) ns, err := DecodeNamespace[[]*tstNsExt]( map[string]interface{}{"foo": "bar"}, @@ -46,23 +46,15 @@ func TestNamespace(t *testing.T) { c.Assert(ns.SourceHash, qt.Equals, "14368731254619220105") c.Assert(ns.Config, qt.DeepEquals, &tstNsExt{Foo: "bar"}) c.Assert(ns.Signature(), qt.DeepEquals, []*tstNsExt(nil)) - } type ( tstNsExt struct { Foo string } - tstNsInt struct { - Foo string - } ) func (t *tstNsExt) Init() error { t.Foo = strings.ToUpper(t.Foo) return nil } -func (t *tstNsInt) Compile(ext *tstNsExt) error { - t.Foo = ext.Foo + " qux" - return nil -} diff --git a/config/testconfig/testconfig.go b/config/testconfig/testconfig.go index 4aafd69f0..8f70e6cb7 100644 --- a/config/testconfig/testconfig.go +++ b/config/testconfig/testconfig.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -36,7 +36,7 @@ func GetTestConfigs(fs afero.Fs, cfg config.Provider) *allconfig.Configs { // Make sure that the workingDir exists. 
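As a usage illustration of the GetMemoryLimit function added to config/env.go above (a hedged sketch, not part of the patch): HUGO_MEMORYLIMIT is read as a possibly fractional number of gigabytes, with a fallback to a quarter of total system memory and, failing that, 2 GB.

package main

import (
	"fmt"
	"os"

	"github.com/gohugoio/hugo/config"
)

func main() {
	// HUGO_MEMORYLIMIT is interpreted as gigabytes and may be fractional.
	os.Setenv("HUGO_MEMORYLIMIT", "1.5")
	fmt.Println(config.GetMemoryLimit()) // 1610612736 (1.5 * 1<<30)

	// With the variable unset, the limit falls back to a quarter of the
	// total system memory, and to 2 GB if that cannot be determined.
	os.Unsetenv("HUGO_MEMORYLIMIT")
	fmt.Println(config.GetMemoryLimit())
}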
workingDir := cfg.GetString("workingDir") if workingDir != "" { - if err := fs.MkdirAll(workingDir, 0777); err != nil { + if err := fs.MkdirAll(workingDir, 0o777); err != nil { panic(err) } } @@ -46,7 +46,6 @@ func GetTestConfigs(fs afero.Fs, cfg config.Provider) *allconfig.Configs { panic(err) } return configs - } func GetTestConfig(fs afero.Fs, cfg config.Provider) config.AllProvider { diff --git a/create/content.go b/create/content.go index 10442c396..5c2327532 100644 --- a/create/content.go +++ b/create/content.go @@ -16,6 +16,7 @@ package create import ( "bytes" + "errors" "fmt" "io" "os" @@ -25,10 +26,9 @@ import ( "github.com/gohugoio/hugo/hugofs/glob" "github.com/gohugoio/hugo/common/hexec" + "github.com/gohugoio/hugo/common/hstrings" "github.com/gohugoio/hugo/common/paths" - "errors" - "github.com/gohugoio/hugo/hugofs/files" "github.com/gohugoio/hugo/hugofs" @@ -53,7 +53,7 @@ draft: true // NewContent creates a new content file in h (or a full bundle if the archetype is a directory) // in targetPath. func NewContent(h *hugolib.HugoSites, kind, targetPath string, force bool) error { - if h.BaseFs.Content.Dirs == nil { + if _, err := h.BaseFs.Content.Fs.Stat(""); err != nil { return errors.New("no existing content directory configured for this project") } @@ -103,7 +103,6 @@ func NewContent(h *hugolib.HugoSites, kind, targetPath string, force bool) error } return b.buildFile() - } filename, err := withBuildLock() @@ -116,7 +115,6 @@ func NewContent(h *hugolib.HugoSites, kind, targetPath string, force bool) error } return nil - } type contentBuilder struct { @@ -128,12 +126,12 @@ type contentBuilder struct { cf hugolib.ContentFactory // Builder state - archetypeFilename string - targetPath string - kind string - isDir bool - dirMap archetypeMap - force bool + archetypeFi hugofs.FileMetaInfo + targetPath string + kind string + isDir bool + dirMap archetypeMap + force bool } func (b *contentBuilder) buildDir() error { @@ -146,7 +144,10 @@ func (b *contentBuilder) buildDir() error { var baseDir string for _, fi := range b.dirMap.contentFiles { - targetFilename := filepath.Join(b.targetPath, strings.TrimPrefix(fi.Meta().Path, b.archetypeFilename)) + + targetFilename := filepath.Join(b.targetPath, strings.TrimPrefix(fi.Meta().PathInfo.Path(), b.archetypeFi.Meta().PathInfo.Path())) + + // ===> post/my-post/pages/bio.md abs, err := b.cf.CreateContentPlaceHolder(targetFilename, b.force) if err != nil { return err @@ -170,7 +171,6 @@ func (b *contentBuilder) buildDir() error { } return false }) - } if err := b.h.Build(hugolib.BuildCfg{NoBuildLock: true, SkipRender: true, ContentInclusionFilter: contentInclusionFilter}); err != nil { @@ -178,22 +178,20 @@ func (b *contentBuilder) buildDir() error { } for i, filename := range contentTargetFilenames { - if err := b.applyArcheType(filename, b.dirMap.contentFiles[i].Meta().Path); err != nil { + if err := b.applyArcheType(filename, b.dirMap.contentFiles[i]); err != nil { return err } } // Copy the rest as is. 
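The target filename computation in buildDir above reduces to trimming the archetype bundle's path prefix and re-rooting the remainder under the new content path; a small sketch with made-up example paths (the real code takes them from fi.Meta().PathInfo and b.archetypeFi.Meta().PathInfo):

package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

func main() {
	// Hypothetical example paths, matching the "===> post/my-post/pages/bio.md" note above.
	archetypePath := "/archetypes/my-bundle"
	filePath := "/archetypes/my-bundle/pages/bio.md"
	targetPath := "post/my-post"

	targetFilename := filepath.Join(targetPath, strings.TrimPrefix(filePath, archetypePath))
	fmt.Println(targetFilename) // post/my-post/pages/bio.md
}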
- for _, f := range b.dirMap.otherFiles { - meta := f.Meta() - filename := meta.Path + for _, fi := range b.dirMap.otherFiles { + meta := fi.Meta() in, err := meta.Open() if err != nil { return fmt.Errorf("failed to open non-content file: %w", err) } - - targetFilename := filepath.Join(baseDir, b.targetPath, strings.TrimPrefix(filename, b.archetypeFilename)) + targetFilename := filepath.Join(baseDir, b.targetPath, strings.TrimPrefix(fi.Meta().Filename, b.archetypeFi.Meta().Filename)) targetDir := filepath.Dir(targetFilename) if err := b.sourceFs.MkdirAll(targetDir, 0o777); err != nil && !os.IsExist(err) { @@ -225,7 +223,7 @@ func (b *contentBuilder) buildFile() (string, error) { return "", err } - usesSite, err := b.usesSiteVar(b.archetypeFilename) + usesSite, err := b.usesSiteVar(b.archetypeFi) if err != nil { return "", err } @@ -243,7 +241,7 @@ func (b *contentBuilder) buildFile() (string, error) { return "", err } - if err := b.applyArcheType(contentPlaceholderAbsFilename, b.archetypeFilename); err != nil { + if err := b.applyArcheType(contentPlaceholderAbsFilename, b.archetypeFi); err != nil { return "", err } @@ -264,15 +262,14 @@ func (b *contentBuilder) setArcheTypeFilenameToUse(ext string) { for _, p := range pathsToCheck { fi, err := b.archeTypeFs.Stat(p) if err == nil { - b.archetypeFilename = p + b.archetypeFi = fi.(hugofs.FileMetaInfo) b.isDir = fi.IsDir() return } } - } -func (b *contentBuilder) applyArcheType(contentFilename, archetypeFilename string) error { +func (b *contentBuilder) applyArcheType(contentFilename string, archetypeFi hugofs.FileMetaInfo) error { p := b.h.GetContentPage(contentFilename) if p == nil { panic(fmt.Sprintf("[BUG] no Page found for %q", contentFilename)) @@ -284,32 +281,39 @@ func (b *contentBuilder) applyArcheType(contentFilename, archetypeFilename strin } defer f.Close() - if archetypeFilename == "" { + if archetypeFi == nil { return b.cf.ApplyArchetypeTemplate(f, p, b.kind, DefaultArchetypeTemplateTemplate) } - return b.cf.ApplyArchetypeFilename(f, p, b.kind, archetypeFilename) - + return b.cf.ApplyArchetypeFi(f, p, b.kind, archetypeFi) } func (b *contentBuilder) mapArcheTypeDir() error { var m archetypeMap - walkFn := func(path string, fi hugofs.FileMetaInfo, err error) error { - if err != nil { - return err - } + seen := map[hstrings.Tuple]bool{} - if fi.IsDir() { + walkFn := func(path string, fim hugofs.FileMetaInfo) error { + if fim.IsDir() { return nil } - fil := fi.(hugofs.FileMetaInfo) + pi := fim.Meta().PathInfo - if files.IsContentFile(path) { - m.contentFiles = append(m.contentFiles, fil) + if pi.IsContent() { + pathLang := hstrings.Tuple{First: pi.PathNoIdentifier(), Second: fim.Meta().Lang} + if seen[pathLang] { + // Duplicate content file, e.g. page.md and page.html. + // In the regular build, we will filter out the duplicates, but + // for archetype folders these are ambiguous and we need to + // fail. + return fmt.Errorf("duplicate content file found in archetype folder: %q; having both e.g. 
%s.md and %s.html is ambigous", path, pi.BaseNameNoIdentifier(), pi.BaseNameNoIdentifier()) + } + seen[pathLang] = true + m.contentFiles = append(m.contentFiles, fim) if !m.siteUsed { - m.siteUsed, err = b.usesSiteVar(path) + var err error + m.siteUsed, err = b.usesSiteVar(fim) if err != nil { return err } @@ -317,7 +321,7 @@ func (b *contentBuilder) mapArcheTypeDir() error { return nil } - m.otherFiles = append(m.otherFiles, fil) + m.otherFiles = append(m.otherFiles, fim) return nil } @@ -325,13 +329,13 @@ func (b *contentBuilder) mapArcheTypeDir() error { walkCfg := hugofs.WalkwayConfig{ WalkFn: walkFn, Fs: b.archeTypeFs, - Root: b.archetypeFilename, + Root: filepath.FromSlash(b.archetypeFi.Meta().PathInfo.Path()), } w := hugofs.NewWalkway(walkCfg) if err := w.Walk(); err != nil { - return fmt.Errorf("failed to walk archetype dir %q: %w", b.archetypeFilename, err) + return fmt.Errorf("failed to walk archetype dir %q: %w", b.archetypeFi.Meta().Filename, err) } b.dirMap = m @@ -370,17 +374,21 @@ func (b *contentBuilder) openInEditorIfConfigured(filename string) error { return cmd.Run() } -func (b *contentBuilder) usesSiteVar(filename string) (bool, error) { - if filename == "" { +func (b *contentBuilder) usesSiteVar(fi hugofs.FileMetaInfo) (bool, error) { + if fi == nil { return false, nil } - bb, err := afero.ReadFile(b.archeTypeFs, filename) + f, err := fi.Meta().Open() if err != nil { - return false, fmt.Errorf("failed to open archetype file: %w", err) + return false, err + } + defer f.Close() + bb, err := io.ReadAll(f) + if err != nil { + return false, fmt.Errorf("failed to read archetype file: %w", err) } return bytes.Contains(bb, []byte(".Site")) || bytes.Contains(bb, []byte("site.")), nil - } type archetypeMap struct { diff --git a/create/content_test.go b/create/content_test.go index 77c6ca6c9..63045cbea 100644 --- a/create/content_test.go +++ b/create/content_test.go @@ -114,58 +114,6 @@ func TestNewContentFromFile(t *testing.T) { } } -func TestNewContentFromDir(t *testing.T) { - mm := afero.NewMemMapFs() - c := qt.New(t) - - archetypeDir := filepath.Join("archetypes", "my-bundle") - c.Assert(mm.MkdirAll(archetypeDir, 0o755), qt.IsNil) - - archetypeThemeDir := filepath.Join("themes", "mytheme", "archetypes", "my-theme-bundle") - c.Assert(mm.MkdirAll(archetypeThemeDir, 0o755), qt.IsNil) - - contentFile := ` -File: %s -Site Lang: {{ .Site.Language.Lang }} -Name: {{ replace .Name "-" " " | title }} -i18n: {{ T "hugo" }} -` - - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "index.md"), []byte(fmt.Sprintf(contentFile, "index.md")), 0o755), qt.IsNil) - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "index.nn.md"), []byte(fmt.Sprintf(contentFile, "index.nn.md")), 0o755), qt.IsNil) - - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "pages", "bio.md"), []byte(fmt.Sprintf(contentFile, "bio.md")), 0o755), qt.IsNil) - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "resources", "hugo1.json"), []byte(`hugo1: {{ printf "no template handling in here" }}`), 0o755), qt.IsNil) - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "resources", "hugo2.xml"), []byte(`hugo2: {{ printf "no template handling in here" }}`), 0o755), qt.IsNil) - - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeThemeDir, "index.md"), []byte(fmt.Sprintf(contentFile, "index.md")), 0o755), qt.IsNil) - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeThemeDir, "resources", "hugo1.json"), []byte(`hugo1: {{ printf "no template handling in here" }}`), 0o755), qt.IsNil) - - 
c.Assert(initFs(mm), qt.IsNil) - cfg, fs := newTestCfg(c, mm) - - conf := testconfig.GetTestConfigs(fs.Source, cfg) - h, err := hugolib.NewHugoSites(deps.DepsCfg{Configs: conf, Fs: fs}) - c.Assert(err, qt.IsNil) - c.Assert(len(h.Sites), qt.Equals, 2) - - c.Assert(create.NewContent(h, "my-bundle", "post/my-post", false), qt.IsNil) - - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/resources/hugo1.json")), `hugo1: {{ printf "no template handling in here" }}`) - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/resources/hugo2.xml")), `hugo2: {{ printf "no template handling in here" }}`) - - // Content files should get the correct site context. - // TODO(bep) archetype check i18n - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/index.md")), `File: index.md`, `Site Lang: en`, `Name: My Post`, `i18n: Hugo Rocks!`) - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/index.nn.md")), `File: index.nn.md`, `Site Lang: nn`, `Name: My Post`, `i18n: Hugo Rokkar!`) - - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/pages/bio.md")), `File: bio.md`, `Site Lang: en`, `Name: Bio`) - - c.Assert(create.NewContent(h, "my-theme-bundle", "post/my-theme-post", false), qt.IsNil) - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-theme-post/index.md")), `File: index.md`, `Site Lang: en`, `Name: My Theme Post`, `i18n: Hugo Rocks!`) - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-theme-post/resources/hugo1.json")), `hugo1: {{ printf "no template handling in here" }}`) -} - func TestNewContentFromDirSiteFunction(t *testing.T) { mm := afero.NewMemMapFs() c := qt.New(t) @@ -206,83 +154,6 @@ site RegularPages: {{ len site.RegularPages }} // Regular files should fall back to the default archetype (we have no regular file archetype). 
c.Assert(create.NewContent(h, "my-bundle", "mypage.md", false), qt.IsNil) cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "mypage.md")), `draft: true`) - -} - -func TestNewContentFromDirNoSite(t *testing.T) { - mm := afero.NewMemMapFs() - c := qt.New(t) - - archetypeDir := filepath.Join("archetypes", "my-bundle") - c.Assert(mm.MkdirAll(archetypeDir, 0o755), qt.IsNil) - - archetypeThemeDir := filepath.Join("themes", "mytheme", "archetypes", "my-theme-bundle") - c.Assert(mm.MkdirAll(archetypeThemeDir, 0o755), qt.IsNil) - - contentFile := ` -File: %s -Name: {{ replace .Name "-" " " | title }} -i18n: {{ T "hugo" }} -` - - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "index.md"), []byte(fmt.Sprintf(contentFile, "index.md")), 0o755), qt.IsNil) - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "index.nn.md"), []byte(fmt.Sprintf(contentFile, "index.nn.md")), 0o755), qt.IsNil) - - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "pages", "bio.md"), []byte(fmt.Sprintf(contentFile, "bio.md")), 0o755), qt.IsNil) - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "resources", "hugo1.json"), []byte(`hugo1: {{ printf "no template handling in here" }}`), 0o755), qt.IsNil) - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "resources", "hugo2.xml"), []byte(`hugo2: {{ printf "no template handling in here" }}`), 0o755), qt.IsNil) - - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeThemeDir, "index.md"), []byte(fmt.Sprintf(contentFile, "index.md")), 0o755), qt.IsNil) - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeThemeDir, "resources", "hugo1.json"), []byte(`hugo1: {{ printf "no template handling in here" }}`), 0o755), qt.IsNil) - - c.Assert(initFs(mm), qt.IsNil) - cfg, fs := newTestCfg(c, mm) - conf := testconfig.GetTestConfigs(fs.Source, cfg) - h, err := hugolib.NewHugoSites(deps.DepsCfg{Configs: conf, Fs: fs}) - c.Assert(err, qt.IsNil) - c.Assert(len(h.Sites), qt.Equals, 2) - - c.Assert(create.NewContent(h, "my-bundle", "post/my-post", false), qt.IsNil) - - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/resources/hugo1.json")), `hugo1: {{ printf "no template handling in here" }}`) - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/resources/hugo2.xml")), `hugo2: {{ printf "no template handling in here" }}`) - - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/index.md")), `File: index.md`, `Name: My Post`, `i18n: Hugo Rocks!`) - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/index.nn.md")), `File: index.nn.md`, `Name: My Post`, `i18n: Hugo Rokkar!`) - - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/pages/bio.md")), `File: bio.md`, `Name: Bio`) - - c.Assert(create.NewContent(h, "my-theme-bundle", "post/my-theme-post", false), qt.IsNil) - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-theme-post/index.md")), `File: index.md`, `Name: My Theme Post`, `i18n: Hugo Rocks!`) - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-theme-post/resources/hugo1.json")), `hugo1: {{ printf "no template handling in here" }}`) -} - -func TestNewContentForce(t *testing.T) { - mm := afero.NewMemMapFs() - c := qt.New(t) - - archetypeDir := filepath.Join("archetypes", "my-bundle") - c.Assert(mm.MkdirAll(archetypeDir, 0o755), qt.IsNil) - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "index.md"), []byte(""), 0o755), qt.IsNil) - 
c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "index.nn.md"), []byte(""), 0o755), qt.IsNil) - - c.Assert(initFs(mm), qt.IsNil) - cfg, fs := newTestCfg(c, mm) - - conf := testconfig.GetTestConfigs(fs.Source, cfg) - h, err := hugolib.NewHugoSites(deps.DepsCfg{Configs: conf, Fs: fs}) - c.Assert(err, qt.IsNil) - c.Assert(len(h.Sites), qt.Equals, 2) - - // from file - c.Assert(create.NewContent(h, "post", "post/my-post.md", false), qt.IsNil) - c.Assert(create.NewContent(h, "post", "post/my-post.md", false), qt.IsNotNil) - c.Assert(create.NewContent(h, "post", "post/my-post.md", true), qt.IsNil) - - // from dir - c.Assert(create.NewContent(h, "my-bundle", "post/my-post", false), qt.IsNil) - c.Assert(create.NewContent(h, "my-bundle", "post/my-post", false), qt.IsNotNil) - c.Assert(create.NewContent(h, "my-bundle", "post/my-post", true), qt.IsNil) } func initFs(fs afero.Fs) error { @@ -308,7 +179,7 @@ func initFs(fs afero.Fs) error { afero.WriteFile(fs, filename, []byte(`--- title: Test --- -`), 0666) +`), 0o666) } // create archetype files diff --git a/create/skeletons/skeletons.go b/create/skeletons/skeletons.go index 7f7fb1bb7..aec79c149 100644 --- a/create/skeletons/skeletons.go +++ b/create/skeletons/skeletons.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -83,7 +83,7 @@ func copyFiles(createpath string, sourceFs afero.Fs, skeleton embed.FS) error { return fs.WalkDir(skeleton, ".", func(path string, d fs.DirEntry, err error) error { _, slug, _ := strings.Cut(path, "/") if d.IsDir() { - return sourceFs.MkdirAll(filepath.Join(createpath, slug), 0777) + return sourceFs.MkdirAll(filepath.Join(createpath, slug), 0o777) } else { if filepath.Base(path) != ".gitkeep" { data, _ := fs.ReadFile(skeleton, path) diff --git a/deploy/deploy.go b/deploy/deploy.go index 26fac8975..7c496c72d 100644 --- a/deploy/deploy.go +++ b/deploy/deploy.go @@ -22,6 +22,7 @@ import ( "context" "crypto/md5" "encoding/hex" + "errors" "fmt" "io" "mime" @@ -33,8 +34,6 @@ import ( "strings" "sync" - "errors" - "github.com/dustin/go-humanize" "github.com/gobwas/glob" "github.com/gohugoio/hugo/common/loggers" @@ -75,7 +74,6 @@ const metaMD5Hash = "md5chksum" // the meta key to store md5hash in // New constructs a new *Deployer. func New(cfg config.AllProvider, logger loggers.Logger, localFs afero.Fs) (*Deployer, error) { - dcfg := cfg.GetConfigSection(deploymentConfigKey).(DeployConfig) targetName := dcfg.Target @@ -675,8 +673,6 @@ func (d *Deployer) findDiffs(localFiles map[string]*localFile, remoteFiles map[s } else if !bytes.Equal(lf.MD5(), remoteFile.MD5) { upload = true reason = reasonMD5Differs - } else { - // Nope! Leave uploaded = false. 
} found[path] = true } else { diff --git a/deploy/deploy_test.go b/deploy/deploy_test.go index 66eece10b..d220ab7c2 100644 --- a/deploy/deploy_test.go +++ b/deploy/deploy_test.go @@ -31,6 +31,7 @@ import ( "testing" "github.com/gohugoio/hugo/common/loggers" + "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/media" "github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp/cmpopts" @@ -241,7 +242,7 @@ func TestWalkLocal(t *testing.T) { for _, name := range tc.Given { dir, _ := path.Split(name) if dir != "" { - if err := fs.MkdirAll(dir, 0755); err != nil { + if err := fs.MkdirAll(dir, 0o755); err != nil { t.Fatal(err) } } @@ -530,7 +531,7 @@ func initFsTests(t *testing.T) []*fsTest { membucket := memblob.OpenBucket(nil) t.Cleanup(func() { membucket.Close() }) - filefs := afero.NewBasePathFs(afero.NewOsFs(), tmpfsdir) + filefs := hugofs.NewBasePathFs(afero.NewOsFs(), tmpfsdir) filebucket, err := fileblob.OpenBucket(tmpbucketdir, nil) if err != nil { t.Fatal(err) diff --git a/deps/deps.go b/deps/deps.go index 4d1812015..8778bff80 100644 --- a/deps/deps.go +++ b/deps/deps.go @@ -11,6 +11,8 @@ import ( "sync/atomic" "github.com/bep/logg" + "github.com/gohugoio/hugo/cache/dynacache" + "github.com/gohugoio/hugo/cache/filecache" "github.com/gohugoio/hugo/common/hexec" "github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/config" @@ -59,6 +61,9 @@ type Deps struct { // The configuration to use Conf config.AllProvider `json:"-"` + // The memory cache to use. + MemCache *dynacache.Cache + // The translation func to use Translate func(ctx context.Context, translationID string, templateData any) string `json:"-"` @@ -149,6 +154,10 @@ func (d *Deps) Init() error { d.ExecHelper = hexec.New(d.Conf.GetConfigSection("security").(security.Config)) } + if d.MemCache == nil { + d.MemCache = dynacache.New(dynacache.Options{Running: d.Conf.Running(), Log: d.Log}) + } + if d.PathSpec == nil { hashBytesReceiverFunc := func(name string, match bool) { if !match { @@ -190,13 +199,16 @@ func (d *Deps) Init() error { } var common *resources.SpecCommon - var imageCache *resources.ImageCache if d.ResourceSpec != nil { common = d.ResourceSpec.SpecCommon - imageCache = d.ResourceSpec.ImageCache } - resourceSpec, err := resources.NewSpec(d.PathSpec, common, imageCache, d.BuildState, d.Log, d, d.ExecHelper) + fileCaches, err := filecache.NewCaches(d.PathSpec) + if err != nil { + return fmt.Errorf("failed to create file caches from configuration: %w", err) + } + + resourceSpec, err := resources.NewSpec(d.PathSpec, common, fileCaches, d.MemCache, d.BuildState, d.Log, d, d.ExecHelper) if err != nil { return fmt.Errorf("failed to create resource spec: %w", err) } @@ -307,6 +319,9 @@ func (d *Deps) TextTmpl() tpl.TemplateParseFinder { } func (d *Deps) Close() error { + if d.MemCache != nil { + d.MemCache.Stop() + } return d.BuildClosers.Close() } diff --git a/go.mod b/go.mod index 6cb4ad377..e8beedbfb 100644 --- a/go.mod +++ b/go.mod @@ -2,9 +2,8 @@ module github.com/gohugoio/hugo require ( github.com/BurntSushi/locker v0.0.0-20171006230638-a6e239ea1c69 - github.com/PuerkitoBio/purell v1.1.1 github.com/alecthomas/chroma/v2 v2.12.0 - github.com/armon/go-radix v1.0.0 + github.com/armon/go-radix v1.0.1-0.20221118154546-54df44f2176c github.com/aws/aws-sdk-go v1.48.6 github.com/bep/clocks v0.5.0 github.com/bep/debounce v1.2.0 @@ -15,10 +14,10 @@ require ( github.com/bep/golibsass v1.1.1 github.com/bep/gowebp v0.3.0 github.com/bep/helpers v0.4.0 - github.com/bep/lazycache v0.2.0 + github.com/bep/lazycache 
v0.4.0 github.com/bep/logg v0.4.0 github.com/bep/mclib v1.20400.20402 - github.com/bep/overlayfs v0.6.0 + github.com/bep/overlayfs v0.9.1 github.com/bep/simplecobra v0.4.0 github.com/bep/tmc v0.5.1 github.com/clbanning/mxj/v2 v2.7.0 @@ -53,6 +52,7 @@ require ( github.com/muesli/smartcrop v0.3.0 github.com/niklasfasching/go-org v1.7.0 github.com/olekukonko/tablewriter v0.0.5 + github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 github.com/pelletier/go-toml/v2 v2.1.1 github.com/rogpeppe/go-internal v1.12.0 github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd @@ -60,17 +60,17 @@ require ( github.com/spf13/afero v1.11.0 github.com/spf13/cast v1.6.0 github.com/spf13/cobra v1.8.0 - github.com/spf13/fsync v0.9.0 + github.com/spf13/fsync v0.10.0 github.com/spf13/pflag v1.0.5 github.com/tdewolff/minify/v2 v2.20.13 github.com/tdewolff/parse/v2 v2.7.8 github.com/yuin/goldmark v1.6.0 github.com/yuin/goldmark-emoji v1.0.2 - go.uber.org/atomic v1.11.0 go.uber.org/automaxprocs v1.5.3 gocloud.dev v0.34.0 golang.org/x/exp v0.0.0-20221031165847-c99f073a8326 golang.org/x/image v0.14.0 + golang.org/x/mod v0.14.0 golang.org/x/net v0.20.0 golang.org/x/sync v0.6.0 golang.org/x/text v0.14.0 @@ -92,7 +92,6 @@ require ( github.com/Azure/go-autorest v14.2.0+incompatible // indirect github.com/Azure/go-autorest/autorest/to v0.4.0 // indirect github.com/AzureAD/microsoft-authentication-library-for-go v1.0.0 // indirect - github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect github.com/aws/aws-sdk-go-v2 v1.20.0 // indirect github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.11 // indirect github.com/aws/aws-sdk-go-v2/config v1.18.32 // indirect @@ -124,7 +123,7 @@ require ( github.com/google/wire v0.5.0 // indirect github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect github.com/googleapis/gax-go/v2 v2.12.0 // indirect - github.com/hashicorp/golang-lru/v2 v2.0.1 // indirect + github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/invopop/yaml v0.2.0 // indirect github.com/jmespath/go-jmespath v0.4.0 // indirect @@ -142,7 +141,6 @@ require ( github.com/russross/blackfriday/v2 v2.1.0 // indirect go.opencensus.io v0.24.0 // indirect golang.org/x/crypto v0.18.0 // indirect - golang.org/x/mod v0.14.0 // indirect golang.org/x/oauth2 v0.15.0 // indirect golang.org/x/sys v0.16.0 // indirect golang.org/x/time v0.5.0 // indirect @@ -158,4 +156,4 @@ require ( software.sslmate.com/src/go-pkcs12 v0.2.0 // indirect ) -go 1.18 +go 1.20 diff --git a/go.sum b/go.sum index 58edbe3ff..3c1cfe7ff 100644 --- a/go.sum +++ b/go.sum @@ -1,14 +1,51 @@ cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod 
h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= +cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= cloud.google.com/go v0.110.10 h1:LXy9GEO+timppncPIAZoOj3l58LIU9k+kn48AN7IO3Y= cloud.google.com/go v0.110.10/go.mod h1:v1OoFqYxiBkUrruItNM3eT4lLByNjxmJSV/xDKJNnic= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= cloud.google.com/go/compute v1.23.3 h1:6sVlXXBmbd7jNX0Ipq0trII3e4n1/MsADLK6a+aiVlk= cloud.google.com/go/compute v1.23.3/go.mod h1:VCgBUoMnIVIR0CscqQiPJLAG25E3ZRZMzcFZeQ+h8CI= cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= cloud.google.com/go/iam v1.1.5 h1:1jTsCu4bcsNsE4iiqNT5SHwrDRCfRmIaaaVFhRveTJI= cloud.google.com/go/iam v1.1.5/go.mod h1:rB6P/Ic3mykPbFio+vo7403drjlgvoWfYpJhMXEbzv8= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= cloud.google.com/go/storage v1.35.1 h1:B59ahL//eDfx2IIKFBeT5Atm9wnNmj3+8xG/W4WB//w= cloud.google.com/go/storage v1.35.1/go.mod h1:M6M/3V/D3KpzMTJyPOR/HU6n2Si5QdaXYEsng2xgOs8= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.0 h1:8q4SaHjFsClSvuVne0ID/5Ka8u3fcIHyqkLjcFpNRHQ= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.0/go.mod 
h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q= github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.0 h1:vcYCAze6p19qBW7MhZybIsqD8sMV8js0NyQM8JDnVtg= @@ -28,16 +65,13 @@ github.com/BurntSushi/locker v0.0.0-20171006230638-a6e239ea1c69 h1:+tu3HOoMXB7RX github.com/BurntSushi/locker v0.0.0-20171006230638-a6e239ea1c69/go.mod h1:L1AbZdiDllfyYH5l5OkAaZtk7VkWe89bPJFmnDBNHxg= github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI= -github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= -github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M= -github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/alecthomas/assert/v2 v2.2.1 h1:XivOgYcduV98QCahG8T5XTezV5bylXe+lBxLG2K2ink= github.com/alecthomas/chroma/v2 v2.12.0 h1:Wh8qLEgMMsN7mgyG8/qIpegky2Hvzr4By6gEF7cmWgw= github.com/alecthomas/chroma/v2 v2.12.0/go.mod h1:4TQu7gdfuPjSh76j78ietmqh9LiurGF0EpseFXdKMBw= github.com/alecthomas/repr v0.2.0 h1:HAzS41CIzNW5syS8Mf9UwXhNH1J9aix/BvDRf1Ml2Yk= -github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI= -github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/armon/go-radix v1.0.1-0.20221118154546-54df44f2176c h1:651/eoCRnQ7YtSjAnSzRucrJz+3iGEFt+ysraELS81M= +github.com/armon/go-radix v1.0.1-0.20221118154546-54df44f2176c/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/aws/aws-sdk-go v1.48.6 h1:hnL/TE3eRigirDLrdRE9AWE1ALZSVLAsC4wK8TGsMqk= github.com/aws/aws-sdk-go v1.48.6/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk= github.com/aws/aws-sdk-go-v2 v1.20.0 h1:INUDpYLt4oiPOJl0XwZDK2OVAVf0Rzo+MGVTv9f+gy8= @@ -96,14 +130,14 @@ github.com/bep/gowebp v0.3.0 h1:MhmMrcf88pUY7/PsEhMgEP0T6fDUnRTMpN8OclDrbrY= github.com/bep/gowebp v0.3.0/go.mod h1:ZhFodwdiFp8ehGJpF4LdPl6unxZm9lLFjxD3z2h2AgI= github.com/bep/helpers v0.4.0 h1:ab9veaAiWY4ST48Oxp5usaqivDmYdB744fz+tcZ3Ifs= github.com/bep/helpers v0.4.0/go.mod h1:/QpHdmcPagDw7+RjkLFCvnlUc8lQ5kg4KDrEkb2Yyco= -github.com/bep/lazycache v0.2.0 h1:HKrlZTrDxHIrNKqmnurH42ryxkngCMYLfBpyu40VcwY= -github.com/bep/lazycache v0.2.0/go.mod h1:xUIsoRD824Vx0Q/n57+ZO7kmbEhMBOnTjM/iPixNGbg= +github.com/bep/lazycache v0.4.0 h1:X8yVyWNVupPd4e1jV7efi3zb7ZV/qcjKQgIQ5aPbkYI= +github.com/bep/lazycache v0.4.0/go.mod h1:NmRm7Dexh3pmR1EignYR8PjO2cWybFQ68+QgY3VMCSc= github.com/bep/logg v0.4.0 h1:luAo5mO4ZkhA5M1iDVDqDqnBBnlHjmtZF6VAyTp+nCQ= github.com/bep/logg v0.4.0/go.mod h1:Ccp9yP3wbR1mm++Kpxet91hAZBEQgmWgFgnXX3GkIV0= github.com/bep/mclib v1.20400.20402 h1:olpCE2WSPpOAbFE1R4hnftSEmQ34+xzy2HRzd0m69rA= github.com/bep/mclib v1.20400.20402/go.mod h1:pkrk9Kyfqg34Uj6XlDq9tdEFJBiL1FvCoCgVKRzw1EY= -github.com/bep/overlayfs v0.6.0 h1:sgLcq/qtIzbaQNl2TldGXOkHvqeZB025sPvHOQL+DYo= -github.com/bep/overlayfs v0.6.0/go.mod h1:NFjSmn3kCqG7KX2Lmz8qT8VhPPCwZap3UNogXawoQHM= +github.com/bep/overlayfs v0.9.1 h1:SL54SV8A3zRkmQ+83Jj4TLE88jadHd5d1L4NpfmqJJs= +github.com/bep/overlayfs v0.9.1/go.mod h1:aYY9W7aXQsGcA7V9x/pzeR8LjEgIxbtisZm8Q7zPz40= github.com/bep/simplecobra v0.4.0 h1:ufX/6WcOtEVJdCd7hsztTWURlZkOaWYOD+zCqrM8qUE= github.com/bep/simplecobra 
v0.4.0/go.mod h1:evSM6iQqRwqpV7W4H4DlYFfe9mZ0x6Hj5GEOnIV7dI4= github.com/bep/tmc v0.5.1 h1:CsQnSC6MsomH64gw0cT5f+EwQDcvZz4AazKunFwTpuI= @@ -111,6 +145,9 @@ github.com/bep/tmc v0.5.1/go.mod h1:tGYHN8fS85aJPhDLgXETVKp+PR382OvFi2+q2GkGsq0= github.com/bep/workers v1.0.0 h1:U+H8YmEaBCEaFZBst7GcRVEoqeRC9dzH2dWOwGmOchg= github.com/bep/workers v1.0.0/go.mod h1:7kIESOB86HfR2379pwoMWNy8B50D7r99fRLUyPSNyCs= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/clbanning/mxj/v2 v2.7.0 h1:WA/La7UGCanFe5NpHF0Q3DNtnCsVoxbPKuyBNHWRyME= github.com/clbanning/mxj/v2 v2.7.0/go.mod h1:hNiWqW14h+kc+MdF9C6/YoRfjEJoR3ou6tn/Qo+ve2s= github.com/cli/safeexec v1.0.0/go.mod h1:Z/D4tTN8Vs5gXYHDCbaM1S/anmEDnJb1iW0+EJ5zx3Q= @@ -118,6 +155,8 @@ github.com/cli/safeexec v1.0.1 h1:e/C79PbXF4yYTN/wauC4tviMxEV13BwljGj0N9j+N00= github.com/cli/safeexec v1.0.1/go.mod h1:Z/D4tTN8Vs5gXYHDCbaM1S/anmEDnJb1iW0+EJ5zx3Q= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cpuguy83/go-md2man/v2 v2.0.3 h1:qMCsGGgs+MAzDFyp9LpAe1Lqy/fY/qCovCm0qnXZOBM= github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= @@ -135,6 +174,8 @@ github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+m github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/evanw/esbuild v0.19.12 h1:p5WGo4o6TCN+kt+uZtYSGS3ZHPa+iIZ0SX+ys8UnP10= github.com/evanw/esbuild v0.19.12/go.mod h1:D2vIQZqV/vIf/VRHtViaUtViZmG7o+kKmlBfVQuRi48= @@ -154,6 +195,9 @@ github.com/getkin/kin-openapi v0.122.0 h1:WB9Jbl0Hp/T79/JF9xlSW5Kl9uYdk/AWD0yAd9 github.com/getkin/kin-openapi v0.122.0/go.mod h1:PCWw/lfBrJY4HcdqE3jj+QFkaFK8ABoqo7PvqVhXXqw= github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw 
v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-openapi/jsonpointer v0.19.6 h1:eCs3fxoIi3Wh6vtgmLTOjdhSpiqphQ+DaPn38N2ZdrE= github.com/go-openapi/jsonpointer v0.19.6/go.mod h1:osyAmYz/mB/C3I+WsTTSgw1ONzaLJoLCyoi6/zppojs= github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= @@ -177,29 +221,47 @@ github.com/gohugoio/testmodBuilder/mods v0.0.0-20190520184928-c56af20f2e95/go.mo github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg= github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= 
+github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= @@ -208,7 +270,22 @@ github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-replayers/grpcreplay v1.1.0 h1:S5+I3zYyZ+GQz68OfbURDdt/+cSMqCK1wrvNx7WBzTE= github.com/google/go-replayers/httpreplay v1.2.0 h1:VM1wEyyjaoU53BwrOnaf9VhAyQQEEioJvFYxYcLRKzk= +github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.3.2 h1:IqNFLAmvJOgVlpdEBiQbDc2EwKW77amAycfTuWKdfvw= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o= github.com/google/s2a-go v0.1.7/go.mod 
h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw= github.com/google/subcommands v1.0.1/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk= @@ -219,15 +296,22 @@ github.com/google/wire v0.5.0 h1:I7ELFeVBr3yfPIcc8+MWvrjk+3VjbcSzoXm3JVa+jD8= github.com/google/wire v0.5.0/go.mod h1:ngWDr9Qvq3yZA10YrxfyGELY/AFWGVpy9c1LTRi1EoU= github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs= github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gax-go/v2 v2.12.0 h1:A+gCJKdRfqXkr+BIRGtZLibNXf0m1f9E4HG56etFpas= github.com/googleapis/gax-go/v2 v2.12.0/go.mod h1:y+aIqrI5eb1YGMVJfuV3185Ts/D7qKpsEkdD5+I6QGU= +github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY= github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY= github.com/hairyhenderson/go-codeowners v0.4.0 h1:Wx/tRXb07sCyHeC8mXfio710Iu35uAy5KYiBdLHdv4Q= github.com/hairyhenderson/go-codeowners v0.4.0/go.mod h1:iJgZeCt+W/GzXo5uchFCqvVHZY2T4TAIpvuVlKVkLxc= -github.com/hashicorp/golang-lru/v2 v2.0.1 h1:5pv5N1lT1fjLg2VQ5KWc7kmucp2x/kvFOnxuVTqZ6x4= -github.com/hashicorp/golang-lru/v2 v2.0.1/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= +github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/invopop/yaml v0.2.0 h1:7zky/qH+O0DwAyoobXUqvVBwgBFRxKoQ/3FjcVpjTMY= @@ -241,6 +325,10 @@ github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGw github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= 
github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= @@ -283,6 +371,8 @@ github.com/niklasfasching/go-org v1.7.0 h1:vyMdcMWWTe/XmANk19F4k8XGBYg0GQ/gJGMim github.com/niklasfasching/go-org v1.7.0/go.mod h1:WuVm4d45oePiE0eX25GqTDQIt/qPW1T9DGkRscqLW5o= github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= +github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 h1:onHthvaw9LFnH4t2DcNVpwGmV9E1BkGknEliJkfwQj0= +github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58/go.mod h1:DXv8WO4yhMYhSNPKjeNKa5WY9YCIEBRbNzFFPJbWO6Y= github.com/pelletier/go-toml/v2 v2.1.1 h1:LWAJwfNvjQZCFIDKWYQaM62NcYeYViCmWIwmOStowAI= github.com/pelletier/go-toml/v2 v2.1.1/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= github.com/perimeterx/marshmallow v1.1.5 h1:a2LALqQ1BlHM8PZblsDdidgv1mWi1DgC2UmX50IvK2s= @@ -292,11 +382,13 @@ github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzL github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/prashantv/gostub v1.1.0 h1:BTyx3RfQjRHnUWaGF9oQos79AlQ5k8WNktv7VGvVH4g= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= @@ -308,21 +400,25 @@ github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd/go.mod h1:hPqNNc0 github.com/sanity-io/litter v1.5.5 h1:iE+sBxPBzoK6uaEP5Lt3fHNgpKcHXc/A2HGETy0uJQo= github.com/sanity-io/litter v1.5.5/go.mod h1:9gzJgR2i4ZpjZHsKvUXIRQVk7P+yM3e+jAF7bU2UI5U= github.com/shogo82148/go-shuffle v0.0.0-20180218125048-27e6095f230d/go.mod h1:2htx6lmL0NGLHlO8ZCf+lQBGBHIbEujyywxJArf+2Yc= +github.com/spf13/afero v1.9.2/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcDf8Y= github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8= github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY= github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0= github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= -github.com/spf13/fsync v0.9.0 h1:f9CEt3DOB2mnHxZaftmEOFWjABEvKM/xpf3cUwJrGOY= -github.com/spf13/fsync v0.9.0/go.mod h1:fNtJEfG3HiltN3y4cPOz6MLjos9+2pIEqLIgszqhp/0= +github.com/spf13/fsync v0.10.0 h1:j+zUMN41zWj3sEqueD4mAsPDQwyOvMeJCcrawdmbqXk= +github.com/spf13/fsync v0.10.0/go.mod 
h1:y+B41vYq5i6Boa3Z+BVoPbDeOvxVkNU5OBXhoT8i4TQ= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/testify v0.0.0-20161117074351-18a02ba4a312/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= @@ -335,66 +431,174 @@ github.com/tdewolff/parse/v2 v2.7.8/go.mod h1:3FbJWZp3XT9OWVN3Hmfp0p/a08v4h8J9W1 github.com/tdewolff/test v1.0.11-0.20231101010635-f1265d231d52/go.mod h1:6DAvZliBAAnD7rhVgwaM7DE5/d9NMOAJ09SqYqeK4QE= github.com/tdewolff/test v1.0.11-0.20240106005702-7de5f7df4739 h1:IkjBCtQOOjIn03u/dMQK9g+Iw9ewps4mCl1nB8Sscbo= github.com/ugorji/go/codec v1.2.7 h1:YPXUKf7fYbp/y8xloBqZOw2qaVggbfwMlI8WM3wZUJ0= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.3.7/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.6.0 h1:boZcn2GTjpsynOsC0iJHnBWa4Bi0qzfJjthwauItG68= github.com/yuin/goldmark v1.6.0/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/yuin/goldmark-emoji v1.0.2 h1:c/RgTShNgHTtc6xdz2KKI74jJr6rWi7FPgnP9GAsO5s= github.com/yuin/goldmark-emoji v1.0.2/go.mod h1:RhP/RWpexdp+KHs7ghKnifRoIs/Bq4nDS7tRbCkOwKY= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= -go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE= -go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= go.uber.org/automaxprocs v1.5.3 h1:kWazyxZUrS3Gs4qUpbwo5kEIMGe/DAvi5Z4tl2NW4j8= go.uber.org/automaxprocs v1.5.3/go.mod h1:eRbA25aqJrxAbsLO0xy5jVwPt7FQnRgjW+efnwa1WM0= gocloud.dev v0.34.0 h1:LzlQY+4l2cMtuNfwT2ht4+fiXwWf/NmPTnXUlLmGif4= 
gocloud.dev v0.34.0/go.mod h1:psKOachbnvY3DAOPbsFVmLIErwsbWPUG2H5i65D38vE= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20220331220935-ae2d96664a29/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc= golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= golang.org/x/exp v0.0.0-20221031165847-c99f073a8326 h1:QfTh0HpN6hlw6D3vu8DAwC8pBIwikq0AI1evdm+FksE= golang.org/x/exp v0.0.0-20221031165847-c99f073a8326/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/image v0.0.0-20210220032944-ac19c3e999fb/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/image v0.14.0 h1:tNgSxAFe3jC4uYqvZdTr84SZoM1KfwdC9SKIFrLjFn4= golang.org/x/image v0.14.0/go.mod h1:HUYqC05R2ZcZ3ejNQsIHQDQiwWM4JBqmm6MKANTp4LE= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= 
+golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.14.0 h1:dGoOF9QVLYng8IHTm7BAyWqCqSheQ5pYWGhzW00YJr0= golang.org/x/mod v0.14.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod 
h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo= golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.15.0 h1:s8pnnxNVzjWyrvYdFUQq5llS1PX2zhPXmccZv99h7uQ= golang.org/x/oauth2 v0.15.0/go.mod h1:q48ptWNTY5XWf+JNten23lcvHpLJ0ZSxF5ttTHKVCAM= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync 
v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -404,21 +608,68 @@ golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU= golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools 
v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190422233926-fe54fb35175b/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools 
v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.17.0 h1:FvmRgNOcs3kOa+T20R1uhfP9F6HgG2mfxDv1vrx1Htc= golang.org/x/tools v0.17.0/go.mod h1:xsh6VxdV005rRVaS6SSAf9oiAqljS7UZUacMZ8Bnsps= @@ -428,15 +679,71 @@ golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk= golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod 
h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= +google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= google.golang.org/api v0.152.0 h1:t0r1vPnfMc260S2Ci+en7kfCZaLOPs5KI0sVV/6jZrY= google.golang.org/api v0.152.0/go.mod h1:3qNJX5eOmhiWYc67jRA/3GsDw97UFb5ivv7Y2PrriAY= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto 
v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20231106174013-bbf56f31fb17 h1:wpZ8pe2x1Q3f2KyT5f8oP/fa9rHAKgFPr/HZdNuS+PQ= google.golang.org/genproto v0.0.0-20231106174013-bbf56f31fb17/go.mod h1:J7XzRzVy1+IPwWHZUzoD0IccYZIrXILAQpc+Qy9CMhY= google.golang.org/genproto/googleapis/api v0.0.0-20231106174013-bbf56f31fb17 h1:JpwMPBpFN3uKhdaekDpiNlImDdkUAyiJ6ez/uxGaUSo= @@ -444,10 +751,21 @@ google.golang.org/genproto/googleapis/api v0.0.0-20231106174013-bbf56f31fb17/go. 
google.golang.org/genproto/googleapis/rpc v0.0.0-20231120223509-83a465c0220f h1:ultW7fxlIvee4HYrtnaRPon9HpEgFk5zYpmfMgtKB5I= google.golang.org/genproto/googleapis/rpc v0.0.0-20231120223509-83a465c0220f/go.mod h1:L9KNLi232K1/xB6f7AlSX692koaRnKaWSR0stBki0Yc= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.59.0 h1:Z5Iec2pjwb+LEOqzpB2MR12/eKFhDPhuqW91O+4bwUk= google.golang.org/grpc v1.59.0/go.mod h1:aUPDwccQo6OTjy7Hct4AfBPD1GptF4fyUjIkQ9YtF98= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= @@ -458,6 +776,7 @@ google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzi google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= @@ -480,8 +799,16 @@ gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod 
h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= howett.net/plist v1.0.0 h1:7CrbWYbPPO/PyNy38b2EB/+gYbjCe2DXBxgtOOZbSQM= howett.net/plist v1.0.0/go.mod h1:lqaXoTrLY4hg8tnEzNru53gicrbv7rrk+2xJA/7hw9g= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= software.sslmate.com/src/go-pkcs12 v0.2.0 h1:nlFkj7bTysH6VkC4fGphtjXRbezREPgrHuJG20hBGPE= software.sslmate.com/src/go-pkcs12 v0.2.0/go.mod h1:23rNcYsMabIc1otwLpTkCCPwUq6kQsTyowttG/as0kQ= diff --git a/helpers/content.go b/helpers/content.go index a3abb334d..889294382 100644 --- a/helpers/content.go +++ b/helpers/content.go @@ -30,7 +30,6 @@ import ( "github.com/spf13/afero" "github.com/gohugoio/hugo/markup/converter" - "github.com/gohugoio/hugo/markup/converter/hooks" "github.com/gohugoio/hugo/markup" @@ -38,19 +37,15 @@ import ( ) var ( - openingPTag = []byte("

") - closingPTag = []byte("

") - paragraphIndicator = []byte("") + closingPTag = []byte("

") ) // ContentSpec provides functionality to render markdown content. type ContentSpec struct { Converters markup.ConverterProvider anchorNameSanitizer converter.AnchorNameSanitizer - getRenderer func(t hooks.RendererType, id any) any - - Cfg config.AllProvider + Cfg config.AllProvider } // NewContentSpec returns a ContentSpec initialized diff --git a/helpers/content_test.go b/helpers/content_test.go index 72e3eeb49..e2bf501d2 100644 --- a/helpers/content_test.go +++ b/helpers/content_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -24,8 +24,6 @@ import ( "github.com/gohugoio/hugo/helpers" ) -const tstHTMLContent = "

<!DOCTYPE html><html><head><script src=\"foobar.js\"></script></head><body><p>This is some text.<br> And some more.</p></body></html>
" - func TestTrimShortHTML(t *testing.T) { tests := []struct { input, output []byte @@ -68,7 +66,6 @@ func BenchmarkTestTruncateWordsToWholeSentence(b *testing.B) { } func TestTruncateWordsToWholeSentence(t *testing.T) { - type test struct { input, expected string max int @@ -101,7 +98,6 @@ func TestTruncateWordsToWholeSentence(t *testing.T) { } func TestTruncateWordsByRune(t *testing.T) { - type test struct { input, expected string max int diff --git a/helpers/general.go b/helpers/general.go index b16aec0b0..35e35a7e0 100644 --- a/helpers/general.go +++ b/helpers/general.go @@ -196,6 +196,7 @@ func ReaderContains(r io.Reader, subslice []byte) bool { func GetTitleFunc(style string) func(s string) string { switch strings.ToLower(style) { case "go": + //lint:ignore SA1019 keep for now. return strings.Title case "chicago": tc := transform.NewTitleConverter(transform.ChicagoStyle) @@ -263,10 +264,11 @@ func MD5String(f string) string { return hex.EncodeToString(h.Sum([]byte{})) } -// MD5FromFileFast creates a MD5 hash from the given file. It only reads parts of +// MD5FromReaderFast creates a MD5 hash from the given file. It only reads parts of // the file for speed, so don't use it if the files are very subtly different. // It will not close the file. -func MD5FromFileFast(r io.ReadSeeker) (string, error) { +// It will return the MD5 hash and the size of r in bytes. +func MD5FromReaderFast(r io.ReadSeeker) (string, int64, error) { const ( // Do not change once set in stone! maxChunks = 8 @@ -284,7 +286,7 @@ func MD5FromFileFast(r io.ReadSeeker) (string, error) { if err == io.EOF { break } - return "", err + return "", 0, err } } @@ -294,12 +296,14 @@ func MD5FromFileFast(r io.ReadSeeker) (string, error) { h.Write(buff) break } - return "", err + return "", 0, err } h.Write(buff) } - return hex.EncodeToString(h.Sum(nil)), nil + size, _ := r.Seek(0, io.SeekEnd) + + return hex.EncodeToString(h.Sum(nil)), size, nil } // MD5FromReader creates a MD5 hash from the given reader. @@ -328,3 +332,21 @@ func PrintFs(fs afero.Fs, path string, w io.Writer) { return nil }) } + +// FormatByteCount pretty formats b. +func FormatByteCount(bc uint64) string { + const ( + Gigabyte = 1 << 30 + Megabyte = 1 << 20 + Kilobyte = 1 << 10 + ) + switch { + case bc > Gigabyte || -bc > Gigabyte: + return fmt.Sprintf("%.2f GB", float64(bc)/Gigabyte) + case bc > Megabyte || -bc > Megabyte: + return fmt.Sprintf("%.2f MB", float64(bc)/Megabyte) + case bc > Kilobyte || -bc > Kilobyte: + return fmt.Sprintf("%.2f KB", float64(bc)/Kilobyte) + } + return fmt.Sprintf("%d B", bc) +} diff --git a/helpers/general_test.go b/helpers/general_test.go index 1463458fa..54607d699 100644 --- a/helpers/general_test.go +++ b/helpers/general_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -259,19 +259,19 @@ func TestUniqueStringsSorted(t *testing.T) { func TestFastMD5FromFile(t *testing.T) { fs := afero.NewMemMapFs() - if err := afero.WriteFile(fs, "small.txt", []byte("abc"), 0777); err != nil { + if err := afero.WriteFile(fs, "small.txt", []byte("abc"), 0o777); err != nil { t.Fatal(err) } - if err := afero.WriteFile(fs, "small2.txt", []byte("abd"), 0777); err != nil { + if err := afero.WriteFile(fs, "small2.txt", []byte("abd"), 0o777); err != nil { t.Fatal(err) } - if err := afero.WriteFile(fs, "bigger.txt", []byte(strings.Repeat("a bc d e", 100)), 0777); err != nil { + if err := afero.WriteFile(fs, "bigger.txt", []byte(strings.Repeat("a bc d e", 100)), 0o777); err != nil { t.Fatal(err) } - if err := afero.WriteFile(fs, "bigger2.txt", []byte(strings.Repeat("c d e f g", 100)), 0777); err != nil { + if err := afero.WriteFile(fs, "bigger2.txt", []byte(strings.Repeat("c d e f g", 100)), 0o777); err != nil { t.Fatal(err) } @@ -292,19 +292,19 @@ func TestFastMD5FromFile(t *testing.T) { defer bf1.Close() defer bf2.Close() - m1, err := helpers.MD5FromFileFast(sf1) + m1, _, err := helpers.MD5FromReaderFast(sf1) c.Assert(err, qt.IsNil) c.Assert(m1, qt.Equals, "e9c8989b64b71a88b4efb66ad05eea96") - m2, err := helpers.MD5FromFileFast(sf2) + m2, _, err := helpers.MD5FromReaderFast(sf2) c.Assert(err, qt.IsNil) c.Assert(m2, qt.Not(qt.Equals), m1) - m3, err := helpers.MD5FromFileFast(bf1) + m3, _, err := helpers.MD5FromReaderFast(bf1) c.Assert(err, qt.IsNil) c.Assert(m3, qt.Not(qt.Equals), m2) - m4, err := helpers.MD5FromFileFast(bf2) + m4, _, err := helpers.MD5FromReaderFast(bf2) c.Assert(err, qt.IsNil) c.Assert(m4, qt.Not(qt.Equals), m3) @@ -320,7 +320,7 @@ func BenchmarkMD5FromFileFast(b *testing.B) { b.Run(fmt.Sprintf("full=%t", full), func(b *testing.B) { for i := 0; i < b.N; i++ { b.StopTimer() - if err := afero.WriteFile(fs, "file.txt", []byte(strings.Repeat("1234567890", 2000)), 0777); err != nil { + if err := afero.WriteFile(fs, "file.txt", []byte(strings.Repeat("1234567890", 2000)), 0o777); err != nil { b.Fatal(err) } f, err := fs.Open("file.txt") @@ -333,7 +333,7 @@ func BenchmarkMD5FromFileFast(b *testing.B) { b.Fatal(err) } } else { - if _, err := helpers.MD5FromFileFast(f); err != nil { + if _, _, err := helpers.MD5FromReaderFast(f); err != nil { b.Fatal(err) } } @@ -350,7 +350,7 @@ func BenchmarkUniqueStrings(b *testing.B) { for i := 0; i < b.N; i++ { result := helpers.UniqueStrings(input) if len(result) != 6 { - b.Fatal(fmt.Sprintf("invalid count: %d", len(result))) + b.Fatalf("invalid count: %d", len(result)) } } }) @@ -369,7 +369,7 @@ func BenchmarkUniqueStrings(b *testing.B) { result := helpers.UniqueStringsReuse(inputc) if len(result) != 6 { - b.Fatal(fmt.Sprintf("invalid count: %d", len(result))) + b.Fatalf("invalid count: %d", len(result)) } } }) @@ -388,7 +388,7 @@ func BenchmarkUniqueStrings(b *testing.B) { result := helpers.UniqueStringsSorted(inputc) if len(result) != 6 { - b.Fatal(fmt.Sprintf("invalid count: %d", len(result))) + b.Fatalf("invalid count: %d", len(result)) } } }) diff --git a/helpers/path.go b/helpers/path.go index 3172d3452..4a6c9a688 100644 --- a/helpers/path.go +++ b/helpers/path.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -23,12 +23,12 @@ import ( "regexp" "sort" "strings" - "unicode" "github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/common/text" "github.com/gohugoio/hugo/htesting" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/common/hugio" @@ -41,7 +41,11 @@ import ( // whilst preserving the original casing of the string. // E.g. Social Media -> Social-Media func (p *PathSpec) MakePath(s string) string { - return p.UnicodeSanitize(s) + s = paths.Sanitize(s) + if p.Cfg.RemovePathAccents() { + s = text.RemoveAccentsString(s) + } + return s } // MakePathsSanitized applies MakePathSanitized on every item in the slice @@ -59,74 +63,13 @@ func (p *PathSpec) MakePathSanitized(s string) string { return strings.ToLower(p.MakePath(s)) } -// ToSlashTrimLeading is just a filepath.ToSlaas with an added / prefix trimmer. -func ToSlashTrimLeading(s string) string { - return strings.TrimPrefix(filepath.ToSlash(s), "/") -} - // MakeTitle converts the path given to a suitable title, trimming whitespace // and replacing hyphens with whitespace. func MakeTitle(inpath string) string { return strings.Replace(strings.TrimSpace(inpath), "-", " ", -1) } -// From https://golang.org/src/net/url/url.go -func ishex(c rune) bool { - switch { - case '0' <= c && c <= '9': - return true - case 'a' <= c && c <= 'f': - return true - case 'A' <= c && c <= 'F': - return true - } - return false -} - -// UnicodeSanitize sanitizes string to be used in Hugo URL's, allowing only -// a predefined set of special Unicode characters. -// If RemovePathAccents configuration flag is enabled, Unicode accents -// are also removed. -// Hyphens in the original input are maintained. -// Spaces will be replaced with a single hyphen, and sequential replacement hyphens will be reduced to one. -func (p *PathSpec) UnicodeSanitize(s string) string { - if p.Cfg.RemovePathAccents() { - s = text.RemoveAccentsString(s) - } - - source := []rune(s) - target := make([]rune, 0, len(source)) - var ( - prependHyphen bool - wasHyphen bool - ) - - for i, r := range source { - isAllowed := r == '.' || r == '/' || r == '\\' || r == '_' || r == '#' || r == '+' || r == '~' || r == '-' || r == '@' - isAllowed = isAllowed || unicode.IsLetter(r) || unicode.IsDigit(r) || unicode.IsMark(r) - isAllowed = isAllowed || (r == '%' && i+2 < len(source) && ishex(source[i+1]) && ishex(source[i+2])) - - if isAllowed { - // track explicit hyphen in input; no need to add a new hyphen if - // we just saw one. - wasHyphen = r == '-' - - if prependHyphen { - // if currently have a hyphen, don't prepend an extra one - if !wasHyphen { - target = append(target, '-') - } - prependHyphen = false - } - target = append(target, r) - } else if len(target) > 0 && !wasHyphen && unicode.IsSpace(r) { - prependHyphen = true - } - } - - return string(target) -} - +// MakeTitleInPath converts the path given to a suitable title, trimming whitespace func MakePathRelative(inPath string, possibleDirectories ...string) (string, error) { for _, currentPath := range possibleDirectories { if strings.HasPrefix(inPath, currentPath) { @@ -317,13 +260,12 @@ func FindCWD() (string, error) { return path, nil } -// SymbolicWalk is like filepath.Walk, but it follows symbolic links. -func SymbolicWalk(fs afero.Fs, root string, walker hugofs.WalkFunc) error { +// Walk walks the file tree rooted at root, calling walkFn for each file or +// directory in the tree, including root. 
+func Walk(fs afero.Fs, root string, walker hugofs.WalkFunc) error { if _, isOs := fs.(*afero.OsFs); isOs { - // Mainly to track symlinks. fs = hugofs.NewBaseFileDecorator(fs) } - w := hugofs.NewWalkway(hugofs.WalkwayConfig{ Fs: fs, Root: root, @@ -333,16 +275,6 @@ func SymbolicWalk(fs afero.Fs, root string, walker hugofs.WalkFunc) error { return w.Walk() } -// LstatIfPossible can be used to call Lstat if possible, else Stat. -func LstatIfPossible(fs afero.Fs, path string) (os.FileInfo, error) { - if lstater, ok := fs.(afero.Lstater); ok { - fi, _, err := lstater.LstatIfPossible(path) - return fi, err - } - - return fs.Stat(path) -} - // SafeWriteToDisk is the same as WriteToDisk // but it also checks to see if file/directory already exists. func SafeWriteToDisk(inpath string, r io.Reader, fs afero.Fs) (err error) { @@ -382,7 +314,7 @@ func OpenFileForWriting(fs afero.Fs, filename string) (afero.File, error) { if !herrors.IsNotExist(err) { return nil, err } - if err = fs.MkdirAll(filepath.Dir(filename), 0777); err != nil { // before umask + if err = fs.MkdirAll(filepath.Dir(filename), 0o777); err != nil { // before umask return nil, err } f, err = fs.Create(filename) @@ -402,7 +334,7 @@ func GetCacheDir(fs afero.Fs, cacheDir string) (string, error) { return "", err } if !exists { - err := fs.MkdirAll(cacheDir, 0777) // Before umask + err := fs.MkdirAll(cacheDir, 0o777) // Before umask if err != nil { return "", fmt.Errorf("failed to create cache dir: %w", err) } @@ -417,7 +349,7 @@ func GetCacheDir(fs afero.Fs, cacheDir string) (string, error) { userCacheDir, err := os.UserCacheDir() if err == nil { cacheDir := filepath.Join(userCacheDir, hugoCacheBase) - if err := fs.Mkdir(cacheDir, 0777); err == nil || os.IsExist(err) { + if err := fs.Mkdir(cacheDir, 0o777); err == nil || os.IsExist(err) { return cacheDir, nil } } @@ -494,12 +426,3 @@ func IsEmpty(path string, fs afero.Fs) (bool, error) { func Exists(path string, fs afero.Fs) (bool, error) { return afero.Exists(fs, path) } - -// AddTrailingSlash adds a trailing Unix styled slash (/) if not already -// there. -func AddTrailingSlash(path string) string { - if !strings.HasSuffix(path, "/") { - path += "/" - } - return path -} diff --git a/helpers/path_test.go b/helpers/path_test.go index 45b692923..6f3699589 100644 --- a/helpers/path_test.go +++ b/helpers/path_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -134,7 +134,6 @@ func TestMakePathRelative(t *testing.T) { } func TestGetDottedRelativePath(t *testing.T) { - // on Windows this will receive both kinds, both country and western ... for _, f := range []func(string) string{filepath.FromSlash, func(s string) string { return s }} { doTestGetDottedRelativePath(f, t) } @@ -258,7 +257,7 @@ func createNonZeroSizedFileInTempDir(t *testing.T) *os.File { f := createZeroSizedFileInTempDir(t) byteString := []byte("byteString") - err := os.WriteFile(f.Name(), byteString, 0644) + err := os.WriteFile(f.Name(), byteString, 0o644) if err != nil { t.Error(err) } diff --git a/helpers/pathspec.go b/helpers/pathspec.go index c9bb49038..88571b93c 100644 --- a/helpers/pathspec.go +++ b/helpers/pathspec.go @@ -74,9 +74,5 @@ func NewPathSpecWithBaseBaseFsProvided(fs *hugofs.Fs, cfg config.AllProvider, lo // PermalinkForBaseURL creates a permalink from the given link and baseURL. 
func (p *PathSpec) PermalinkForBaseURL(link, baseURL string) string { - link = strings.TrimPrefix(link, "/") - if !strings.HasSuffix(baseURL, "/") { - baseURL += "/" - } - return baseURL + link + return baseURL + strings.TrimPrefix(link, "/") } diff --git a/helpers/processing_stats.go b/helpers/processing_stats.go index 3e3e9a3ca..540060aa2 100644 --- a/helpers/processing_stats.go +++ b/helpers/processing_stats.go @@ -31,7 +31,6 @@ type ProcessingStats struct { ProcessedImages uint64 Files uint64 Aliases uint64 - Sitemaps uint64 Cleaned uint64 } @@ -48,7 +47,6 @@ func (s *ProcessingStats) toVals() []processingStatsTitleVal { {"Static files", s.Static}, {"Processed images", s.ProcessedImages}, {"Aliases", s.Aliases}, - {"Sitemaps", s.Sitemaps}, {"Cleaned", s.Cleaned}, } } diff --git a/helpers/url.go b/helpers/url.go index 7d86c529c..d5a613029 100644 --- a/helpers/url.go +++ b/helpers/url.go @@ -20,55 +20,8 @@ import ( "strings" "github.com/gohugoio/hugo/common/paths" - - "github.com/PuerkitoBio/purell" ) -func sanitizeURLWithFlags(in string, f purell.NormalizationFlags) string { - s, err := purell.NormalizeURLString(in, f) - if err != nil { - return in - } - - // Temporary workaround for the bug fix and resulting - // behavioral change in purell.NormalizeURLString(): - // a leading '/' was inadvertently added to relative links, - // but no longer, see #878. - // - // I think the real solution is to allow Hugo to - // make relative URL with relative path, - // e.g. "../../post/hello-again/", as wished by users - // in issues #157, #622, etc., without forcing - // relative URLs to begin with '/'. - // Once the fixes are in, let's remove this kludge - // and restore SanitizeURL() to the way it was. - // -- @anthonyfok, 2015-02-16 - // - // Begin temporary kludge - u, err := url.Parse(s) - if err != nil { - panic(err) - } - if len(u.Path) > 0 && !strings.HasPrefix(u.Path, "/") { - u.Path = "/" + u.Path - } - return u.String() - // End temporary kludge - - // return s - -} - -// SanitizeURL sanitizes the input URL string. -func SanitizeURL(in string) string { - return sanitizeURLWithFlags(in, purell.FlagsSafe|purell.FlagRemoveTrailingSlash|purell.FlagRemoveDotSegments|purell.FlagRemoveDuplicateSlashes|purell.FlagRemoveUnnecessaryHostDots|purell.FlagRemoveEmptyPortSeparator) -} - -// SanitizeURLKeepTrailingSlash is the same as SanitizeURL, but will keep any trailing slash. -func SanitizeURLKeepTrailingSlash(in string) string { - return sanitizeURLWithFlags(in, purell.FlagsSafe|purell.FlagRemoveDotSegments|purell.FlagRemoveDuplicateSlashes|purell.FlagRemoveUnnecessaryHostDots|purell.FlagRemoveEmptyPortSeparator) -} - // URLize is similar to MakePath, but with Unicode handling // Example: // diff --git a/helpers/url_test.go b/helpers/url_test.go index 448756b5b..ce1b24487 100644 --- a/helpers/url_test.go +++ b/helpers/url_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -20,7 +20,6 @@ import ( qt "github.com/frankban/quicktest" "github.com/gohugoio/hugo/config" - "github.com/gohugoio/hugo/helpers" ) func TestURLize(t *testing.T) { @@ -193,7 +192,6 @@ func doTestRelURL(t testing.TB, defaultInSubDir, addLanguage, multilingual bool, canonify bool expected string }{ - // Issue 9994 {"/foo/bar", "https://example.org/foo/", false, "MULTI/foo/bar"}, {"foo/bar", "https://example.org/foo/", false, "/fooMULTI/foo/bar"}, @@ -211,7 +209,7 @@ func doTestRelURL(t testing.TB, defaultInSubDir, addLanguage, multilingual bool, {"test/", "http://base/sub/", false, "/subMULTI/test/"}, {"/test/", "http://base/sub/", true, "MULTI/test/"}, {"", "http://base/ace/", false, "/aceMULTI/"}, - {"", "http://base/ace", false, "/aceMULTI"}, + {"", "http://base/ace", false, "/aceMULTI/"}, {"http://abs", "http://base/", false, "http://abs"}, {"//schemaless", "http://base/", false, "//schemaless"}, } @@ -231,7 +229,6 @@ func doTestRelURL(t testing.TB, defaultInSubDir, addLanguage, multilingual bool, for i, test := range tests { c.Run(fmt.Sprintf("%v/defaultInSubDir=%t;addLanguage=%t;multilingual=%t/%s", test, defaultInSubDir, addLanguage, multilingual, lang), func(c *qt.C) { - v.Set("baseURL", test.baseURL) v.Set("canonifyURLs", test.canonify) defaultContentLanguage := lang @@ -255,36 +252,6 @@ func doTestRelURL(t testing.TB, defaultInSubDir, addLanguage, multilingual bool, c.Assert(output, qt.Equals, expected, qt.Commentf("[%d] %s", i, test.input)) }) - - } -} - -func TestSanitizeURL(t *testing.T) { - tests := []struct { - input string - expected string - }{ - {"http://foo.bar/", "http://foo.bar"}, - {"http://foo.bar", "http://foo.bar"}, // issue #1105 - {"http://foo.bar/zoo/", "http://foo.bar/zoo"}, // issue #931 - } - - for i, test := range tests { - o1 := helpers.SanitizeURL(test.input) - o2 := helpers.SanitizeURLKeepTrailingSlash(test.input) - - expected2 := test.expected - - if strings.HasSuffix(test.input, "/") && !strings.HasSuffix(expected2, "/") { - expected2 += "/" - } - - if o1 != test.expected { - t.Errorf("[%d] 1: Expected %#v, got %#v\n", i, test.expected, o1) - } - if o2 != expected2 { - t.Errorf("[%d] 2: Expected %#v, got %#v\n", i, expected2, o2) - } } } diff --git a/htesting/test_helpers.go b/htesting/test_helpers.go index 21b4b831e..ff14de58d 100644 --- a/htesting/test_helpers.go +++ b/htesting/test_helpers.go @@ -20,8 +20,11 @@ import ( "runtime" "strconv" "strings" + "testing" "time" + qt "github.com/frankban/quicktest" + "github.com/spf13/afero" ) @@ -124,6 +127,11 @@ func GoMinorVersion() int { return extractMinorVersionFromGoTag(runtime.Version()) } +// IsWindows reports whether this runs on Windows. +func IsWindows() bool { + return runtime.GOOS == "windows" +} + var goMinorVersionRe = regexp.MustCompile(`go1.(\d*)`) func extractMinorVersionFromGoTag(tag string) int { @@ -140,5 +148,33 @@ func extractMinorVersionFromGoTag(tag string) int { // a commit hash, not useful. return -1 - +} + +// NewPinnedRunner creates a new runner that will only Run tests matching the given regexp. 
+// This is added mostly to use in combination with https://marketplace.visualstudio.com/items?itemName=windmilleng.vscode-go-autotest +func NewPinnedRunner(t testing.TB, pinnedTestRe string) *PinnedRunner { + if pinnedTestRe == "" { + pinnedTestRe = ".*" + } + pinnedTestRe = strings.ReplaceAll(pinnedTestRe, "_", " ") + re := regexp.MustCompile("(?i)" + pinnedTestRe) + return &PinnedRunner{ + c: qt.New(t), + re: re, + } +} + +type PinnedRunner struct { + c *qt.C + re *regexp.Regexp +} + +func (r *PinnedRunner) Run(name string, f func(c *qt.C)) bool { + if !r.re.MatchString(name) { + if IsGitHubAction() { + r.c.Fatal("found pinned test when running in CI") + } + return true + } + return r.c.Run(name, f) } diff --git a/hugofs/component_fs.go b/hugofs/component_fs.go new file mode 100644 index 000000000..c55f15957 --- /dev/null +++ b/hugofs/component_fs.go @@ -0,0 +1,284 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package hugofs + +import ( + iofs "io/fs" + "os" + "path" + "runtime" + "sort" + + "github.com/gohugoio/hugo/common/herrors" + "github.com/gohugoio/hugo/common/hstrings" + "github.com/gohugoio/hugo/common/paths" + "github.com/gohugoio/hugo/hugofs/files" + "github.com/spf13/afero" + "golang.org/x/text/unicode/norm" +) + +// NewComponentFs creates a new component filesystem. +func NewComponentFs(opts ComponentFsOptions) *componentFs { + if opts.Component == "" { + panic("ComponentFsOptions.PathParser.Component must be set") + } + if opts.Fs == nil { + panic("ComponentFsOptions.Fs must be set") + } + bfs := NewBasePathFs(opts.Fs, opts.Component) + return &componentFs{Fs: bfs, opts: opts} +} + +var _ FilesystemUnwrapper = (*componentFs)(nil) + +// componentFs is a filesystem that holds one of the Hugo components, e.g. content, layouts etc. +type componentFs struct { + afero.Fs + + opts ComponentFsOptions +} + +func (fs *componentFs) UnwrapFilesystem() afero.Fs { + return fs.Fs +} + +type componentFsDir struct { + *noOpRegularFileOps + DirOnlyOps + name string // the name passed to Open + fs *componentFs +} + +// ReadDir reads count entries from this virtual directorie and +// sorts the entries according to the component filesystem rules. +func (f *componentFsDir) ReadDir(count int) ([]iofs.DirEntry, error) { + fis, err := f.DirOnlyOps.(iofs.ReadDirFile).ReadDir(-1) + if err != nil { + return nil, err + } + + // Filter out any symlinks. + n := 0 + for _, fi := range fis { + // IsDir will always be false for symlinks. + keep := fi.IsDir() + if !keep { + // This is unfortunate, but is the only way to determine if it is a symlink. 
+ info, err := fi.Info() + if err != nil { + if herrors.IsNotExist(err) { + continue + } + return nil, err + } + if info.Mode()&os.ModeSymlink == 0 { + keep = true + } + } + if keep { + fis[n] = fi + n++ + } + } + + fis = fis[:n] + + for _, fi := range fis { + s := path.Join(f.name, fi.Name()) + _ = f.fs.applyMeta(fi, s) + + } + + sort.Slice(fis, func(i, j int) bool { + fimi, fimj := fis[i].(FileMetaInfo), fis[j].(FileMetaInfo) + if fimi.IsDir() != fimj.IsDir() { + return fimi.IsDir() + } + fimim, fimjm := fimi.Meta(), fimj.Meta() + + if fimim.ModuleOrdinal != fimjm.ModuleOrdinal { + switch f.fs.opts.Component { + case files.ComponentFolderI18n: + // The way the language files gets loaded means that + // we need to provide the least important files first (e.g. the theme files). + return fimim.ModuleOrdinal > fimjm.ModuleOrdinal + default: + return fimim.ModuleOrdinal < fimjm.ModuleOrdinal + } + } + + pii, pij := fimim.PathInfo, fimjm.PathInfo + if pii != nil { + basei, basej := pii.Base(), pij.Base() + exti, extj := pii.Ext(), pij.Ext() + if f.fs.opts.Component == files.ComponentFolderContent { + // Pull bundles to the top. + if pii.IsBundle() != pij.IsBundle() { + return pii.IsBundle() + } + } + + if exti != extj { + // This pulls .md above .html. + return exti > extj + } + + if basei != basej { + return basei < basej + } + } + + if fimim.Weight != fimjm.Weight { + return fimim.Weight > fimjm.Weight + } + + return fimi.Name() < fimj.Name() + }) + + if f.fs.opts.Component == files.ComponentFolderContent { + // Finally filter out any duplicate content files, e.g. page.md and page.html. + n := 0 + seen := map[hstrings.Tuple]bool{} + for _, fi := range fis { + fim := fi.(FileMetaInfo) + pi := fim.Meta().PathInfo + keep := fim.IsDir() || !pi.IsContent() + + if !keep { + baseLang := hstrings.Tuple{First: pi.Base(), Second: fim.Meta().Lang} + if !seen[baseLang] { + keep = true + seen[baseLang] = true + } + } + + if keep { + fis[n] = fi + n++ + } + } + + fis = fis[:n] + } + + return fis, nil +} + +func (f *componentFsDir) Stat() (iofs.FileInfo, error) { + fi, err := f.DirOnlyOps.Stat() + if err != nil { + return nil, err + } + return f.fs.applyMeta(fi, f.name), nil +} + +func (fs *componentFs) Stat(name string) (os.FileInfo, error) { + fi, err := fs.Fs.Stat(name) + if err != nil { + return nil, err + } + return fs.applyMeta(fi, name), nil +} + +func (fs *componentFs) applyMeta(fi FileNameIsDir, name string) FileMetaInfo { + if runtime.GOOS == "darwin" { + name = norm.NFC.String(name) + } + fim := fi.(FileMetaInfo) + meta := fim.Meta() + meta.PathInfo = fs.opts.PathParser.Parse(fs.opts.Component, name) + if !fim.IsDir() { + if fileLang := meta.PathInfo.Lang(); fileLang != "" { + // A valid lang set in filename. + // Give priority to myfile.sv.txt inside the sv filesystem. 
+ meta.Weight++ + meta.Lang = fileLang + } + } + + if meta.Lang == "" { + meta.Lang = fs.opts.DefaultContentLanguage + } + + langIdx, found := fs.opts.PathParser.LanguageIndex[meta.Lang] + if !found { + panic("no language found for " + meta.Lang) + } + meta.LangIndex = langIdx + + if fi.IsDir() { + meta.OpenFunc = func() (afero.File, error) { + return fs.Open(name) + } + } + + return fim +} + +func (f *componentFsDir) Readdir(count int) ([]os.FileInfo, error) { + panic("not supported: Use ReadDir") +} + +func (f *componentFsDir) Readdirnames(count int) ([]string, error) { + dirsi, err := f.DirOnlyOps.(iofs.ReadDirFile).ReadDir(count) + if err != nil { + return nil, err + } + + dirs := make([]string, len(dirsi)) + for i, d := range dirsi { + dirs[i] = d.Name() + } + return dirs, nil +} + +type ComponentFsOptions struct { + // The filesystem where one or more components are mounted. + Fs afero.Fs + + // The component name, e.g. "content", "layouts" etc. + Component string + + DefaultContentLanguage string + + // The parser used to parse paths provided by this filesystem. + PathParser paths.PathParser +} + +func (fs *componentFs) Open(name string) (afero.File, error) { + f, err := fs.Fs.Open(name) + if err != nil { + return nil, err + } + + fi, err := f.Stat() + if err != nil { + if err != errIsDir { + f.Close() + return nil, err + } + } else if !fi.IsDir() { + return f, nil + } + + return &componentFsDir{ + DirOnlyOps: f, + name: name, + fs: fs, + }, nil +} + +func (fs *componentFs) ReadDir(name string) ([]os.FileInfo, error) { + panic("not implemented") +} diff --git a/hugofs/decorators.go b/hugofs/decorators.go index 47b4266df..405c81ce4 100644 --- a/hugofs/decorators.go +++ b/hugofs/decorators.go @@ -15,63 +15,25 @@ package hugofs import ( "fmt" + "io/fs" "os" "path/filepath" - "strings" - "github.com/gohugoio/hugo/common/herrors" "github.com/spf13/afero" ) -var ( - _ FilesystemUnwrapper = (*baseFileDecoratorFs)(nil) -) +var _ FilesystemUnwrapper = (*baseFileDecoratorFs)(nil) func decorateDirs(fs afero.Fs, meta *FileMeta) afero.Fs { ffs := &baseFileDecoratorFs{Fs: fs} - decorator := func(fi os.FileInfo, name string) (os.FileInfo, error) { + decorator := func(fi FileNameIsDir, name string) (FileNameIsDir, error) { if !fi.IsDir() { // Leave regular files as they are. return fi, nil } - return decorateFileInfo(fi, fs, nil, "", "", meta), nil - } - - ffs.decorate = decorator - - return ffs -} - -func decoratePath(fs afero.Fs, createPath func(name string) string) afero.Fs { - ffs := &baseFileDecoratorFs{Fs: fs} - - decorator := func(fi os.FileInfo, name string) (os.FileInfo, error) { - path := createPath(name) - - return decorateFileInfo(fi, fs, nil, "", path, nil), nil - } - - ffs.decorate = decorator - - return ffs -} - -// DecorateBasePathFs adds Path info to files and directories in the -// provided BasePathFs, using the base as base. 
-func DecorateBasePathFs(base *afero.BasePathFs) afero.Fs { - basePath, _ := base.RealPath("") - if !strings.HasSuffix(basePath, filepathSeparator) { - basePath += filepathSeparator - } - - ffs := &baseFileDecoratorFs{Fs: base} - - decorator := func(fi os.FileInfo, name string) (os.FileInfo, error) { - path := strings.TrimPrefix(name, basePath) - - return decorateFileInfo(fi, base, nil, "", path, nil), nil + return decorateFileInfo(fi, nil, "", meta), nil } ffs.decorate = decorator @@ -84,7 +46,7 @@ func DecorateBasePathFs(base *afero.BasePathFs) afero.Fs { func NewBaseFileDecorator(fs afero.Fs, callbacks ...func(fi FileMetaInfo)) afero.Fs { ffs := &baseFileDecoratorFs{Fs: fs} - decorator := func(fi os.FileInfo, filename string) (os.FileInfo, error) { + decorator := func(fi FileNameIsDir, filename string) (FileNameIsDir, error) { // Store away the original in case it's a symlink. meta := NewFileMeta() meta.Name = fi.Name() @@ -92,38 +54,24 @@ func NewBaseFileDecorator(fs afero.Fs, callbacks ...func(fi FileMetaInfo)) afero if fi.IsDir() { meta.JoinStatFunc = func(name string) (FileMetaInfo, error) { joinedFilename := filepath.Join(filename, name) - fi, _, err := lstatIfPossible(fs, joinedFilename) + fi, err := fs.Stat(joinedFilename) + if err != nil { + return nil, err + } + fim, err := ffs.decorate(fi, joinedFilename) if err != nil { return nil, err } - fi, err = ffs.decorate(fi, joinedFilename) - if err != nil { - return nil, err - } - - return fi.(FileMetaInfo), nil + return fim.(FileMetaInfo), nil } } - isSymlink := isSymlink(fi) - if isSymlink { - meta.OriginalFilename = filename - var link string - var err error - link, fi, err = evalSymlinks(fs, filename) - if err != nil { - return nil, err - } - filename = link - meta.IsSymlink = true - } - opener := func() (afero.File, error) { return ffs.open(filename) } - fim := decorateFileInfo(fi, ffs, opener, filename, "", meta) + fim := decorateFileInfo(fi, opener, filename, meta) for _, cb := range callbacks { cb(fim) @@ -136,23 +84,9 @@ func NewBaseFileDecorator(fs afero.Fs, callbacks ...func(fi FileMetaInfo)) afero return ffs } -func evalSymlinks(fs afero.Fs, filename string) (string, os.FileInfo, error) { - link, err := filepath.EvalSymlinks(filename) - if err != nil { - return "", nil, err - } - - fi, err := fs.Stat(link) - if err != nil { - return "", nil, err - } - - return link, fi, nil -} - type baseFileDecoratorFs struct { afero.Fs - decorate func(fi os.FileInfo, filename string) (os.FileInfo, error) + decorate func(fi FileNameIsDir, name string) (FileNameIsDir, error) } func (fs *baseFileDecoratorFs) UnwrapFilesystem() afero.Fs { @@ -165,29 +99,11 @@ func (fs *baseFileDecoratorFs) Stat(name string) (os.FileInfo, error) { return nil, err } - return fs.decorate(fi, name) -} - -func (fs *baseFileDecoratorFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { - var ( - fi os.FileInfo - err error - ok bool - ) - - if lstater, isLstater := fs.Fs.(afero.Lstater); isLstater { - fi, ok, err = lstater.LstatIfPossible(name) - } else { - fi, err = fs.Fs.Stat(name) - } - + fim, err := fs.decorate(fi, name) if err != nil { - return nil, false, err + return nil, err } - - fi, err = fs.decorate(fi, name) - - return fi, ok, err + return fim.(os.FileInfo), nil } func (fs *baseFileDecoratorFs) Open(name string) (afero.File, error) { @@ -207,35 +123,32 @@ type baseFileDecoratorFile struct { fs *baseFileDecoratorFs } -func (l *baseFileDecoratorFile) Readdir(c int) (ofi []os.FileInfo, err error) { - dirnames, err := l.File.Readdirnames(c) +func 
(l *baseFileDecoratorFile) ReadDir(n int) ([]fs.DirEntry, error) { + fis, err := l.File.(fs.ReadDirFile).ReadDir(-1) if err != nil { return nil, err } - fisp := make([]os.FileInfo, 0, len(dirnames)) + fisp := make([]fs.DirEntry, len(fis)) - for _, dirname := range dirnames { - filename := dirname - - if l.Name() != "" && l.Name() != filepathSeparator { - filename = filepath.Join(l.Name(), dirname) + for i, fi := range fis { + filename := fi.Name() + if l.Name() != "" { + filename = filepath.Join(l.Name(), fi.Name()) } - // We need to resolve any symlink info. - fi, _, err := lstatIfPossible(l.fs.Fs, filename) - if err != nil { - if herrors.IsNotExist(err) { - continue - } - return nil, err - } - fi, err = l.fs.decorate(fi, filename) + fid, err := l.fs.decorate(fi, filename) if err != nil { return nil, fmt.Errorf("decorate: %w", err) } - fisp = append(fisp, fi) + + fisp[i] = fid.(fs.DirEntry) + } return fisp, err } + +func (l *baseFileDecoratorFile) Readdir(c int) (ofi []os.FileInfo, err error) { + panic("not supported: Use ReadDir") +} diff --git a/hugofs/language_merge.go b/hugofs/dirsmerger.go similarity index 55% rename from hugofs/language_merge.go rename to hugofs/dirsmerger.go index a2fa411a9..392353e27 100644 --- a/hugofs/language_merge.go +++ b/hugofs/dirsmerger.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -14,12 +14,14 @@ package hugofs import ( - "os" + "io/fs" + + "github.com/bep/overlayfs" ) // LanguageDirsMerger implements the overlayfs.DirsMerger func, which is used // to merge two directories. -var LanguageDirsMerger = func(lofi, bofi []os.FileInfo) []os.FileInfo { +var LanguageDirsMerger overlayfs.DirsMerger = func(lofi, bofi []fs.DirEntry) []fs.DirEntry { for _, fi1 := range bofi { fim1 := fi1.(FileMetaInfo) var found bool @@ -37,3 +39,27 @@ var LanguageDirsMerger = func(lofi, bofi []os.FileInfo) []os.FileInfo { return lofi } + +// AppendDirsMerger merges two directories keeping all regular files +// with the first slice as the base. +// Duplicate directories in the secnond slice will be ignored. +// This strategy is used for the i18n and data fs where we need all entries. +var AppendDirsMerger overlayfs.DirsMerger = func(lofi, bofi []fs.DirEntry) []fs.DirEntry { + for _, fi1 := range bofi { + var found bool + // Remove duplicate directories. + if fi1.IsDir() { + for _, fi2 := range lofi { + if fi2.IsDir() && fi2.Name() == fi1.Name() { + found = true + break + } + } + } + if !found { + lofi = append(lofi, fi1) + } + } + + return lofi +} diff --git a/hugofs/fileinfo.go b/hugofs/fileinfo.go index 773352ea8..6d6122c0c 100644 --- a/hugofs/fileinfo.go +++ b/hugofs/fileinfo.go @@ -16,21 +16,25 @@ package hugofs import ( "errors" + "fmt" + "io" + "io/fs" "os" "path/filepath" "reflect" "runtime" "sort" - "strings" + "sync" "time" "github.com/gohugoio/hugo/hugofs/glob" - "github.com/gohugoio/hugo/hugofs/files" "golang.org/x/text/unicode/norm" + "github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/common/hreflect" "github.com/gohugoio/hugo/common/htime" + "github.com/gohugoio/hugo/common/paths" "github.com/spf13/afero" ) @@ -39,48 +43,37 @@ func NewFileMeta() *FileMeta { return &FileMeta{} } -// PathFile returns the relative file path for the file source. 
-func (f *FileMeta) PathFile() string { - if f.BaseDir == "" { - return "" - } - return strings.TrimPrefix(strings.TrimPrefix(f.Filename, f.BaseDir), filepathSeparator) -} - type FileMeta struct { - Name string - Filename string - Path string - PathWalk string - OriginalFilename string - BaseDir string + PathInfo *paths.Path + Name string + Filename string - SourceRoot string - MountRoot string - Module string + BaseDir string + SourceRoot string + Module string + ModuleOrdinal int + Component string - Weight int - IsOrdered bool - IsSymlink bool - IsRootFile bool - IsProject bool - Watch bool + Weight int + IsProject bool + Watch bool - Classifier files.ContentClass + // The lang associated with this file. This may be + // either the language set in the filename or + // the language defined in the source mount configuration. + Lang string + // The language index for the above lang. This is the index + // in the sorted list of languages/sites. + LangIndex int - SkipDir bool - - Lang string - TranslationBaseName string - TranslationBaseNameWithExt string - Translations []string - - Fs afero.Fs OpenFunc func() (afero.File, error) JoinStatFunc func(name string) (FileMetaInfo, error) // Include only files or directories that match. InclusionFilter *glob.FilenameFilter + + // Rename the name part of the file (not the directory). + Rename func(name string, toFrom bool) string } func (m *FileMeta) Copy() *FileMeta { @@ -120,6 +113,15 @@ func (f *FileMeta) Open() (afero.File, error) { return f.OpenFunc() } +func (f *FileMeta) ReadAll() ([]byte, error) { + file, err := f.Open() + if err != nil { + return nil, err + } + defer file.Close() + return io.ReadAll(file) +} + func (f *FileMeta) JoinStat(name string) (FileMetaInfo, error) { if f.JoinStatFunc == nil { return nil, os.ErrNotExist @@ -128,50 +130,123 @@ func (f *FileMeta) JoinStat(name string) (FileMetaInfo, error) { } type FileMetaInfo interface { - os.FileInfo - // Meta is for internal use. + fs.DirEntry + MetaProvider + + // This is a real hybrid as it also implements the fs.FileInfo interface. + FileInfoOptionals +} + +type MetaProvider interface { Meta() *FileMeta } -type fileInfoMeta struct { - os.FileInfo +type FileInfoOptionals interface { + Size() int64 + Mode() fs.FileMode + ModTime() time.Time + Sys() any +} +type FileNameIsDir interface { + Name() string + IsDir() bool +} + +type FileInfoProvider interface { + FileInfo() FileMetaInfo +} + +// DirOnlyOps is a subset of the afero.File interface covering +// the methods needed for directory operations. +type DirOnlyOps interface { + io.Closer + Name() string + Readdir(count int) ([]os.FileInfo, error) + Readdirnames(n int) ([]string, error) + Stat() (os.FileInfo, error) +} + +type dirEntryMeta struct { + fs.DirEntry m *FileMeta + + fi fs.FileInfo + fiInit sync.Once } -type filenameProvider interface { - Filename() string -} - -var _ filenameProvider = (*fileInfoMeta)(nil) - -// Filename returns the full filename. -func (fi *fileInfoMeta) Filename() string { - return fi.m.Filename -} - -// Name returns the file's name. Note that we follow symlinks, -// if supported by the file system, and the Name given here will be the -// name of the symlink, which is what Hugo needs in all situations. 
-func (fi *fileInfoMeta) Name() string { - if name := fi.m.Name; name != "" { - return name - } - return fi.FileInfo.Name() -} - -func (fi *fileInfoMeta) Meta() *FileMeta { +func (fi *dirEntryMeta) Meta() *FileMeta { return fi.m } -func NewFileMetaInfo(fi os.FileInfo, m *FileMeta) FileMetaInfo { +// Filename returns the full filename. +func (fi *dirEntryMeta) Filename() string { + return fi.m.Filename +} + +func (fi *dirEntryMeta) fileInfo() fs.FileInfo { + var err error + fi.fiInit.Do(func() { + fi.fi, err = fi.DirEntry.Info() + }) + if err != nil { + panic(err) + } + return fi.fi +} + +func (fi *dirEntryMeta) Size() int64 { + return fi.fileInfo().Size() +} + +func (fi *dirEntryMeta) Mode() fs.FileMode { + return fi.fileInfo().Mode() +} + +func (fi *dirEntryMeta) ModTime() time.Time { + return fi.fileInfo().ModTime() +} + +func (fi *dirEntryMeta) Sys() any { + return fi.fileInfo().Sys() +} + +// Name returns the file's name. +func (fi *dirEntryMeta) Name() string { + if name := fi.m.Name; name != "" { + return name + } + return fi.DirEntry.Name() +} + +// dirEntry is an adapter from os.FileInfo to fs.DirEntry +type dirEntry struct { + fs.FileInfo +} + +var _ fs.DirEntry = dirEntry{} + +func (d dirEntry) Type() fs.FileMode { return d.FileInfo.Mode().Type() } + +func (d dirEntry) Info() (fs.FileInfo, error) { return d.FileInfo, nil } + +func NewFileMetaInfo(fi FileNameIsDir, m *FileMeta) FileMetaInfo { if m == nil { panic("FileMeta must be set") } - if fim, ok := fi.(FileMetaInfo); ok { + if fim, ok := fi.(MetaProvider); ok { m.Merge(fim.Meta()) } - return &fileInfoMeta{FileInfo: fi, m: m} + switch v := fi.(type) { + case fs.DirEntry: + return &dirEntryMeta{DirEntry: v, m: m} + case fs.FileInfo: + return &dirEntryMeta{DirEntry: dirEntry{v}, m: m} + case nil: + return &dirEntryMeta{DirEntry: dirEntry{}, m: m} + default: + panic(fmt.Sprintf("Unsupported type: %T", fi)) + } } type dirNameOnlyFileInfo struct { @@ -212,7 +287,6 @@ func newDirNameOnlyFileInfo(name string, meta *FileMeta, fileOpener func() (afer m.Filename = name } m.OpenFunc = fileOpener - m.IsOrdered = false return NewFileMetaInfo( &dirNameOnlyFileInfo{name: base, modTime: htime.Now()}, @@ -220,16 +294,10 @@ func newDirNameOnlyFileInfo(name string, meta *FileMeta, fileOpener func() (afer ) } -func decorateFileInfo( - fi os.FileInfo, - fs afero.Fs, opener func() (afero.File, error), - filename, filepath string, inMeta *FileMeta, -) FileMetaInfo { +func decorateFileInfo(fi FileNameIsDir, opener func() (afero.File, error), filename string, inMeta *FileMeta) FileMetaInfo { var meta *FileMeta var fim FileMetaInfo - filepath = strings.TrimPrefix(filepath, filepathSeparator) - var ok bool if fim, ok = fi.(FileMetaInfo); ok { meta = fim.Meta() @@ -241,14 +309,8 @@ func decorateFileInfo( if opener != nil { meta.OpenFunc = opener } - if fs != nil { - meta.Fs = fs - } - nfilepath := normalizeFilename(filepath) + nfilename := normalizeFilename(filename) - if nfilepath != "" { - meta.Path = nfilepath - } if nfilename != "" { meta.Filename = nfilename } @@ -258,14 +320,11 @@ func decorateFileInfo( return fim } -func isSymlink(fi os.FileInfo) bool { - return fi != nil && fi.Mode()&os.ModeSymlink == os.ModeSymlink -} - -func fileInfosToFileMetaInfos(fis []os.FileInfo) []FileMetaInfo { +func DirEntriesToFileMetaInfos(fis []fs.DirEntry) []FileMetaInfo { fims := make([]FileMetaInfo, len(fis)) for i, v := range fis { - fims[i] = v.(FileMetaInfo) + fim := v.(FileMetaInfo) + fims[i] = fim } return fims } @@ -281,17 +340,49 @@ func 
normalizeFilename(filename string) string { return filename } -func fileInfosToNames(fis []os.FileInfo) []string { - names := make([]string, len(fis)) - for i, d := range fis { - names[i] = d.Name() - } - return names -} - -func sortFileInfos(fis []os.FileInfo) { +func sortDirEntries(fis []fs.DirEntry) { sort.Slice(fis, func(i, j int) bool { fimi, fimj := fis[i].(FileMetaInfo), fis[j].(FileMetaInfo) return fimi.Meta().Filename < fimj.Meta().Filename }) } + +// AddFileInfoToError adds file info to the given error. +func AddFileInfoToError(err error, fi FileMetaInfo, fs afero.Fs) error { + if err == nil { + return nil + } + + meta := fi.Meta() + filename := meta.Filename + + // Check if it's already added. + for _, ferr := range herrors.UnwrapFileErrors(err) { + pos := ferr.Position() + errfilename := pos.Filename + if errfilename == "" { + pos.Filename = filename + ferr.UpdatePosition(pos) + } + + if errfilename == "" || errfilename == filename { + if filename != "" && ferr.ErrorContext() == nil { + f, ioerr := fs.Open(filename) + if ioerr != nil { + return err + } + defer f.Close() + ferr.UpdateContent(f, nil) + } + return err + } + } + + lineMatcher := herrors.NopLineMatcher + + if textSegmentErr, ok := err.(*herrors.TextSegmentError); ok { + lineMatcher = herrors.ContainsMatcher(textSegmentErr.Segment) + } + + return herrors.NewFileErrorFromFile(err, filename, fs, lineMatcher) +} diff --git a/hugofs/fileinfo_test.go b/hugofs/fileinfo_test.go index 8d6a2ff7a..715798b34 100644 --- a/hugofs/fileinfo_test.go +++ b/hugofs/fileinfo_test.go @@ -25,7 +25,6 @@ func TestFileMeta(t *testing.T) { c.Run("Merge", func(c *qt.C) { src := &FileMeta{ Filename: "fs1", - Path: "ps1", } dst := &FileMeta{ Filename: "fd1", @@ -33,19 +32,16 @@ func TestFileMeta(t *testing.T) { dst.Merge(src) - c.Assert(dst.Path, qt.Equals, "ps1") c.Assert(dst.Filename, qt.Equals, "fd1") }) c.Run("Copy", func(c *qt.C) { src := &FileMeta{ Filename: "fs1", - Path: "ps1", } dst := src.Copy() c.Assert(dst, qt.Not(qt.Equals), src) c.Assert(dst, qt.DeepEquals, src) }) - } diff --git a/hugofs/filename_filter_fs.go b/hugofs/filename_filter_fs.go index c101309c2..5bae4b876 100644 --- a/hugofs/filename_filter_fs.go +++ b/hugofs/filename_filter_fs.go @@ -14,6 +14,7 @@ package hugofs import ( + "io/fs" "os" "strings" "syscall" @@ -45,17 +46,6 @@ func (fs *filenameFilterFs) UnwrapFilesystem() afero.Fs { return fs.fs } -func (fs *filenameFilterFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { - fi, b, err := fs.fs.(afero.Lstater).LstatIfPossible(name) - if err != nil { - return nil, false, err - } - if !fs.filter.Match(name, fi.IsDir()) { - return nil, false, os.ErrNotExist - } - return fi, b, nil -} - func (fs *filenameFilterFs) Open(name string) (afero.File, error) { fi, err := fs.fs.Stat(name) if err != nil { @@ -87,8 +77,14 @@ func (fs *filenameFilterFs) OpenFile(name string, flag int, perm os.FileMode) (a } func (fs *filenameFilterFs) Stat(name string) (os.FileInfo, error) { - fi, _, err := fs.LstatIfPossible(name) - return fi, err + fi, err := fs.fs.Stat(name) + if err != nil { + return nil, err + } + if !fs.filter.Match(name, fi.IsDir()) { + return nil, os.ErrNotExist + } + return fi, nil } type filenameFilterDir struct { @@ -97,31 +93,35 @@ type filenameFilterDir struct { filter *glob.FilenameFilter } -func (f *filenameFilterDir) Readdir(count int) ([]os.FileInfo, error) { - fis, err := f.File.Readdir(-1) +func (f *filenameFilterDir) ReadDir(n int) ([]fs.DirEntry, error) { + des, err := f.File.(fs.ReadDirFile).ReadDir(n) 
if err != nil { return nil, err } - - var result []os.FileInfo - for _, fi := range fis { - fim := fi.(FileMetaInfo) - if f.filter.Match(strings.TrimPrefix(fim.Meta().Filename, f.base), fim.IsDir()) { - result = append(result, fi) + i := 0 + for _, de := range des { + fim := de.(FileMetaInfo) + rel := strings.TrimPrefix(fim.Meta().Filename, f.base) + if f.filter.Match(rel, de.IsDir()) { + des[i] = de + i++ } } + return des[:i], nil +} - return result, nil +func (f *filenameFilterDir) Readdir(count int) ([]os.FileInfo, error) { + panic("not supported: Use ReadDir") } func (f *filenameFilterDir) Readdirnames(count int) ([]string, error) { - dirsi, err := f.Readdir(count) + des, err := f.ReadDir(count) if err != nil { return nil, err } - dirs := make([]string, len(dirsi)) - for i, d := range dirsi { + dirs := make([]string, len(des)) + for i, d := range des { dirs[i] = d.Name() } return dirs, nil diff --git a/hugofs/filename_filter_fs_test.go b/hugofs/filename_filter_fs_test.go index b3e97a6a6..7b31f0f82 100644 --- a/hugofs/filename_filter_fs_test.go +++ b/hugofs/filename_filter_fs_test.go @@ -36,12 +36,12 @@ func TestFilenameFilterFs(t *testing.T) { for _, letter := range []string{"a", "b", "c"} { for i := 1; i <= 3; i++ { - c.Assert(afero.WriteFile(fs, filepath.Join(base, letter, fmt.Sprintf("my%d.txt", i)), []byte("some text file for"+letter), 0755), qt.IsNil) - c.Assert(afero.WriteFile(fs, filepath.Join(base, letter, fmt.Sprintf("my%d.json", i)), []byte("some json file for"+letter), 0755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join(base, letter, fmt.Sprintf("my%d.txt", i)), []byte("some text file for"+letter), 0o755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join(base, letter, fmt.Sprintf("my%d.json", i)), []byte("some json file for"+letter), 0o755), qt.IsNil) } } - fs = afero.NewBasePathFs(fs, base) + fs = NewBasePathFs(fs, base) filter, err := glob.NewFilenameFilter(nil, []string{"/b/**.txt"}) c.Assert(err, qt.IsNil) @@ -69,15 +69,16 @@ func TestFilenameFilterFs(t *testing.T) { assertExists("/b/my1.txt", false) dirB, err := fs.Open("/b") - defer dirB.Close() c.Assert(err, qt.IsNil) + defer dirB.Close() dirBEntries, err := dirB.Readdirnames(-1) + c.Assert(err, qt.IsNil) c.Assert(dirBEntries, qt.DeepEquals, []string{"my1.json", "my2.json", "my3.json"}) dirC, err := fs.Open("/c") - defer dirC.Close() c.Assert(err, qt.IsNil) + defer dirC.Close() dirCEntries, err := dirC.Readdirnames(-1) + c.Assert(err, qt.IsNil) c.Assert(dirCEntries, qt.DeepEquals, []string{"my1.json", "my1.txt", "my2.json", "my2.txt", "my3.json", "my3.txt"}) - } diff --git a/hugofs/files/classifier.go b/hugofs/files/classifier.go index bdac2d686..a8d231f73 100644 --- a/hugofs/files/classifier.go +++ b/hugofs/files/classifier.go @@ -14,16 +14,10 @@ package files import ( - "bufio" - "fmt" - "io" "os" "path/filepath" "sort" "strings" - "unicode" - - "github.com/spf13/afero" ) const ( @@ -80,99 +74,14 @@ func IsIndexContentFile(filename string) bool { return strings.HasPrefix(base, "index.") || strings.HasPrefix(base, "_index.") } -func IsHTMLFile(filename string) bool { - return htmlFileExtensionsSet[strings.TrimPrefix(filepath.Ext(filename), ".")] +func IsHTML(ext string) bool { + return htmlFileExtensionsSet[ext] } func IsContentExt(ext string) bool { return contentFileExtensionsSet[ext] } -type ContentClass string - -const ( - ContentClassLeaf ContentClass = "leaf" - ContentClassBranch ContentClass = "branch" - ContentClassFile ContentClass = "zfile" // Sort below - ContentClassContent ContentClass = 
"zcontent" -) - -func (c ContentClass) IsBundle() bool { - return c == ContentClassLeaf || c == ContentClassBranch -} - -func ClassifyContentFile(filename string, open func() (afero.File, error)) ContentClass { - if !IsContentFile(filename) { - return ContentClassFile - } - - if IsHTMLFile(filename) { - // We need to look inside the file. If the first non-whitespace - // character is a "<", then we treat it as a regular file. - // Eearlier we created pages for these files, but that had all sorts - // of troubles, and isn't what it says in the documentation. - // See https://github.com/gohugoio/hugo/issues/7030 - if open == nil { - panic(fmt.Sprintf("no file opener provided for %q", filename)) - } - - f, err := open() - if err != nil { - return ContentClassFile - } - ishtml := isHTMLContent(f) - f.Close() - if ishtml { - return ContentClassFile - } - - } - - if strings.HasPrefix(filename, "_index.") { - return ContentClassBranch - } - - if strings.HasPrefix(filename, "index.") { - return ContentClassLeaf - } - - return ContentClassContent -} - -var htmlComment = []rune{'<', '!', '-', '-'} - -func isHTMLContent(r io.Reader) bool { - br := bufio.NewReader(r) - i := 0 - for { - c, _, err := br.ReadRune() - if err != nil { - break - } - - if i > 0 { - if i >= len(htmlComment) { - return false - } - - if c != htmlComment[i] { - return true - } - - i++ - continue - } - - if !unicode.IsSpace(c) { - if i == 0 && c != '<' { - return false - } - i++ - } - } - return true -} - const ( ComponentFolderArchetypes = "archetypes" ComponentFolderStatic = "static" diff --git a/hugofs/files/classifier_test.go b/hugofs/files/classifier_test.go index 84036b870..f2fad56ca 100644 --- a/hugofs/files/classifier_test.go +++ b/hugofs/files/classifier_test.go @@ -15,7 +15,6 @@ package files import ( "path/filepath" - "strings" "testing" qt "github.com/frankban/quicktest" @@ -31,16 +30,6 @@ func TestIsContentFile(t *testing.T) { c.Assert(IsContentExt("json"), qt.Equals, false) } -func TestIsHTMLContent(t *testing.T) { - c := qt.New(t) - - c.Assert(isHTMLContent(strings.NewReader(" ")), qt.Equals, true) - c.Assert(isHTMLContent(strings.NewReader(" - -This is content with some shortcodes. - -Shortcode 1: {{< sc >}}. -Shortcode 2: {{< sc >}}. - -` - - const pageContentWithMarkdownShortcodes = `--- -title: Page with markdown shortcode -hugo: "Rocks!" -outputs: ["HTML", "JSON"] +title: Home in English --- +Home Content. +-- content/_index.no.md -- +--- +title: Hjem +cascade: + - _target: + kind: page + path: /posts/** + background: post.jpg + - _target: + kind: term + background: term.jpg +--- +Hjem Innhold. +-- content/posts/f1.txt -- +posts f1 text. +-- content/posts/sub/f1.txt -- +posts sub f1 text. +-- content/posts/p1/index.md -- ++++ +title = "Post 1" +lastMod = "2001-01-01" +tags = ["tag1"] +[[resources]] +src = '**' +[resources.params] +icon = 'enicon' ++++ +Content 1. +-- content/posts/p1/index.no.md -- ++++ +title = "Post 1 no" +lastMod = "2002-02-02" +tags = ["tag1", "tag2"] +[[resources]] +src = '**' +[resources.params] +icon = 'noicon' ++++ +Content 1 no. +-- content/posts/_index.md -- +--- +title: Posts +--- +-- content/posts/p1/f1.txt -- +posts p1 f1 text. +-- content/posts/p1/sub/ps1.md -- +--- +title: Post Sub 1 +--- +Content Sub 1. +-- content/posts/p2.md -- +--- +title: Post 2 +tags: ["tag1", "tag3"] +--- +Content 2. +-- content/posts/p2.no.md -- +--- +title: Post 2 No +--- +Content 2 No. +-- content/tags/_index.md -- +--- +title: Tags +--- +Content Tags. 
+-- content/tags/tag1/_index.md -- +--- +title: Tag 1 +--- +Content Tag 1. -This is summary. - - - -This is content[^a]. - -# Header above - -{{% markdown-shortcode %}} -# Header inside - -Some **markdown**.[^b] - -{{% /markdown-shortcode %}} - -# Heder below - -Some more content[^c]. - -Footnotes: - -[^a]: Fn 1 -[^b]: Fn 2 -[^c]: Fn 3 ` - pageContentAutoSummary := strings.Replace(pageContentAndSummaryDivider, "", "", 1) + b := NewIntegrationTestBuilder(IntegrationTestConfig{ + T: t, + TxtarString: files, + NeedsOsFS: true, + // Verbose: true, + // LogLevel: logg.LevelTrace, + }).Build() - b := newTestSitesBuilder(t).WithConfigFile("toml", configFile) - b.WithTemplatesAdded("shortcodes/markdown-shortcode.html", ` -Some **Markdown** in shortcode. + b.AssertFileContent("public/en/index.html", + "Home: en|home|/en/|Home in English|
<p>Home Content.</p>
\n|HTML", + "Site last mod: 2001-01-01", + "Home last mod: 2001-01-01", + "Translations: 1|", + "Len home.RegularPagesRecursive: 2|", + "Len site.RegularPages: 2|", + "Len site.Pages: 8|", + "Len site.AllPages: 16|", + "GetPage: /en/posts/p1/|Post 1|", + "RenderString with shortcode: Hello.|", + "Paginate: 1/2|", + ) + b.AssertFileContent("public/en/page/2/index.html", "Paginate: 2/2|") -{{ .Inner }} - - - -`) - - b.WithTemplatesAdded("shortcodes/markdown-shortcode.json", ` -Some **Markdown** in JSON shortcode. -{{ .Inner }} - -`) - - for i := 1; i <= 11; i++ { - if i%2 == 0 { - b.WithContent(fmt.Sprintf("blog/page%d.md", i), pageContentAndSummaryDivider) - b.WithContent(fmt.Sprintf("blog/page%d.no.md", i), pageContentAndSummaryDivider) - } else { - b.WithContent(fmt.Sprintf("blog/page%d.md", i), pageContentAutoSummary) - } - } - - for i := 1; i <= 5; i++ { - // Root section pages - b.WithContent(fmt.Sprintf("root%d.md", i), pageContentAutoSummary) - } - - // https://github.com/gohugoio/hugo/issues/4695 - b.WithContent("blog/markyshort.md", pageContentWithMarkdownShortcodes) - - // Add one bundle - b.WithContent("blog/mybundle/index.md", pageContentAndSummaryDivider) - b.WithContent("blog/mybundle/mydata.csv", "Bundled CSV") - - const ( - commonPageTemplate = `|{{ .Kind }}|{{ .Title }}|{{ .File.Path }}|{{ .Summary }}|{{ .Content }}|RelPermalink: {{ .RelPermalink }}|WordCount: {{ .WordCount }}|Pages: {{ .Pages }}|Data Pages: Pages({{ len .Data.Pages }})|Resources: {{ len .Resources }}|Summary: {{ .Summary }}` - commonPaginatorTemplate = `|Paginator: {{ with .Paginator }}{{ .PageNumber }}{{ else }}NIL{{ end }}` - commonListTemplateNoPaginator = `|{{ $pages := .Pages }}{{ if .IsHome }}{{ $pages = .Site.RegularPages }}{{ end }}{{ range $i, $e := ($pages | first 1) }}|Render {{ $i }}: {{ .Kind }}|{{ .Render "li" }}|{{ end }}|Site params: {{ $.Site.Params.hugo }}|RelPermalink: {{ .RelPermalink }}` - commonListTemplate = commonPaginatorTemplate + `|{{ $pages := .Pages }}{{ if .IsHome }}{{ $pages = .Site.RegularPages }}{{ end }}{{ range $i, $e := ($pages | first 1) }}|Render {{ $i }}: {{ .Kind }}|{{ .Render "li" }}|{{ end }}|Site params: {{ $.Site.Params.hugo }}|RelPermalink: {{ .RelPermalink }}` - commonShortcodeTemplate = `|{{ .Name }}|{{ .Ordinal }}|{{ .Page.Summary }}|{{ .Page.Content }}|WordCount: {{ .Page.WordCount }}` - prevNextTemplate = `|Prev: {{ with .Prev }}{{ .RelPermalink }}{{ end }}|Next: {{ with .Next }}{{ .RelPermalink }}{{ end }}` - prevNextInSectionTemplate = `|PrevInSection: {{ with .PrevInSection }}{{ .RelPermalink }}{{ end }}|NextInSection: {{ with .NextInSection }}{{ .RelPermalink }}{{ end }}` - paramsTemplate = `|Params: {{ .Params.hugo }}` - treeNavTemplate = `|CurrentSection: {{ .CurrentSection }}` + b.AssertFileContent("public/no/index.html", + "Home: no|home|/no/|Hjem|
<p>Hjem Innhold.</p>
\n|HTML", + "Site last mod: 2002-02-02", + "Home last mod: 2002-02-02", + "Translations: 1", + "GetPage: /no/posts/p1/|Post 1 no|", ) - b.WithTemplates( - "_default/list.html", "HTML: List"+commonPageTemplate+commonListTemplate+"|First Site: {{ .Sites.First.Title }}", - "_default/list.json", "JSON: List"+commonPageTemplate+commonListTemplateNoPaginator, - "_default/list.csv", "CSV: List"+commonPageTemplate+commonListTemplateNoPaginator, - "_default/single.html", "HTML: Single"+commonPageTemplate+prevNextTemplate+prevNextInSectionTemplate+treeNavTemplate, - "_default/single.json", "JSON: Single"+commonPageTemplate, + b.AssertFileContent("public/en/index.json", "Home:en|home|/en/|Home in English|
<p>Home Content.</p>
\n|JSON") + b.AssertFileContent("public/no/index.json", "Home:no|home|/no/|Hjem|
<p>Hjem Innhold.</p>
\n|JSON") - // For .Render test - "_default/li.html", `HTML: LI|{{ strings.Contains .Content "HTML: Shortcode: sc" }}`+paramsTemplate, - "_default/li.json", `JSON: LI|{{ strings.Contains .Content "JSON: Shortcode: sc" }}`+paramsTemplate, - "_default/li.csv", `CSV: LI|{{ strings.Contains .Content "CSV: Shortcode: sc" }}`+paramsTemplate, - - "404.html", "{{ .Kind }}|{{ .Title }}|Page not found", - - "shortcodes/sc.html", "HTML: Shortcode: "+commonShortcodeTemplate, - "shortcodes/sc.json", "JSON: Shortcode: "+commonShortcodeTemplate, - "shortcodes/sc.csv", "CSV: Shortcode: "+commonShortcodeTemplate, + b.AssertFileContent("public/en/posts/p1/index.html", + "Single: en|page|/en/posts/p1/|Post 1|
<p>Content 1.</p>
\n|Len Resources: 2|", + "Resources: text|/en/posts/p1/f1.txt|text/plain|map[icon:enicon] - page||application/octet-stream|map[draft:false iscjklanguage:false title:Post Sub 1] -", + "Icon: enicon", + "Icon fingerprinted: enicon|/en/posts/p1/f1.e5746577af5cbfc4f34c558051b7955a9a5a795a84f1c6ab0609cb3473a924cb.txt|", + "NextInSection: |\nPrevInSection: /en/posts/p2/|Post 2|", + "GetTerms: name: tag1, title: Tag 1|", ) - b.CreateSites().Build(BuildCfg{}) - - b.AssertFileContent("public/blog/page1/index.html", - "This is content with some shortcodes.", - "Page with outputs", - "Pages: Pages(0)", - "RelPermalink: /blog/page1/|", - "Shortcode 1: HTML: Shortcode: |sc|0|||WordCount: 0.", - "Shortcode 2: HTML: Shortcode: |sc|1|||WordCount: 0.", - "Prev: /blog/page10/|Next: /blog/mybundle/", - "PrevInSection: /blog/page10/|NextInSection: /blog/mybundle/", - "Summary: This is summary.", - "CurrentSection: Page(/blog)", - ) - - b.AssertFileContent("public/blog/page1/index.json", - "JSON: Single|page|Page with outputs|", - "SON: Shortcode: |sc|0||") - - b.AssertFileContent("public/index.html", - "home|In English", - "Site params: Rules", - "Pages: Pages(6)|Data Pages: Pages(6)", - "Paginator: 1", - "First Site: In English", - "RelPermalink: /", - ) - - b.AssertFileContent("public/no/index.html", "home|På norsk", "RelPermalink: /no/") - - // Check RSS - rssHome := b.FileContent("public/index.xml") - c.Assert(rssHome, qt.Contains, ``) - c.Assert(strings.Count(rssHome, ""), qt.Equals, 3) // rssLimit = 3 - - // .Render should use template/content from the current output format - // even if that output format isn't configured for that page. - b.AssertFileContent( - "public/index.json", - "Render 0: page|JSON: LI|false|Params: Rocks!", - ) - - b.AssertFileContent( - "public/index.html", - "Render 0: page|HTML: LI|false|Params: Rocks!|", - ) - - b.AssertFileContent( - "public/index.csv", - "Render 0: page|CSV: LI|false|Params: Rocks!|", - ) - - // Check bundled resources - b.AssertFileContent( - "public/blog/mybundle/index.html", + b.AssertFileContent("public/no/posts/p1/index.html", "Resources: 1", + "Resources: text|/en/posts/p1/f1.txt|text/plain|map[icon:noicon] -", + "Icon: noicon", + "Icon fingerprinted: noicon|/en/posts/p1/f1.e5746577af5cbfc4f34c558051b7955a9a5a795a84f1c6ab0609cb3473a924cb.txt|", + "Background: post.jpg", + "NextInSection: |\nPrevInSection: /no/posts/p2/|Post 2 No|", ) - // Check pages in root section - b.AssertFileContent( - "public/root3/index.html", - "Single|page|Page with outputs|root3.md|", - "Prev: /root4/|Next: /root2/|PrevInSection: /root4/|NextInSection: /root2/", + b.AssertFileContent("public/en/posts/index.html", + "List: en|section|/en/posts/|Posts||Len Resources: 2|", + "Resources: text|/en/posts/f1.txt|text/plain - text|/en/posts/sub/f1.txt|text/plain -", + "List last mod: 2001-01-01", ) - b.AssertFileContent( - "public/root3/index.json", "Shortcode 1: JSON:") + b.AssertFileContent("public/no/posts/index.html", + "List last mod: 2002-02-02", + ) - // Paginators - b.AssertFileContent("public/page/1/index.html", `rel="canonical" href="https://example.com/"`) - b.AssertFileContent("public/page/2/index.html", "HTML: List|home|In English|", "Paginator: 2") + b.AssertFileContent("public/en/posts/p2/index.html", "Single: en|page|/en/posts/p2/|Post 2|
<p>Content 2.</p>
\n|", + "|Len Resources: 0", + "GetTerms: name: tag1, title: Tag 1|name: tag3, title: Tag3|", + ) + b.AssertFileContent("public/no/posts/p2/index.html", "Single: no|page|/no/posts/p2/|Post 2 No|
<p>Content 2 No.</p>
\n|") - // 404 - b.AssertFileContent("public/404.html", "404|404 Page not found") + b.AssertFileContent("public/no/categories/index.html", + "Kind: taxonomy", + "Type: categories", + ) + b.AssertFileContent("public/no/tags/index.html", + "Kind: taxonomy", + "Type: tags", + ) - // Sitemaps - b.AssertFileContent("public/en/sitemap.xml", "https://example.com/blog/") - b.AssertFileContent("public/no/sitemap.xml", `hreflang="no"`) + b.AssertFileContent("public/no/tags/tag1/index.html", + "Background: term.jpg", + "Kind: term", + "Type: tags", + "Paginate: 1/1|", + ) - b.AssertFileContent("public/sitemap.xml", "https://example.com/en/sitemap.xml", "https://example.com/no/sitemap.xml") + b.AssertFileContent("public/en/tags/tag1/index.html", + "Kind: term", + "Type: tags", + "Paginate: 1/2|", + ) +} - // robots.txt - b.AssertFileContent("public/robots.txt", `User-agent: *`) +// Basic tests that verifies that the different file systems work as expected. +func TestSmokeFilesystems(t *testing.T) { + t.Parallel() - // Aliases - b.AssertFileContent("public/a/b/c/index.html", `refresh`) + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +defaultContentLanguage = "en" +defaultContentLanguageInSubdir = true +[languages] +[languages.en] +title = "In English" +[languages.nn] +title = "På nynorsk" +[module] +[[module.mounts]] +source = "i18n" +target = "i18n" +[[module.mounts]] +source = "data" +target = "data" +[[module.mounts]] +source = "content/en" +target = "content" +lang = "en" +[[module.mounts]] +source = "content/nn" +target = "content" +lang = "nn" +[[module.imports]] +path = "mytheme" +-- layouts/index.html -- +i18n s1: {{ i18n "s1" }}| +i18n s2: {{ i18n "s2" }}| +data s1: {{ site.Data.d1.s1 }}| +data s2: {{ site.Data.d1.s2 }}| +title: {{ .Title }}| +-- themes/mytheme/hugo.toml -- +[[module.mounts]] +source = "i18n" +target = "i18n" +[[module.mounts]] +source = "data" +target = "data" +# i18n files both project and theme. +-- i18n/en.toml -- +[s1] +other = 's1project' +-- i18n/nn.toml -- +[s1] +other = 's1prosjekt' +-- themes/mytheme/i18n/en.toml -- +[s1] +other = 's1theme' +[s2] +other = 's2theme' +# data files both project and theme. +-- data/d1.yaml -- +s1: s1project +-- themes/mytheme/data/d1.yaml -- +s1: s1theme +s2: s2theme +# Content +-- content/en/_index.md -- +--- +title: "Home" +--- +-- content/nn/_index.md -- +--- +title: "Heim" +--- - // Markdown vs shortcodes - // Check that all footnotes are grouped (even those from inside the shortcode) - b.AssertFileContentRe("public/blog/markyshort/index.html", `Footnotes:.*
<ol>.*Fn 1.*Fn 2.*Fn 3.*</ol>
`) +` + b := Test(t, files) + + b.AssertFileContent("public/en/index.html", + "i18n s1: s1project", "i18n s2: s2theme", + "data s1: s1project", "data s2: s2theme", + "title: Home", + ) + + b.AssertFileContent("public/nn/index.html", + "i18n s1: s1prosjekt", "i18n s2: s2theme", + "data s1: s1project", "data s2: s2theme", + "title: Heim", + ) } // https://github.com/golang/go/issues/30286 diff --git a/hugolib/image_test.go b/hugolib/image_test.go index db1707c22..b3b933711 100644 --- a/hugolib/image_test.go +++ b/hugolib/image_test.go @@ -73,11 +73,10 @@ SUNSET2: {{ $resized2.RelPermalink }}/{{ $resized2.Width }}/Lat: {{ $resized2.Ex b.Build(BuildCfg{}) b.AssertFileContent("public/index.html", "SUNSET FOR: en: /bundle/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_resize_q75_box.jpg/200/Lat: 36.59744166666667") - b.AssertFileContent("public/fr/index.html", "SUNSET FOR: fr: /fr/bundle/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_resize_q75_box.jpg/200/Lat: 36.59744166666667") + b.AssertFileContent("public/fr/index.html", "SUNSET FOR: fr: /bundle/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_resize_q75_box.jpg/200/Lat: 36.59744166666667") b.AssertFileContent("public/index.html", " SUNSET2: /images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_123x234_resize_q75_box.jpg/123/Lat: 36.59744166666667") b.AssertFileContent("public/nn/index.html", " SUNSET2: /images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_123x234_resize_q75_box.jpg/123/Lat: 36.59744166666667") - b.AssertImage(200, 200, "public/fr/bundle/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_resize_q75_box.jpg") b.AssertImage(200, 200, "public/bundle/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_resize_q75_box.jpg") // Check the file cache @@ -85,10 +84,10 @@ SUNSET2: {{ $resized2.RelPermalink }}/{{ $resized2.Width }}/Lat: {{ $resized2.Ex b.AssertFileContent("resources/_gen/images/bundle/sunset_3166614710256882113.json", "DateTimeDigitized|time.Time", "PENTAX") - b.AssertImage(123, 234, "resources/_gen/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_123x234_resize_q75_box.jpg") - b.AssertFileContent("resources/_gen/images/sunset_3166614710256882113.json", + + b.AssertImage(123, 234, "resources/_gen/images/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_123x234_resize_q75_box.jpg") + b.AssertFileContent("resources/_gen/images/images/sunset_3166614710256882113.json", "DateTimeDigitized|time.Time", "PENTAX") - // TODO(bep) add this as a default assertion after Build()? b.AssertNoDuplicateWrites() } diff --git a/hugolib/integration_test.go b/hugolib/integration_test.go index 93468eceb..250c7bcec 100644 --- a/hugolib/integration_test.go +++ b/hugolib/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -86,7 +86,6 @@ tags: ['T1'] b.AssertFileContent("public/en/tags/t1/index.html", "
  • T1-en
", ) - } // Issue #11538 @@ -112,7 +111,6 @@ func TestRenderStringBadMarkupOpt(t *testing.T) { if !strings.Contains(err.Error(), want) { t.Errorf("error msg must contain %q, error msg actually contains %q", want, err.Error()) } - } // Issue #11547 diff --git a/hugolib/integrationtest_builder.go b/hugolib/integrationtest_builder.go index a0cae1d95..34d3c5530 100644 --- a/hugolib/integrationtest_builder.go +++ b/hugolib/integrationtest_builder.go @@ -6,6 +6,7 @@ import ( "errors" "fmt" "io" + "math/rand" "os" "path/filepath" "regexp" @@ -29,17 +30,64 @@ import ( "github.com/gohugoio/hugo/htesting" "github.com/gohugoio/hugo/hugofs" "github.com/spf13/afero" + "golang.org/x/text/unicode/norm" "golang.org/x/tools/txtar" ) +type TestOpt func(*IntegrationTestConfig) + +func TestOptRunning() TestOpt { + return func(c *IntegrationTestConfig) { + c.Running = true + } +} + +// Enable tracing in integration tests. +// THis should only be used during development and not committed to the repo. +func TestOptTrace() TestOpt { + return func(c *IntegrationTestConfig) { + c.LogLevel = logg.LevelTrace + } +} + +// TestOptDebug will enable debug logging in integration tests. +func TestOptDebug() TestOpt { + return func(c *IntegrationTestConfig) { + c.LogLevel = logg.LevelDebug + } +} + +// TestOptWithNFDOnDarwin will normalize the Unicode filenames to NFD on Darwin. +func TestOptWithNFDOnDarwin() TestOpt { + return func(c *IntegrationTestConfig) { + c.NFDFormOnDarwin = true + } +} + +// TestOptWithWorkingDir allows setting any config optiona as a function al option. +func TestOptWithConfig(fn func(c *IntegrationTestConfig)) TestOpt { + return func(c *IntegrationTestConfig) { + fn(c) + } +} + // Test is a convenience method to create a new IntegrationTestBuilder from some files and run a build. -func Test(t testing.TB, files string) *IntegrationTestBuilder { - return NewIntegrationTestBuilder(IntegrationTestConfig{T: t, TxtarString: files}).Build() +func Test(t testing.TB, files string, opts ...TestOpt) *IntegrationTestBuilder { + cfg := IntegrationTestConfig{T: t, TxtarString: files} + for _, o := range opts { + o(&cfg) + } + return NewIntegrationTestBuilder(cfg).Build() } // TestRunning is a convenience method to create a new IntegrationTestBuilder from some files with Running set to true and run a build. -func TestRunning(t testing.TB, files string) *IntegrationTestBuilder { - return NewIntegrationTestBuilder(IntegrationTestConfig{T: t, TxtarString: files, Running: true}).Build() +// Deprecated: Use Test with TestOptRunning instead. 
+func TestRunning(t testing.TB, files string, opts ...TestOpt) *IntegrationTestBuilder { + cfg := IntegrationTestConfig{T: t, TxtarString: files, Running: true} + for _, o := range opts { + o(&cfg) + } + return NewIntegrationTestBuilder(cfg).Build() } func NewIntegrationTestBuilder(conf IntegrationTestConfig) *IntegrationTestBuilder { @@ -50,6 +98,12 @@ func NewIntegrationTestBuilder(conf IntegrationTestConfig) *IntegrationTestBuild data := txtar.Parse([]byte(conf.TxtarString)) + if conf.NFDFormOnDarwin { + for i, f := range data.Files { + data.Files[i].Name = norm.NFD.String(f.Name) + } + } + c, ok := conf.T.(*qt.C) if !ok { c = qt.New(conf.T) @@ -95,10 +149,11 @@ type IntegrationTestBuilder struct { createdFiles []string removedFiles []string renamedFiles []string + renamedDirs []string buildCount int GCCount int - counters *testCounters + counters *buildCounters logBuff lockingBuffer builderInit sync.Once @@ -142,11 +197,6 @@ func (s *IntegrationTestBuilder) AssertBuildCountLayouts(count int) { s.Assert(s.H.init.layouts.InitCount(), qt.Equals, count) } -func (s *IntegrationTestBuilder) AssertBuildCountTranslations(count int) { - s.Helper() - s.Assert(s.H.init.translations.InitCount(), qt.Equals, count) -} - func (s *IntegrationTestBuilder) AssertFileCount(dirname string, expected int) { s.Helper() fs := s.fs.WorkingDirReadOnly @@ -168,6 +218,7 @@ func (s *IntegrationTestBuilder) AssertFileContent(filename string, matches ...s s.Helper() content := strings.TrimSpace(s.FileContent(filename)) for _, m := range matches { + cm := qt.Commentf("File: %s Match %s", filename, m) lines := strings.Split(m, "\n") for _, match := range lines { match = strings.TrimSpace(match) @@ -180,10 +231,10 @@ func (s *IntegrationTestBuilder) AssertFileContent(filename string, matches ...s match = strings.TrimPrefix(match, "! ") } if negate { - s.Assert(content, qt.Not(qt.Contains), match, qt.Commentf(m)) + s.Assert(content, qt.Not(qt.Contains), match, cm) continue } - s.Assert(content, qt.Contains, match, qt.Commentf(m)) + s.Assert(content, qt.Contains, match, cm) } } } @@ -208,24 +259,6 @@ func (s *IntegrationTestBuilder) AssertFileExists(filename string, b bool) { s.Assert(err, checker) } -// Deprecated: Use AssertFileExists instead but remember to prefix with "public/". -// I have had some surprises with this one, hence the deprecation. 
-func (s *IntegrationTestBuilder) AssertDestinationExists(filename string, b bool) { - checker := qt.IsTrue - if !b { - checker = qt.IsFalse - } - s.Assert(s.destinationExists(filepath.Clean(filename)), checker) -} - -func (s *IntegrationTestBuilder) destinationExists(filename string) bool { - b, err := helpers.Exists(filename, s.fs.PublishDir) - if err != nil { - panic(err) - } - return b -} - func (s *IntegrationTestBuilder) AssertIsFileError(err error) herrors.FileError { s.Assert(err, qt.ErrorAs, new(herrors.FileError)) return herrors.UnwrapFileError(err) @@ -233,12 +266,18 @@ func (s *IntegrationTestBuilder) AssertIsFileError(err error) herrors.FileError func (s *IntegrationTestBuilder) AssertRenderCountContent(count int) { s.Helper() - s.Assert(s.counters.contentRenderCounter, qt.Equals, uint64(count)) + s.Assert(s.counters.contentRenderCounter.Load(), qt.Equals, uint64(count)) } func (s *IntegrationTestBuilder) AssertRenderCountPage(count int) { s.Helper() - s.Assert(s.counters.pageRenderCounter, qt.Equals, uint64(count)) + s.Assert(s.counters.pageRenderCounter.Load(), qt.Equals, uint64(count)) +} + +func (s *IntegrationTestBuilder) AssertRenderCountPageBetween(from, to int) { + s.Helper() + i := int(s.counters.pageRenderCounter.Load()) + s.Assert(i >= from && i <= to, qt.IsTrue) } func (s *IntegrationTestBuilder) Build() *IntegrationTestBuilder { @@ -246,10 +285,22 @@ func (s *IntegrationTestBuilder) Build() *IntegrationTestBuilder { _, err := s.BuildE() if s.Cfg.Verbose || err != nil { fmt.Println(s.logBuff.String()) + if s.H != nil && err == nil { + for _, s := range s.H.Sites { + m := s.pageMap + var buff bytes.Buffer + fmt.Fprintf(&buff, "PageMap for site %q\n\n", s.Language().Lang) + m.debugPrint("", 999, &buff) + fmt.Println(buff.String()) + } + } + } else if s.Cfg.LogLevel <= logg.LevelDebug { + fmt.Println(s.logBuff.String()) } s.Assert(err, qt.IsNil) if s.Cfg.RunGC { s.GCCount, err = s.H.GC() + s.Assert(err, qt.IsNil) } return s @@ -286,7 +337,13 @@ type IntegrationTestDebugConfig struct { PrefixPagemap string } -func (s *IntegrationTestBuilder) EditFileReplace(filename string, replacementFunc func(s string) string) *IntegrationTestBuilder { +func (s *IntegrationTestBuilder) EditFileReplaceAll(filename, old, new string) *IntegrationTestBuilder { + return s.EditFileReplaceFunc(filename, func(s string) string { + return strings.ReplaceAll(s, old, new) + }) +} + +func (s *IntegrationTestBuilder) EditFileReplaceFunc(filename string, replacementFunc func(s string) string) *IntegrationTestBuilder { absFilename := s.absFilename(filename) b, err := afero.ReadFile(s.fs.Source, absFilename) s.Assert(err, qt.IsNil) @@ -337,6 +394,26 @@ func (s *IntegrationTestBuilder) RenameFile(old, new string) *IntegrationTestBui return s } +func (s *IntegrationTestBuilder) RenameDir(old, new string) *IntegrationTestBuilder { + absOldFilename := s.absFilename(old) + absNewFilename := s.absFilename(new) + s.renamedDirs = append(s.renamedDirs, absOldFilename) + s.changedFiles = append(s.changedFiles, absNewFilename) + afero.Walk(s.fs.Source, absOldFilename, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if info.IsDir() { + return nil + } + s.createdFiles = append(s.createdFiles, strings.Replace(path, absOldFilename, absNewFilename, 1)) + return nil + }) + s.Assert(s.fs.Source.MkdirAll(filepath.Dir(absNewFilename), 0o777), qt.IsNil) + s.Assert(s.fs.Source.Rename(absOldFilename, absNewFilename), qt.IsNil) + return s +} + func (s *IntegrationTestBuilder) 
FileContent(filename string) string { s.Helper() return s.readWorkingDir(s, s.fs, filepath.FromSlash(filename)) @@ -353,7 +430,7 @@ func (s *IntegrationTestBuilder) initBuilder() error { } if s.Cfg.LogLevel == 0 { - s.Cfg.LogLevel = logg.LevelWarn + s.Cfg.LogLevel = logg.LevelError } isBinaryRe := regexp.MustCompile(`^(.*)(\.png|\.jpg)$`) @@ -365,7 +442,7 @@ func (s *IntegrationTestBuilder) initBuilder() error { data := bytes.TrimSuffix(f.Data, []byte("\n")) datastr := strings.TrimSpace(string(data)) if strings.HasPrefix(datastr, dataSourceFilenamePrefix) { - // Read from file relative to tue current dir. + // Read from file relative to the current dir. var err error wd, _ := os.Getwd() filename := filepath.Join(wd, strings.TrimSpace(strings.TrimPrefix(datastr, dataSourceFilenamePrefix))) @@ -404,7 +481,12 @@ func (s *IntegrationTestBuilder) initBuilder() error { flags.Set("workingDir", s.Cfg.WorkingDir) } - w := &s.logBuff + var w io.Writer + if s.Cfg.LogLevel == logg.LevelTrace { + w = os.Stdout + } else { + w = &s.logBuff + } logger := loggers.New( loggers.Options{ @@ -476,18 +558,22 @@ func (s *IntegrationTestBuilder) absFilename(filename string) string { return filename } +func (s *IntegrationTestBuilder) reset() { + s.changedFiles = nil + s.createdFiles = nil + s.removedFiles = nil + s.renamedFiles = nil +} + func (s *IntegrationTestBuilder) build(cfg BuildCfg) error { s.Helper() defer func() { - s.changedFiles = nil - s.createdFiles = nil - s.removedFiles = nil - s.renamedFiles = nil + s.reset() }() changeEvents := s.changeEvents() s.logBuff.Reset() - s.counters = &testCounters{} + s.counters = &buildCounters{} cfg.testCounters = s.counters if s.buildCount > 0 && (len(changeEvents) == 0) { @@ -522,6 +608,15 @@ func (s *IntegrationTestBuilder) changeEvents() []fsnotify.Event { Op: fsnotify.Rename, }) } + + for _, v := range s.renamedDirs { + events = append(events, fsnotify.Event{ + Name: v, + // This is what we get on MacOS. + Op: fsnotify.Remove | fsnotify.Rename, + }) + } + for _, v := range s.changedFiles { events = append(events, fsnotify.Event{ Name: v, @@ -535,6 +630,12 @@ func (s *IntegrationTestBuilder) changeEvents() []fsnotify.Event { }) } + // Shuffle events. + for i := range events { + j := rand.Intn(i + 1) + events[i], events[j] = events[j], events[i] + } + return events } @@ -598,6 +699,7 @@ type IntegrationTestConfig struct { // Will print the log buffer after the build Verbose bool + // The log level to use. LogLevel logg.Level // Whether it needs the real file system (e.g. for js.Build tests). @@ -612,7 +714,12 @@ type IntegrationTestConfig struct { // Whether to run npm install before Build. NeedsNpmInstall bool + // Whether to normalize the Unicode filenames to NFD on Darwin. + NFDFormOnDarwin bool + + // The working dir to use. If not absolute, a temp dir will be created. WorkingDir string + // The config to pass to Build. 
BuildCfg BuildCfg } diff --git a/hugolib/language_content_dir_test.go b/hugolib/language_content_dir_test.go index a22201475..e02e118f5 100644 --- a/hugolib/language_content_dir_test.go +++ b/hugolib/language_content_dir_test.go @@ -14,513 +14,45 @@ package hugolib import ( - "context" - "fmt" - "os" - "path/filepath" "testing" - - "github.com/gohugoio/hugo/resources/kinds" - "github.com/spf13/cast" - - qt "github.com/frankban/quicktest" ) -/* - -/en/p1.md -/nn/p1.md - -.Readdir - -- Name() => p1.en.md, p1.nn.md - -.Stat(name) - -.Open() --- real file name - - -*/ - func TestLanguageContentRoot(t *testing.T) { - t.Parallel() - c := qt.New(t) - - config := ` + files := ` +-- hugo.toml -- baseURL = "https://example.org/" - defaultContentLanguage = "en" defaultContentLanguageInSubdir = true - -contentDir = "content/main" -workingDir = "/my/project" - -[Languages] -[Languages.en] -weight = 10 -title = "In English" -languageName = "English" - -[Languages.nn] -weight = 20 -title = "På Norsk" -languageName = "Norsk" -# This tells Hugo that all content in this directory is in the Norwegian language. -# It does not have to have the "my-page.nn.md" format. It can, but that is optional. -contentDir = "content/norsk" - -[Languages.sv] -weight = 30 -title = "På Svenska" -languageName = "Svensk" -contentDir = "content/svensk" -` - - pageTemplate := ` ---- -title: %s -slug: %s -weight: %d ---- - -Content. - -SVP3-REF: {{< ref path="/sect/page3.md" lang="sv" >}} -SVP3-RELREF: {{< relref path="/sect/page3.md" lang="sv" >}} - -` - - pageBundleTemplate := ` ---- -title: %s -weight: %d ---- - -Content. - -` - var contentFiles []string - section := "sect" - - contentRoot := func(lang string) string { - switch lang { - case "nn": - return "content/norsk" - case "sv": - return "content/svensk" - default: - return "content/main" - } - } - - contentSectionRoot := func(lang string) string { - return contentRoot(lang) + "/" + section - } - - for _, lang := range []string{"en", "nn", "sv"} { - for j := 1; j <= 10; j++ { - if (lang == "nn" || lang == "en") && j%4 == 0 { - // Skip 4 and 8 for nn - // We also skip it for en, but that is added to the Swedish directory below. - continue - } - - if lang == "sv" && j%5 == 0 { - // Skip 5 and 10 for sv - continue - } - - base := fmt.Sprintf("p-%s-%d", lang, j) - slug := base - langID := "" - - if lang == "sv" && j%4 == 0 { - // Put an English page in the Swedish content dir. - langID = ".en" - } - - if lang == "en" && j == 8 { - // This should win over the sv variant above. 
- langID = ".en" - } - - slug += langID - - contentRoot := contentSectionRoot(lang) - - filename := filepath.Join(contentRoot, fmt.Sprintf("page%d%s.md", j, langID)) - contentFiles = append(contentFiles, filename, fmt.Sprintf(pageTemplate, slug, slug, j)) - } - } - - // Put common translations in all of them - for i, lang := range []string{"en", "nn", "sv"} { - contentRoot := contentSectionRoot(lang) - - slug := fmt.Sprintf("common_%s", lang) - - filename := filepath.Join(contentRoot, "common.md") - contentFiles = append(contentFiles, filename, fmt.Sprintf(pageTemplate, slug, slug, 100+i)) - - for j, lang2 := range []string{"en", "nn", "sv"} { - filename := filepath.Join(contentRoot, fmt.Sprintf("translated_all.%s.md", lang2)) - langSlug := slug + "_translated_all_" + lang2 - contentFiles = append(contentFiles, filename, fmt.Sprintf(pageTemplate, langSlug, langSlug, 200+i+j)) - } - - for j, lang2 := range []string{"sv", "nn"} { - if lang == "en" { - continue - } - filename := filepath.Join(contentRoot, fmt.Sprintf("translated_some.%s.md", lang2)) - langSlug := slug + "_translated_some_" + lang2 - contentFiles = append(contentFiles, filename, fmt.Sprintf(pageTemplate, langSlug, langSlug, 300+i+j)) - } - } - - // Add a bundle with some images - for i, lang := range []string{"en", "nn", "sv"} { - contentRoot := contentSectionRoot(lang) - slug := fmt.Sprintf("bundle_%s", lang) - filename := filepath.Join(contentRoot, "mybundle", "index.md") - contentFiles = append(contentFiles, filename, fmt.Sprintf(pageBundleTemplate, slug, 400+i)) - if lang == "en" { - imageFilename := filepath.Join(contentRoot, "mybundle", "logo.png") - contentFiles = append(contentFiles, imageFilename, "PNG Data") - } - imageFilename := filepath.Join(contentRoot, "mybundle", "featured.png") - contentFiles = append(contentFiles, imageFilename, fmt.Sprintf("PNG Data for %s", lang)) - - // Add some bundled pages - contentFiles = append(contentFiles, filepath.Join(contentRoot, "mybundle", "p1.md"), fmt.Sprintf(pageBundleTemplate, slug, 401+i)) - contentFiles = append(contentFiles, filepath.Join(contentRoot, "mybundle", "sub", "p1.md"), fmt.Sprintf(pageBundleTemplate, slug, 402+i)) - - } - - // Add some static files inside the content dir - // https://github.com/gohugoio/hugo/issues/5759 - for _, lang := range []string{"en", "nn", "sv"} { - contentRoot := contentRoot(lang) - for i := 0; i < 2; i++ { - filename := filepath.Join(contentRoot, "mystatic", fmt.Sprintf("file%d.yaml", i)) - contentFiles = append(contentFiles, filename, lang) - } - } - - b := newTestSitesBuilder(t) - b.WithWorkingDir("/my/project").WithConfigFile("toml", config).WithContent(contentFiles...).CreateSites() - - _ = os.Stdout - - err := b.BuildE(BuildCfg{}) - - // dumpPages(b.H.Sites[1].RegularPages()...) - - c.Assert(err, qt.IsNil) - - c.Assert(len(b.H.Sites), qt.Equals, 3) - - enSite := b.H.Sites[0] - nnSite := b.H.Sites[1] - svSite := b.H.Sites[2] - - b.AssertFileContent("public/en/mystatic/file1.yaml", "en") - b.AssertFileContent("public/nn/mystatic/file1.yaml", "nn") - - // dumpPages(nnSite.RegularPages()...) 
- - c.Assert(len(nnSite.RegularPages()), qt.Equals, 12) - c.Assert(len(enSite.RegularPages()), qt.Equals, 13) - - c.Assert(len(svSite.RegularPages()), qt.Equals, 10) - - svP2, err := svSite.getPageNew(nil, "/sect/page2.md") - c.Assert(err, qt.IsNil) - nnP2, err := nnSite.getPageNew(nil, "/sect/page2.md") - c.Assert(err, qt.IsNil) - - enP2, err := enSite.getPageNew(nil, "/sect/page2.md") - c.Assert(err, qt.IsNil) - c.Assert(enP2.Language().Lang, qt.Equals, "en") - c.Assert(svP2.Language().Lang, qt.Equals, "sv") - c.Assert(nnP2.Language().Lang, qt.Equals, "nn") - - content, _ := nnP2.Content(context.Background()) - contentStr := cast.ToString(content) - c.Assert(contentStr, qt.Contains, "SVP3-REF: https://example.org/sv/sect/p-sv-3/") - c.Assert(contentStr, qt.Contains, "SVP3-RELREF: /sv/sect/p-sv-3/") - - // Test RelRef with and without language indicator. - nn3RefArgs := map[string]any{ - "path": "/sect/page3.md", - "lang": "nn", - } - nnP3RelRef, err := svP2.RelRef( - nn3RefArgs, - ) - c.Assert(err, qt.IsNil) - c.Assert(nnP3RelRef, qt.Equals, "/nn/sect/p-nn-3/") - nnP3Ref, err := svP2.Ref( - nn3RefArgs, - ) - c.Assert(err, qt.IsNil) - c.Assert(nnP3Ref, qt.Equals, "https://example.org/nn/sect/p-nn-3/") - - for i, p := range enSite.RegularPages() { - j := i + 1 - c.Assert(p.Language().Lang, qt.Equals, "en") - c.Assert(p.Section(), qt.Equals, "sect") - if j < 9 { - if j%4 == 0 { - } else { - c.Assert(p.Title(), qt.Contains, "p-en") - } - } - } - - for _, p := range nnSite.RegularPages() { - c.Assert(p.Language().Lang, qt.Equals, "nn") - c.Assert(p.Title(), qt.Contains, "nn") - } - - for _, p := range svSite.RegularPages() { - c.Assert(p.Language().Lang, qt.Equals, "sv") - c.Assert(p.Title(), qt.Contains, "sv") - } - - // Check bundles - bundleEn := enSite.RegularPages()[len(enSite.RegularPages())-1] - bundleNn := nnSite.RegularPages()[len(nnSite.RegularPages())-1] - bundleSv := svSite.RegularPages()[len(svSite.RegularPages())-1] - - c.Assert(bundleEn.RelPermalink(), qt.Equals, "/en/sect/mybundle/") - c.Assert(bundleSv.RelPermalink(), qt.Equals, "/sv/sect/mybundle/") - - c.Assert(len(bundleNn.Resources()), qt.Equals, 4) - c.Assert(len(bundleSv.Resources()), qt.Equals, 4) - c.Assert(len(bundleEn.Resources()), qt.Equals, 4) - - b.AssertFileContent("public/en/sect/mybundle/index.html", "image/png: /en/sect/mybundle/logo.png") - b.AssertFileContent("public/nn/sect/mybundle/index.html", "image/png: /nn/sect/mybundle/logo.png") - b.AssertFileContent("public/sv/sect/mybundle/index.html", "image/png: /sv/sect/mybundle/logo.png") - - b.AssertFileContent("public/sv/sect/mybundle/featured.png", "PNG Data for sv") - b.AssertFileContent("public/nn/sect/mybundle/featured.png", "PNG Data for nn") - b.AssertFileContent("public/en/sect/mybundle/featured.png", "PNG Data for en") - b.AssertFileContent("public/en/sect/mybundle/logo.png", "PNG Data") - b.AssertFileContent("public/sv/sect/mybundle/logo.png", "PNG Data") - b.AssertFileContent("public/nn/sect/mybundle/logo.png", "PNG Data") - - nnSect := nnSite.getPage(kinds.KindSection, "sect") - c.Assert(nnSect, qt.Not(qt.IsNil)) - c.Assert(len(nnSect.Pages()), qt.Equals, 12) - nnHome := nnSite.Home() - c.Assert(nnHome.RelPermalink(), qt.Equals, "/nn/") -} - -// https://github.com/gohugoio/hugo/issues/6463 -func TestLanguageRootSectionsMismatch(t *testing.T) { - t.Parallel() - - config := ` -baseURL: "https://example.org/" -languageCode: "en-us" -title: "My New Hugo Site" -theme: "mytheme" - -contentDir: "content/en" - -languages: - en: - weight: 1 - languageName: 
"English" - contentDir: content/en - es: - weight: 2 - languageName: "Español" - contentDir: content/es - fr: - weight: 4 - languageName: "Française" - contentDir: content/fr - - -` - createPage := func(title string) string { - return fmt.Sprintf(`--- -title: %q ---- - -`, title) - } - - b := newTestSitesBuilder(t) - b.WithConfigFile("yaml", config) - - b.WithSourceFile("themes/mytheme/layouts/index.html", `MYTHEME`) - b.WithTemplates("index.html", ` -Lang: {{ .Lang }} -{{ range .Site.RegularPages }} -Page: {{ .RelPermalink }}|{{ .Title -}} -{{ end }} - -`) - b.WithSourceFile("static/hello.txt", `hello`) - b.WithContent("en/_index.md", createPage("en home")) - b.WithContent("es/_index.md", createPage("es home")) - b.WithContent("fr/_index.md", createPage("fr home")) - - for i := 1; i < 3; i++ { - b.WithContent(fmt.Sprintf("en/event/page%d.md", i), createPage(fmt.Sprintf("ev-en%d", i))) - b.WithContent(fmt.Sprintf("es/event/page%d.md", i), createPage(fmt.Sprintf("ev-es%d", i))) - b.WithContent(fmt.Sprintf("fr/event/page%d.md", i), createPage(fmt.Sprintf("ev-fr%d", i))) - b.WithContent(fmt.Sprintf("en/blog/page%d.md", i), createPage(fmt.Sprintf("blog-en%d", i))) - b.WithContent(fmt.Sprintf("es/blog/page%d.md", i), createPage(fmt.Sprintf("blog-es%d", i))) - b.WithContent(fmt.Sprintf("fr/other/page%d.md", i), createPage(fmt.Sprintf("other-fr%d", i))) - } - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/index.html", ` -Lang: en -Page: /blog/page1/|blog-en1 -Page: /blog/page2/|blog-en2 -Page: /event/page1/|ev-en1 -Page: /event/page2/|ev-en2 -`) - - b.AssertFileContent("public/es/index.html", ` -Lang: es -Page: /es/blog/page1/|blog-es1 -Page: /es/blog/page2/|blog-es2 -Page: /es/event/page1/|ev-es1 -Page: /es/event/page2/|ev-es2 -`) - b.AssertFileContent("public/fr/index.html", ` -Lang: fr -Page: /fr/event/page1/|ev-fr1 -Page: /fr/event/page2/|ev-fr2 -Page: /fr/other/page1/|other-fr1 -Page: /fr/other/page2/|other-fr2`) -} - -// Issue 9693 -func TestContentMountMerge(t *testing.T) { - t.Parallel() - - files := ` --- config.toml -- -baseURL = 'https://example.org/' -languageCode = 'en-us' -title = 'Hugo Forum Topic #37225' -theme = 'mytheme' - -disableKinds = ['sitemap','RSS','taxonomy','term'] -defaultContentLanguage = 'en' -defaultContentLanguageInSubdir = true - +[languages] [languages.en] -languageName = 'English' -weight = 1 -[languages.de] -languageName = 'Deutsch' -weight = 2 -[languages.nl] -languageName = 'Nederlands' -weight = 3 - -# EN content -[[module.mounts]] -source = 'content/en' -target = 'content' -lang = 'en' - -# DE content -[[module.mounts]] -source = 'content/de' -target = 'content' -lang = 'de' - -# This fills in the gaps in DE content with EN content -[[module.mounts]] -source = 'content/en' -target = 'content' -lang = 'de' - -# NL content -[[module.mounts]] -source = 'content/nl' -target = 'content' -lang = 'nl' - -# This should fill in the gaps in NL content with EN content -[[module.mounts]] -source = 'content/en' -target = 'content' -lang = 'nl' - --- content/de/_index.md -- ---- -title: "home (de)" ---- --- content/de/p1.md -- ---- -title: "p1 (de)" ---- +weight = 10 +contentDir = "content/en" +[languages.nn] +weight = 20 +contentDir = "content/nn" -- content/en/_index.md -- --- -title: "home (en)" +title: "Home" --- --- content/en/p1.md -- +-- content/nn/_index.md -- --- -title: "p1 (en)" ---- --- content/en/p2.md -- ---- -title: "p2 (en)" ---- --- content/en/p3.md -- ---- -title: "p3 (en)" ---- --- content/nl/_index.md -- ---- -title: "home (nl)" ---- --- 
content/nl/p1.md -- ---- -title: "p1 (nl)" ---- --- content/nl/p3.md -- ---- -title: "p3 (nl)" ---- --- layouts/home.html -- -{{ .Title }}: {{ site.Language.Lang }}: {{ range site.RegularPages }}{{ .Title }}|{{ end }}:END --- themes/mytheme/config.toml -- -[[module.mounts]] -source = 'content/nlt' -target = 'content' -lang = 'nl' --- themes/mytheme/content/nlt/p3.md -- ---- -title: "p3 theme (nl)" ---- --- themes/mytheme/content/nlt/p4.md -- ---- -title: "p4 theme (nl)" +title: "Heim" --- +-- content/en/myfiles/file1.txt -- +file 1 en +-- content/en/myfiles/file2.txt -- +file 2 en +-- content/nn/myfiles/file1.txt -- +file 1 nn +-- layouts/index.html -- +Title: {{ .Title }}| +Len Resources: {{ len .Resources }}| +{{ range $i, $e := .Resources }} +{{ $i }}|{{ .RelPermalink }}|{{ .Content }}| +{{ end }} + ` - - b := NewIntegrationTestBuilder( - IntegrationTestConfig{ - T: t, - TxtarString: files, - }, - ).Build() - - b.AssertFileContent("public/nl/index.html", `home (nl): nl: p1 (nl)|p2 (en)|p3 (nl)|p4 theme (nl)|:END`) - b.AssertFileContent("public/de/index.html", `home (de): de: p1 (de)|p2 (en)|p3 (en)|:END`) - b.AssertFileContent("public/en/index.html", `home (en): en: p1 (en)|p2 (en)|p3 (en)|:END`) - + b := Test(t, files) + b.AssertFileContent("public/en/index.html", "Home", "0|/en/myfiles/file1.txt|file 1 en|\n\n1|/en/myfiles/file2.txt|file 2 en|") + b.AssertFileContent("public/nn/index.html", "Heim", "0|/nn/myfiles/file1.txt|file 1 nn|\n\n1|/en/myfiles/file2.txt|file 2 en|") } diff --git a/hugolib/menu_test.go b/hugolib/menu_test.go index 810b9fe20..77d92d04f 100644 --- a/hugolib/menu_test.go +++ b/hugolib/menu_test.go @@ -161,35 +161,35 @@ menu: b.Build(BuildCfg{}) b.AssertFileContent("public/index.html", - `Default1|0|10|A|/blog/a/|Page(/blog/A.md) - Default1|1|20|B|/blog/b/|Page(/blog/B.md) - Default1|2|30|C|/blog/c/|Page(/blog/C.md) - Default1|3|100|Home|/|Page(/_index.md) + `Default1|0|10|A|/blog/a/|Page(/blog/a) + Default1|1|20|B|/blog/b/|Page(/blog/b) + Default1|2|30|C|/blog/c/|Page(/blog/c) + Default1|3|100|Home|/|Page(/) - ByWeight|0|10|A|/blog/a/|Page(/blog/A.md) - ByWeight|1|20|B|/blog/b/|Page(/blog/B.md) - ByWeight|2|30|C|/blog/c/|Page(/blog/C.md) - ByWeight|3|100|Home|/|Page(/_index.md) + ByWeight|0|10|A|/blog/a/|Page(/blog/a) + ByWeight|1|20|B|/blog/b/|Page(/blog/b) + ByWeight|2|30|C|/blog/c/|Page(/blog/c) + ByWeight|3|100|Home|/|Page(/) - Reverse|0|100|Home|/|Page(/_index.md) - Reverse|1|30|C|/blog/c/|Page(/blog/C.md) - Reverse|2|20|B|/blog/b/|Page(/blog/B.md) - Reverse|3|10|A|/blog/a/|Page(/blog/A.md) + Reverse|0|100|Home|/|Page(/) + Reverse|1|30|C|/blog/c/|Page(/blog/c) + Reverse|2|20|B|/blog/b/|Page(/blog/b) + Reverse|3|10|A|/blog/a/|Page(/blog/a) - Default2|0|10|A|/blog/a/|Page(/blog/A.md) - Default2|1|20|B|/blog/b/|Page(/blog/B.md) - Default2|2|30|C|/blog/c/|Page(/blog/C.md) - Default2|3|100|Home|/|Page(/_index.md) + Default2|0|10|A|/blog/a/|Page(/blog/a) + Default2|1|20|B|/blog/b/|Page(/blog/b) + Default2|2|30|C|/blog/c/|Page(/blog/c) + Default2|3|100|Home|/|Page(/) - ByWeight|0|10|A|/blog/a/|Page(/blog/A.md) - ByWeight|1|20|B|/blog/b/|Page(/blog/B.md) - ByWeight|2|30|C|/blog/c/|Page(/blog/C.md) - ByWeight|3|100|Home|/|Page(/_index.md) + ByWeight|0|10|A|/blog/a/|Page(/blog/a) + ByWeight|1|20|B|/blog/b/|Page(/blog/b) + ByWeight|2|30|C|/blog/c/|Page(/blog/c) + ByWeight|3|100|Home|/|Page(/) - Default3|0|10|A|/blog/a/|Page(/blog/A.md) - Default3|1|20|B|/blog/b/|Page(/blog/B.md) - Default3|2|30|C|/blog/c/|Page(/blog/C.md) - Default3|3|100|Home|/|Page(/_index.md)`, + 
Default3|0|10|A|/blog/a/|Page(/blog/a) + Default3|1|20|B|/blog/b/|Page(/blog/b) + Default3|2|30|C|/blog/c/|Page(/blog/c) + Default3|3|100|Home|/|Page(/)`, ) } @@ -494,34 +494,34 @@ title: "Contact: With No Menu Defined" b.AssertFileContent("public/index.html", ` Main: 5 -Home|HasMenuCurrent: false|Page: Page(/_index.md) -Blog|HasMenuCurrent: false|Page: Page(/blog/_index.md) -My Post 2: With Menu Defined|HasMenuCurrent: false|Page: Page(/blog/post2.md) -My Post 3|HasMenuCurrent: false|Page: Page(/blog/post3.md) -Contact Us|HasMenuCurrent: false|Page: Page(/contact.md) +Home|HasMenuCurrent: false|Page: Page(/) +Blog|HasMenuCurrent: false|Page: Page(/blog) +My Post 2: With Menu Defined|HasMenuCurrent: false|Page: Page(/blog/post2) +My Post 3|HasMenuCurrent: false|Page: Page(/blog/post3) +Contact Us|HasMenuCurrent: false|Page: Page(/contact) `) b.AssertFileContent("public/blog/post1/index.html", ` -Home|HasMenuCurrent: false|Page: Page(/_index.md) -Blog|HasMenuCurrent: true|Page: Page(/blog/_index.md) +Home|HasMenuCurrent: false|Page: Page(/) +Blog|HasMenuCurrent: true|Page: Page(/blog) `) b.AssertFileContent("public/blog/post2/index.html", ` -Home|HasMenuCurrent: false|Page: Page(/_index.md) -Blog|HasMenuCurrent: true|Page: Page(/blog/_index.md) -Blog|IsMenuCurrent: false|Page: Page(/blog/_index.md) +Home|HasMenuCurrent: false|Page: Page(/) +Blog|HasMenuCurrent: true|Page: Page(/blog) +Blog|IsMenuCurrent: false|Page: Page(/blog) `) b.AssertFileContent("public/blog/post3/index.html", ` -Home|HasMenuCurrent: false|Page: Page(/_index.md) -Blog|HasMenuCurrent: true|Page: Page(/blog/_index.md) +Home|HasMenuCurrent: false|Page: Page(/) +Blog|HasMenuCurrent: true|Page: Page(/blog) `) b.AssertFileContent("public/contact/index.html", ` -Contact Us|HasMenuCurrent: false|Page: Page(/contact.md) -Contact Us|IsMenuCurrent: true|Page: Page(/contact.md) -Blog|HasMenuCurrent: false|Page: Page(/blog/_index.md) -Blog|IsMenuCurrent: false|Page: Page(/blog/_index.md) +Contact Us|HasMenuCurrent: false|Page: Page(/contact) +Contact Us|IsMenuCurrent: true|Page: Page(/contact) +Blog|HasMenuCurrent: false|Page: Page(/blog) +Blog|IsMenuCurrent: false|Page: Page(/blog) `) } @@ -619,7 +619,6 @@ Menu Item: {{ $i }}: {{ .Pre }}{{ .Name }}{{ .Post }}|{{ .URL }}| b.AssertFileContent("public/index.html", ` Menu Item: 0: Home|/| `) - } // Issue #11062 @@ -651,5 +650,4 @@ Menu Item: {{ $i }}|{{ .URL }}| b.AssertFileContent("public/index.html", ` Menu Item: 0|/foo/posts| `) - } diff --git a/hugolib/mount_filters_test.go b/hugolib/mount_filters_test.go index 4f6a448d2..16b062ec6 100644 --- a/hugolib/mount_filters_test.go +++ b/hugolib/mount_filters_test.go @@ -36,7 +36,7 @@ func TestMountFilters(t *testing.T) { defer clean() for _, component := range files.ComponentFolders { - b.Assert(os.MkdirAll(filepath.Join(workingDir, component), 0777), qt.IsNil) + b.Assert(os.MkdirAll(filepath.Join(workingDir, component), 0o777), qt.IsNil) } b.WithWorkingDir(workingDir).WithLogger(loggers.NewDefault()) b.WithConfigFile("toml", fmt.Sprintf(` @@ -109,10 +109,9 @@ Resources: {{ resources.Match "**.js" }} b.AssertFileContent(filepath.Join("public", "index.html"), ` Data: map[mydata:map[b:map[b1:bval]]]:END Template: false -Resource1: js/include.js:END +Resource1: /js/include.js:END Resource2: :END Resource3: :END -Resources: [js/include.js] +Resources: [/js/include.js] `) - } diff --git a/hugolib/page.go b/hugolib/page.go index bf5e19ac4..f8ec5e225 100644 --- a/hugolib/page.go +++ b/hugolib/page.go @@ -14,16 +14,14 @@ package hugolib import ( 
- "bytes" "context" "fmt" - "path" - "path/filepath" - "sort" - "strings" - - "go.uber.org/atomic" + "strconv" + "sync" + "sync/atomic" + "github.com/gohugoio/hugo/hugofs" + "github.com/gohugoio/hugo/hugolib/doctree" "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/media" "github.com/gohugoio/hugo/output" @@ -35,29 +33,25 @@ import ( "github.com/gohugoio/hugo/tpl" - "github.com/gohugoio/hugo/hugofs/files" - - "github.com/gohugoio/hugo/helpers" - "github.com/gohugoio/hugo/common/herrors" - "github.com/gohugoio/hugo/parser/metadecoders" - - "github.com/gohugoio/hugo/parser/pageparser" + "github.com/gohugoio/hugo/common/maps" "github.com/gohugoio/hugo/source" "github.com/gohugoio/hugo/common/collections" "github.com/gohugoio/hugo/common/text" - "github.com/gohugoio/hugo/resources" "github.com/gohugoio/hugo/resources/kinds" "github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/resources/resource" ) var ( - _ page.Page = (*pageState)(nil) - _ collections.Grouper = (*pageState)(nil) - _ collections.Slicer = (*pageState)(nil) + _ page.Page = (*pageState)(nil) + _ collections.Grouper = (*pageState)(nil) + _ collections.Slicer = (*pageState)(nil) + _ identity.DependencyManagerScopedProvider = (*pageState)(nil) + _ contentNodeI = (*pageState)(nil) + _ pageContext = (*pageState)(nil) ) var ( @@ -74,15 +68,6 @@ type pageContext interface { posOffset(offset int) text.Position wrapError(err error) error getContentConverter() converter.Converter - addDependency(dep identity.Provider) -} - -// wrapErr adds some context to the given error if possible. -func wrapErr(err error, ctx any) error { - if pc, ok := ctx.(pageContext); ok { - return pc.wrapError(err) - } - return err } type pageSiteAdapter struct { @@ -90,20 +75,9 @@ type pageSiteAdapter struct { s *Site } -func (pa pageSiteAdapter) GetPageWithTemplateInfo(info tpl.Info, ref string) (page.Page, error) { - p, err := pa.GetPage(ref) - if p != nil { - // Track pages referenced by templates/shortcodes - // when in server mode. - if im, ok := info.(identity.Manager); ok { - im.Add(p) - } - } - return p, err -} - func (pa pageSiteAdapter) GetPage(ref string) (page.Page, error) { - p, err := pa.s.getPageNew(pa.p, ref) + p, err := pa.s.getPage(pa.p, ref) + if p == nil { // The nil struct has meaning in some situations, mostly to avoid breaking // existing sites doing $nilpage.IsDescendant($p), which will always return @@ -116,7 +90,7 @@ func (pa pageSiteAdapter) GetPage(ref string) (page.Page, error) { type pageState struct { // Incremented for each new page created. // Note that this will change between builds for a given Page. - id int + pid uint64 // This slice will be of same length as the number of global slice of output // formats (for all sites). @@ -126,16 +100,69 @@ type pageState struct { pageOutputTemplateVariationsState *atomic.Uint32 // This will be shifted out when we start to render a new output format. + pageOutputIdx int *pageOutput // Common for all output formats. 
*pageCommon + + resource.Staler + dependencyManager identity.Manager + resourcesPublishInit *sync.Once +} + +func (p *pageState) IdentifierBase() string { + return p.Path() +} + +func (p *pageState) GetIdentity() identity.Identity { + return p +} + +func (p *pageState) ForEeachIdentity(f func(identity.Identity) bool) { + f(p) +} + +func (p *pageState) GetDependencyManager() identity.Manager { + return p.dependencyManager +} + +func (p *pageState) GetDependencyManagerForScope(scope int) identity.Manager { + switch scope { + case pageDependencyScopeDefault: + return p.dependencyManagerOutput + case pageDependencyScopeGlobal: + return p.dependencyManager + default: + return identity.NopManager + } +} + +func (p *pageState) Key() string { + return "page-" + strconv.FormatUint(p.pid, 10) +} + +func (p *pageState) resetBuildState() { + p.Scratcher = maps.NewScratcher() } func (p *pageState) reusePageOutputContent() bool { return p.pageOutputTemplateVariationsState.Load() == 1 } +func (po *pageState) isRenderedAny() bool { + for _, o := range po.pageOutputs { + if o.isRendered() { + return true + } + } + return false +} + +func (p *pageState) isContentNodeBranch() bool { + return p.IsNode() +} + func (p *pageState) Err() resource.ResourceError { return nil } @@ -151,11 +178,6 @@ func (p *pageState) Eq(other any) bool { return p == pp } -// GetIdentity is for internal use. -func (p *pageState) GetIdentity() identity.Identity { - return identity.NewPathIdentity(files.ComponentFolderContent, filepath.FromSlash(p.Pathc())) -} - func (p *pageState) HeadingsFiltered(context.Context) tableofcontents.Headings { return nil } @@ -175,10 +197,11 @@ func (p *pageHeadingsFiltered) page() page.Page { // For internal use by the related content feature. func (p *pageState) ApplyFilterToHeadings(ctx context.Context, fn func(*tableofcontents.Heading) bool) related.Document { - if p.pageOutput.cp.tableOfContents == nil { - return p + r, err := p.content.contentToC(ctx, p.pageOutput.pco) + if err != nil { + panic(err) } - headings := p.pageOutput.cp.tableOfContents.Headings.FilterBy(fn) + headings := r.tableOfContents.Headings.FilterBy(fn) return &pageHeadingsFiltered{ pageState: p, headings: headings, @@ -196,72 +219,28 @@ func (p *pageState) CodeOwners() []string { // GetTerms gets the terms defined on this page in the given taxonomy. // The pages returned will be ordered according to the front matter. 
func (p *pageState) GetTerms(taxonomy string) page.Pages { - if p.treeRef == nil { - return nil - } - - m := p.s.pageMap - - taxonomy = strings.ToLower(taxonomy) - prefix := cleanSectionTreeKey(taxonomy) - self := strings.TrimPrefix(p.treeRef.key, "/") - - var pas page.Pages - - m.taxonomies.WalkQuery(pageMapQuery{Prefix: prefix}, func(s string, n *contentNode) bool { - key := s + self - if tn, found := m.taxonomyEntries.Get(key); found { - vi := tn.(*contentNode).viewInfo - pas = append(pas, pageWithOrdinal{pageState: n.p, ordinal: vi.ordinal}) - } - return false - }) - - page.SortByDefault(pas) - - return pas + return p.s.pageMap.getTermsForPageInTaxonomy(p.Path(), taxonomy) } func (p *pageState) MarshalJSON() ([]byte, error) { return page.MarshalPageToJSON(p) } -func (p *pageState) getPages() page.Pages { - b := p.bucket - if b == nil { - return nil - } - return b.getPages() -} - -func (p *pageState) getPagesRecursive() page.Pages { - b := p.bucket - if b == nil { - return nil - } - return b.getPagesRecursive() -} - -func (p *pageState) getPagesAndSections() page.Pages { - b := p.bucket - if b == nil { - return nil - } - return b.getPagesAndSections() -} - func (p *pageState) RegularPagesRecursive() page.Pages { - p.regularPagesRecursiveInit.Do(func() { - var pages page.Pages - switch p.Kind() { - case kinds.KindSection, kinds.KindHome: - pages = p.getPagesRecursive() - default: - pages = p.RegularPages() - } - p.regularPagesRecursive = pages - }) - return p.regularPagesRecursive + switch p.Kind() { + case kinds.KindSection, kinds.KindHome: + return p.s.pageMap.getPagesInSection( + pageMapQueryPagesInSection{ + pageMapQueryPagesBelowPath: pageMapQueryPagesBelowPath{ + Path: p.Path(), + Include: pagePredicates.ShouldListLocal.And(pagePredicates.KindPage), + }, + Recursive: true, + }, + ) + default: + return p.RegularPages() + } } func (p *pageState) PagesRecursive() page.Pages { @@ -269,110 +248,95 @@ func (p *pageState) PagesRecursive() page.Pages { } func (p *pageState) RegularPages() page.Pages { - p.regularPagesInit.Do(func() { - var pages page.Pages - - switch p.Kind() { - case kinds.KindPage: - case kinds.KindSection, kinds.KindHome, kinds.KindTaxonomy: - pages = p.getPages() - case kinds.KindTerm: - all := p.Pages() - for _, p := range all { - if p.IsPage() { - pages = append(pages, p) - } - } - default: - pages = p.s.RegularPages() - } - - p.regularPages = pages - }) - - return p.regularPages + switch p.Kind() { + case kinds.KindPage: + case kinds.KindSection, kinds.KindHome, kinds.KindTaxonomy: + return p.s.pageMap.getPagesInSection( + pageMapQueryPagesInSection{ + pageMapQueryPagesBelowPath: pageMapQueryPagesBelowPath{ + Path: p.Path(), + Include: pagePredicates.ShouldListLocal.And(pagePredicates.KindPage), + }, + }, + ) + case kinds.KindTerm: + return p.s.pageMap.getPagesWithTerm( + pageMapQueryPagesBelowPath{ + Path: p.Path(), + Include: pagePredicates.ShouldListLocal.And(pagePredicates.KindPage), + }, + ) + default: + return p.s.RegularPages() + } + return nil } func (p *pageState) Pages() page.Pages { - p.pagesInit.Do(func() { - var pages page.Pages - - switch p.Kind() { - case kinds.KindPage: - case kinds.KindSection, kinds.KindHome: - pages = p.getPagesAndSections() - case kinds.KindTerm: - b := p.treeRef.n - viewInfo := b.viewInfo - taxonomy := p.s.Taxonomies()[viewInfo.name.plural].Get(viewInfo.termKey) - pages = taxonomy.Pages() - case kinds.KindTaxonomy: - pages = p.bucket.getTaxonomies() - default: - pages = p.s.Pages() - } - - p.pages = pages - }) - - return p.pages 
+ switch p.Kind() { + case kinds.KindPage: + case kinds.KindSection, kinds.KindHome: + return p.s.pageMap.getPagesInSection( + pageMapQueryPagesInSection{ + pageMapQueryPagesBelowPath: pageMapQueryPagesBelowPath{ + Path: p.Path(), + KeyPart: "page-section", + Include: pagePredicates.ShouldListLocal.And( + pagePredicates.KindPage.Or(pagePredicates.KindSection), + ), + }, + }, + ) + case kinds.KindTerm: + return p.s.pageMap.getPagesWithTerm( + pageMapQueryPagesBelowPath{ + Path: p.Path(), + }, + ) + case kinds.KindTaxonomy: + return p.s.pageMap.getPagesInSection( + pageMapQueryPagesInSection{ + pageMapQueryPagesBelowPath: pageMapQueryPagesBelowPath{ + Path: p.Path(), + KeyPart: "term", + Include: pagePredicates.ShouldListLocal.And(pagePredicates.KindTerm), + }, + Recursive: true, + }, + ) + default: + return p.s.Pages() + } + return nil } // RawContent returns the un-rendered source content without // any leading front matter. func (p *pageState) RawContent() string { - if p.source.parsed == nil { + if p.content.parseInfo.itemsStep2 == nil { return "" } - start := p.source.posMainContent + start := p.content.parseInfo.posMainContent if start == -1 { start = 0 } - - return string(p.source.parsed.Input()[start:]) -} - -func (p *pageState) sortResources() { - sort.SliceStable(p.resources, func(i, j int) bool { - ri, rj := p.resources[i], p.resources[j] - if ri.ResourceType() < rj.ResourceType() { - return true - } - - p1, ok1 := ri.(page.Page) - p2, ok2 := rj.(page.Page) - - if ok1 != ok2 { - return ok2 - } - - if ok1 { - return page.DefaultPageSort(p1, p2) - } - - // Make sure not to use RelPermalink or any of the other methods that - // trigger lazy publishing. - return ri.Name() < rj.Name() - }) + source, err := p.content.contentSource() + if err != nil { + panic(err) + } + return string(source[start:]) } func (p *pageState) Resources() resource.Resources { - p.resourcesInit.Do(func() { - p.sortResources() - if len(p.m.resourcesMetadata) > 0 { - resources.AssignMetadata(p.m.resourcesMetadata, p.resources...) - p.sortResources() - } - }) - return p.resources + return p.s.pageMap.getOrCreateResourcesForPage(p) } func (p *pageState) HasShortcode(name string) bool { - if p.shortcodeState == nil { + if p.content.shortcodeState == nil { return false } - return p.shortcodeState.hasName(name) + return p.content.shortcodeState.hasName(name) } func (p *pageState) Site() page.Site { @@ -380,47 +344,72 @@ func (p *pageState) Site() page.Site { } func (p *pageState) String() string { - if sourceRef := p.sourceRef(); sourceRef != "" { - return fmt.Sprintf("Page(%s)", sourceRef) - } - return fmt.Sprintf("Page(%q)", p.Title()) + return fmt.Sprintf("Page(%s)", p.Path()) } // IsTranslated returns whether this content file is translated to // other language(s). func (p *pageState) IsTranslated() bool { - p.s.h.init.translations.Do(context.Background()) - return len(p.translations) > 0 + return len(p.Translations()) > 0 } -// TranslationKey returns the key used to map language translations of this page. -// It will use the translationKey set in front matter if set, or the content path and -// filename (excluding any language code and extension), e.g. "about/index". -// The Page Kind is always prepended. +// TranslationKey returns the key used to identify a translation of this content. 
func (p *pageState) TranslationKey() string { - p.translationKeyInit.Do(func() { - if p.m.translationKey != "" { - p.translationKey = p.Kind() + "/" + p.m.translationKey - } else if p.IsPage() && !p.File().IsZero() { - p.translationKey = path.Join(p.Kind(), filepath.ToSlash(p.File().Dir()), p.File().TranslationBaseName()) - } else if p.IsNode() { - p.translationKey = path.Join(p.Kind(), p.SectionsPath()) - } - }) - - return p.translationKey + if p.m.translationKey != "" { + return p.m.translationKey + } + return p.Path() } // AllTranslations returns all translations, including the current Page. func (p *pageState) AllTranslations() page.Pages { - p.s.h.init.translations.Do(context.Background()) - return p.allTranslations + key := p.Path() + "/" + "translations-all" + pages, err := p.s.pageMap.getOrCreatePagesFromCache(key, func(string) (page.Pages, error) { + if p.m.translationKey != "" { + // translationKey set by user. + pas, _ := p.s.h.translationKeyPages.Get(p.m.translationKey) + pasc := make(page.Pages, len(pas)) + copy(pasc, pas) + page.SortByLanguage(pasc) + return pasc, nil + } + var pas page.Pages + p.s.pageMap.treePages.ForEeachInDimension(p.Path(), doctree.DimensionLanguage.Index(), + func(n contentNodeI) bool { + if n != nil { + pas = append(pas, n.(page.Page)) + } + return false + }, + ) + + pas = pagePredicates.ShouldLink.Filter(pas) + page.SortByLanguage(pas) + return pas, nil + }) + if err != nil { + panic(err) + } + + return pages } // Translations returns the translations excluding the current Page. func (p *pageState) Translations() page.Pages { - p.s.h.init.translations.Do(context.Background()) - return p.translations + key := p.Path() + "/" + "translations" + pages, err := p.s.pageMap.getOrCreatePagesFromCache(key, func(string) (page.Pages, error) { + var pas page.Pages + for _, pp := range p.AllTranslations() { + if !pp.Eq(p) { + pas = append(pas, pp) + } + } + return pas, nil + }) + if err != nil { + panic(err) + } + return pages } func (ps *pageState) initCommonProviders(pp pagePaths) error { @@ -450,8 +439,12 @@ func (p *pageState) getLayoutDescriptor() layouts.LayoutDescriptor { section = sections[0] } case kinds.KindTaxonomy, kinds.KindTerm: - b := p.getTreeRef().n - section = b.viewInfo.name.singular + + if p.m.singular != "" { + section = p.m.singular + } else if len(sections) > 0 { + section = sections[0] + } default: } @@ -470,14 +463,6 @@ func (p *pageState) getLayoutDescriptor() layouts.LayoutDescriptor { func (p *pageState) resolveTemplate(layouts ...string) (tpl.Template, bool, error) { f := p.outputFormat() - if len(layouts) == 0 { - selfLayout := p.selfLayoutForOutput(f) - if selfLayout != "" { - templ, found := p.s.Tmpl().Lookup(selfLayout) - return templ, found, nil - } - } - d := p.getLayoutDescriptor() if len(layouts) > 0 { @@ -488,15 +473,6 @@ func (p *pageState) resolveTemplate(layouts ...string) (tpl.Template, bool, erro return p.s.Tmpl().LookupLayout(d, f) } -// This is serialized -func (p *pageState) initOutputFormat(isRenderingSite bool, idx int) error { - if err := p.shiftToOutputFormat(isRenderingSite, idx); err != nil { - return err - } - - return nil -} - // Must be run after the site section tree etc. is built and ready. 
func (p *pageState) initPage() error { if _, err := p.init.Do(context.Background()); err != nil { @@ -505,12 +481,11 @@ func (p *pageState) initPage() error { return nil } -func (p *pageState) renderResources() (err error) { +func (p *pageState) renderResources() error { + var initErr error + p.resourcesPublishInit.Do(func() { - var toBeDeleted []int - - for i, r := range p.Resources() { - + for _, r := range p.Resources() { if _, ok := r.(page.Page); ok { // Pages gets rendered with the owning page but we count them here. p.s.PathSpec.ProcessingStats.Incr(&p.s.PathSpec.ProcessingStats.Pages) @@ -519,50 +494,21 @@ func (p *pageState) renderResources() (err error) { src, ok := r.(resource.Source) if !ok { - err = fmt.Errorf("Resource %T does not support resource.Source", src) + initErr = fmt.Errorf("resource %T does not support resource.Source", src) return } if err := src.Publish(); err != nil { - if herrors.IsNotExist(err) { - // The resource has been deleted from the file system. - // This should be extremely rare, but can happen on live reload in server - // mode when the same resource is member of different page bundles. - toBeDeleted = append(toBeDeleted, i) - } else { + if !herrors.IsNotExist(err) { p.s.Log.Errorf("Failed to publish Resource for page %q: %s", p.pathOrTitle(), err) } } else { p.s.PathSpec.ProcessingStats.Incr(&p.s.PathSpec.ProcessingStats.Files) } } - - for _, i := range toBeDeleted { - p.deleteResource(i) - } }) - return -} - -func (p *pageState) deleteResource(i int) { - p.resources = append(p.resources[:i], p.resources[i+1:]...) -} - -func (p *pageState) getTargetPaths() page.TargetPaths { - return p.targetPaths() -} - -func (p *pageState) setTranslations(pages page.Pages) { - p.allTranslations = pages - page.SortByLanguage(p.allTranslations) - translations := make(page.Pages, 0) - for _, t := range p.allTranslations { - if !t.Eq(p) { - translations = append(translations, t) - } - } - p.translations = translations + return initErr } func (p *pageState) AlternativeOutputFormats() page.OutputFormats { @@ -588,229 +534,39 @@ var defaultRenderStringOpts = renderStringOpts{ Markup: "", // Will inherit the page's value when not set. } -func (p *pageState) addDependency(dep identity.Provider) { - if !p.s.watching() || p.pageOutput.cp == nil { - return - } - p.pageOutput.cp.dependencyTracker.Add(dep) -} - -// wrapError adds some more context to the given error if possible/needed -func (p *pageState) wrapError(err error) error { +func (p *pageMeta) wrapError(err error) error { if err == nil { panic("wrapError with nil") } - if p.File().IsZero() { + if p.File() == nil { // No more details to add. - return fmt.Errorf("%q: %w", p.Pathc(), err) + return fmt.Errorf("%q: %w", p.Path(), err) } - filename := p.File().Filename() + return hugofs.AddFileInfoToError(err, p.File().FileInfo(), p.s.SourceSpec.Fs.Source) +} - // Check if it's already added. 
- for _, ferr := range herrors.UnwrapFileErrors(err) { - errfilename := ferr.Position().Filename - if errfilename == filename { - if ferr.ErrorContext() == nil { - f, ioerr := p.s.SourceSpec.Fs.Source.Open(filename) - if ioerr != nil { - return err - } - defer f.Close() - ferr.UpdateContent(f, nil) - } - return err - } - } - - lineMatcher := herrors.NopLineMatcher - - if textSegmentErr, ok := err.(*herrors.TextSegmentError); ok { - lineMatcher = herrors.ContainsMatcher(textSegmentErr.Segment) - } - - return herrors.NewFileErrorFromFile(err, filename, p.s.SourceSpec.Fs.Source, lineMatcher) +// wrapError adds some more context to the given error if possible/needed +func (p *pageState) wrapError(err error) error { + return p.m.wrapError(err) } func (p *pageState) getContentConverter() converter.Converter { var err error - p.m.contentConverterInit.Do(func() { + p.contentConverterInit.Do(func() { markup := p.m.markup if markup == "html" { // Only used for shortcode inner content. markup = "markdown" } - p.m.contentConverter, err = p.m.newContentConverter(p, markup) + p.contentConverter, err = p.m.newContentConverter(p, markup) }) if err != nil { p.s.Log.Errorln("Failed to create content converter:", err) } - return p.m.contentConverter -} - -func (p *pageState) mapContent(bucket *pagesMapBucket, meta *pageMeta) error { - p.cmap = &pageContentMap{ - items: make([]any, 0, 20), - } - - return p.mapContentForResult( - p.source.parsed, - p.shortcodeState, - p.cmap, - meta.markup, - func(m map[string]interface{}) error { - return meta.setMetadata(bucket, p, m) - }, - ) -} - -func (p *pageState) mapContentForResult( - result pageparser.Result, - s *shortcodeHandler, - rn *pageContentMap, - markup string, - withFrontMatter func(map[string]any) error, -) error { - iter := result.Iterator() - - fail := func(err error, i pageparser.Item) error { - if fe, ok := err.(herrors.FileError); ok { - return fe - } - return p.parseError(err, result.Input(), i.Pos()) - } - - // the parser is guaranteed to return items in proper order or fail, so … - // … it's safe to keep some "global" state - var currShortcode shortcode - var ordinal int - var frontMatterSet bool - -Loop: - for { - it := iter.Next() - - switch { - case it.Type == pageparser.TypeIgnore: - case it.IsFrontMatter(): - f := pageparser.FormatFromFrontMatterType(it.Type) - m, err := metadecoders.Default.UnmarshalToMap(it.Val(result.Input()), f) - if err != nil { - if fe, ok := err.(herrors.FileError); ok { - pos := fe.Position() - // Apply the error to the content file. - pos.Filename = p.File().Filename() - // Offset the starting position of front matter. - offset := iter.LineNumber(result.Input()) - 1 - if f == metadecoders.YAML { - offset -= 1 - } - pos.LineNumber += offset - - fe.UpdatePosition(pos) - - return fe - } else { - return err - } - } - - if withFrontMatter != nil { - if err := withFrontMatter(m); err != nil { - return err - } - } - - frontMatterSet = true - - next := iter.Peek() - p.source.posMainContent = next.Pos() - - if !p.s.shouldBuild(p) { - // Nothing more to do. 
- return nil - } - - case it.Type == pageparser.TypeLeadSummaryDivider: - posBody := -1 - f := func(item pageparser.Item) bool { - if posBody == -1 && !item.IsDone() { - posBody = item.Pos() - } - - if item.IsNonWhitespace(result.Input()) { - p.truncated = true - - // Done - return false - } - return true - } - iter.PeekWalk(f) - - p.source.posSummaryEnd = it.Pos() - p.source.posBodyStart = posBody - p.source.hasSummaryDivider = true - - if markup != "html" { - // The content will be rendered by Goldmark or similar, - // and we need to track the summary. - rn.AddReplacement(internalSummaryDividerPre, it) - } - - // Handle shortcode - case it.IsLeftShortcodeDelim(): - // let extractShortcode handle left delim (will do so recursively) - iter.Backup() - - currShortcode, err := s.extractShortcode(ordinal, 0, result.Input(), iter) - if err != nil { - return fail(err, it) - } - - currShortcode.pos = it.Pos() - currShortcode.length = iter.Current().Pos() - it.Pos() - if currShortcode.placeholder == "" { - currShortcode.placeholder = createShortcodePlaceholder("s", p.id, currShortcode.ordinal) - } - - if currShortcode.name != "" { - s.addName(currShortcode.name) - } - - if currShortcode.params == nil { - var s []string - currShortcode.params = s - } - - currShortcode.placeholder = createShortcodePlaceholder("s", p.id, ordinal) - ordinal++ - s.shortcodes = append(s.shortcodes, currShortcode) - - rn.AddShortcode(currShortcode) - case it.IsEOF(): - break Loop - case it.IsError(): - err := fail(it.Err, it) - currShortcode.err = err - return err - - default: - rn.AddBytes(it) - } - } - - if !frontMatterSet && withFrontMatter != nil { - // Page content without front matter. Assign default front matter from - // cascades etc. - if err := withFrontMatter(nil); err != nil { - return err - } - } - - return nil + return p.contentConverter } func (p *pageState) errorf(err error, format string, a ...any) error { @@ -835,47 +591,33 @@ func (p *pageState) outputFormat() (f output.Format) { } func (p *pageState) parseError(err error, input []byte, offset int) error { - pos := p.posFromInput(input, offset) + pos := posFromInput("", input, offset) return herrors.NewFileErrorFromName(err, p.File().Filename()).UpdatePosition(pos) } func (p *pageState) pathOrTitle() string { - if !p.File().IsZero() { + if p.File() != nil { return p.File().Filename() } - if p.Pathc() != "" { - return p.Pathc() + if p.Path() != "" { + return p.Path() } return p.Title() } func (p *pageState) posFromInput(input []byte, offset int) text.Position { - if offset < 0 { - return text.Position{ - Filename: p.pathOrTitle(), - } - } - lf := []byte("\n") - input = input[:offset] - lineNumber := bytes.Count(input, lf) + 1 - endOfLastLine := bytes.LastIndex(input, lf) - - return text.Position{ - Filename: p.pathOrTitle(), - LineNumber: lineNumber, - ColumnNumber: offset - endOfLastLine, - Offset: offset, - } + return posFromInput(p.pathOrTitle(), input, offset) } func (p *pageState) posOffset(offset int) text.Position { - return p.posFromInput(p.source.parsed.Input(), offset) + return p.posFromInput(p.content.mustSource(), offset) } // shiftToOutputFormat is serialized. The output format idx refers to the // full set of output formats for all sites. +// This is serialized. 
func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error { if err := p.initPage(); err != nil { return err @@ -885,6 +627,7 @@ func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error { idx = 0 } + p.pageOutputIdx = idx p.pageOutput = p.pageOutputs[idx] if p.pageOutput == nil { panic(fmt.Sprintf("pageOutput is nil for output idx %d", idx)) @@ -897,7 +640,7 @@ func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error { } if isRenderingSite { - cp := p.pageOutput.cp + cp := p.pageOutput.pco if cp == nil && p.reusePageOutputContent() { // Look for content to reuse. for i := 0; i < len(p.pageOutputs); i++ { @@ -906,8 +649,8 @@ func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error { } po := p.pageOutputs[i] - if po.cp != nil { - cp = po.cp + if po.pco != nil { + cp = po.pco break } } @@ -915,12 +658,12 @@ func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error { if cp == nil { var err error - cp, err = newPageContentOutput(p, p.pageOutput) + cp, err = newPageContentOutput(p.pageOutput) if err != nil { return err } } - p.pageOutput.initContentProvider(cp) + p.pageOutput.setContentProvider(cp) } else { // We attempt to assign pageContentOutputs while preparing each site // for rendering and before rendering each site. This lets us share @@ -932,7 +675,7 @@ func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error { lcp.Reset() } else { lcp = page.NewLazyContentProvider(func() (page.OutputFormatContentProvider, error) { - cp, err := newPageContentOutput(p, p.pageOutput) + cp, err := newPageContentOutput(p.pageOutput) if err != nil { return nil, err } @@ -948,48 +691,6 @@ func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error { return nil } -// sourceRef returns the reference used by GetPage and ref/relref shortcodes to refer to -// this page. It is prefixed with a "/". -// -// For pages that have a source file, it is returns the path to this file as an -// absolute path rooted in this site's content dir. -// For pages that do not (sections without content page etc.), it returns the -// virtual path, consistent with where you would add a source file. 
-func (p *pageState) sourceRef() string { - if !p.File().IsZero() { - sourcePath := p.File().Path() - if sourcePath != "" { - return "/" + filepath.ToSlash(sourcePath) - } - } - - if len(p.SectionsEntries()) > 0 { - // no backing file, return the virtual source path - return "/" + p.SectionsPath() - } - - return "" -} - -func (s *Site) sectionsFromFile(fi source.File) []string { - dirname := fi.Dir() - - dirname = strings.Trim(dirname, helpers.FilePathSeparator) - if dirname == "" { - return nil - } - parts := strings.Split(dirname, helpers.FilePathSeparator) - - if fii, ok := fi.(*fileInfo); ok { - if len(parts) > 0 && fii.FileInfo().Meta().Classifier == files.ContentClassLeaf { - // my-section/mybundle/index.md => my-section - return parts[:len(parts)-1] - } - } - - return parts -} - var ( _ page.Page = (*pageWithOrdinal)(nil) _ collections.Order = (*pageWithOrdinal)(nil) @@ -1008,3 +709,16 @@ func (p pageWithOrdinal) Ordinal() int { func (p pageWithOrdinal) page() page.Page { return p.pageState } + +type pageWithWeight0 struct { + weight0 int + *pageState +} + +func (p pageWithWeight0) Weight0() int { + return p.weight0 +} + +func (p pageWithWeight0) page() page.Page { + return p.pageState +} diff --git a/hugolib/page__common.go b/hugolib/page__common.go index 0069bdf89..0881affe7 100644 --- a/hugolib/page__common.go +++ b/hugolib/page__common.go @@ -19,6 +19,7 @@ import ( "github.com/gohugoio/hugo/common/maps" "github.com/gohugoio/hugo/compare" "github.com/gohugoio/hugo/lazy" + "github.com/gohugoio/hugo/markup/converter" "github.com/gohugoio/hugo/navigation" "github.com/gohugoio/hugo/output/layouts" "github.com/gohugoio/hugo/resources/page" @@ -26,14 +27,6 @@ import ( "github.com/gohugoio/hugo/source" ) -type treeRefProvider interface { - getTreeRef() *contentTreeRef -} - -func (p *pageCommon) getTreeRef() *contentTreeRef { - return p.treeRef -} - type nextPrevProvider interface { getNextPrev() *nextPrev } @@ -56,9 +49,6 @@ type pageCommon struct { sWrapped page.Site - bucket *pagesMapBucket - treeRef *contentTreeRef - // Lazily initialized dependencies. init *lazy.Init @@ -87,7 +77,7 @@ type pageCommon struct { page.TreeProvider resource.LanguageProvider resource.ResourceDataProvider - resource.ResourceMetaProvider + resource.ResourceNameTitleProvider resource.ResourceParamsProvider resource.ResourceTypeProvider resource.MediaTypeProvider @@ -101,11 +91,8 @@ type pageCommon struct { layoutDescriptor layouts.LayoutDescriptor layoutDescriptorInit sync.Once - // The parsed page content. - pageContent - - // Keeps track of the shortcodes on a page. - shortcodeState *shortcodeHandler + // The source and the parsed page content. + content *cachedContent // Set if feature enabled and this is in a Git repo. gitInfo source.GitInfo @@ -121,38 +108,10 @@ type pageCommon struct { // Internal use page.InternalDependencies - // The children. Regular pages will have none. - *pagePages - - // Any bundled resources - resources resource.Resources - resourcesInit sync.Once - resourcesPublishInit sync.Once - - translations page.Pages - allTranslations page.Pages - - // Calculated an cached translation mapping key - translationKey string - translationKeyInit sync.Once - - // Will only be set for bundled pages. - parent *pageState - - // Set in fast render mode to force render a given page. 
- forceRender bool + contentConverterInit sync.Once + contentConverter converter.Converter } func (p *pageCommon) Store() *maps.Scratch { return p.store } - -type pagePages struct { - pagesInit sync.Once - pages page.Pages - - regularPagesInit sync.Once - regularPages page.Pages - regularPagesRecursiveInit sync.Once - regularPagesRecursive page.Pages -} diff --git a/hugolib/page__content.go b/hugolib/page__content.go index 89c38bd84..64ce83f0e 100644 --- a/hugolib/page__content.go +++ b/hugolib/page__content.go @@ -14,36 +14,147 @@ package hugolib import ( + "bytes" "context" + "errors" "fmt" + "html/template" + "io" + "strings" + "unicode/utf8" - "github.com/gohugoio/hugo/output" + "github.com/bep/logg" + "github.com/gohugoio/hugo/common/hcontext" + "github.com/gohugoio/hugo/common/herrors" + "github.com/gohugoio/hugo/common/hugio" + "github.com/gohugoio/hugo/helpers" + "github.com/gohugoio/hugo/identity" + "github.com/gohugoio/hugo/markup/converter" + "github.com/gohugoio/hugo/markup/tableofcontents" + "github.com/gohugoio/hugo/parser/metadecoders" "github.com/gohugoio/hugo/parser/pageparser" + "github.com/gohugoio/hugo/resources" + "github.com/gohugoio/hugo/resources/resource" + "github.com/gohugoio/hugo/tpl" +) + +const ( + internalSummaryDividerBase = "HUGOMORE42" ) var ( - internalSummaryDividerBase = "HUGOMORE42" internalSummaryDividerBaseBytes = []byte(internalSummaryDividerBase) internalSummaryDividerPre = []byte("\n\n" + internalSummaryDividerBase + "\n\n") ) -// The content related items on a Page. -type pageContent struct { - selfLayout string - truncated bool +type pageContentReplacement struct { + val []byte - cmap *pageContentMap - - source rawPageContent + source pageparser.Item } -// returns the content to be processed by Goldmark or similar. -func (p pageContent) contentToRender(ctx context.Context, parsed pageparser.Result, pm *pageContentMap, renderedShortcodes map[string]shortcodeRenderer) ([]byte, bool, error) { - source := parsed.Input() +func newCachedContent(m *pageMeta, pid uint64) (*cachedContent, error) { + var openSource hugio.OpenReadSeekCloser + var filename string + if m.f != nil { + meta := m.f.FileInfo().Meta() + openSource = func() (hugio.ReadSeekCloser, error) { + r, err := meta.Open() + if err != nil { + return nil, fmt.Errorf("failed to open file %q: %w", meta.Filename, err) + } + return r, nil + } + filename = m.f.Filename() + } + + c := &cachedContent{ + pm: m.s.pageMap, + StaleInfo: m, + shortcodeState: newShortcodeHandler(filename, m.s), + parseInfo: &contentParseInfo{ + pid: pid, + }, + cacheBaseKey: m.pathInfo.PathNoLang(), + openSource: openSource, + enableEmoji: m.s.conf.EnableEmoji, + } + + source, err := c.contentSource() + if err != nil { + return nil, err + } + + if err := c.parseContentFile(source); err != nil { + return nil, err + } + + return c, nil +} + +type cachedContent struct { + pm *pageMap + + cacheBaseKey string + + // The source bytes. + openSource hugio.OpenReadSeekCloser + + resource.StaleInfo + + shortcodeState *shortcodeHandler + + // Parsed content. + parseInfo *contentParseInfo + + enableEmoji bool +} + +type contentParseInfo struct { + pid uint64 + frontMatter map[string]any + + // Whether the parsed content contains a summary separator. + hasSummaryDivider bool + + // Whether there are more content after the summary divider. + summaryTruncated bool + + // Returns the position in bytes after any front matter. + posMainContent int + + // Indicates whether we must do placeholder replacements. 
+ hasNonMarkdownShortcode bool + + // Items from the page parser. + // These maps directly to the source + itemsStep1 pageparser.Items + + // *shortcode, pageContentReplacement or pageparser.Item + itemsStep2 []any +} + +func (p *contentParseInfo) AddBytes(item pageparser.Item) { + p.itemsStep2 = append(p.itemsStep2, item) +} + +func (p *contentParseInfo) AddReplacement(val []byte, source pageparser.Item) { + p.itemsStep2 = append(p.itemsStep2, pageContentReplacement{val: val, source: source}) +} + +func (p *contentParseInfo) AddShortcode(s *shortcode) { + p.itemsStep2 = append(p.itemsStep2, s) + if s.insertPlaceholder() { + p.hasNonMarkdownShortcode = true + } +} + +// contentToRenderForItems returns the content to be processed by Goldmark or similar. +func (pi *contentParseInfo) contentToRender(ctx context.Context, source []byte, renderedShortcodes map[string]shortcodeRenderer) ([]byte, bool, error) { var hasVariants bool c := make([]byte, 0, len(source)+(len(source)/10)) - for _, it := range pm.items { + for _, it := range pi.itemsStep2 { switch v := it.(type) { case pageparser.Item: c = append(c, source[v.Pos():v.Pos()+len(v.Val(source))]...) @@ -78,59 +189,556 @@ func (p pageContent) contentToRender(ctx context.Context, parsed pageparser.Resu return c, hasVariants, nil } -func (p pageContent) selfLayoutForOutput(f output.Format) string { - if p.selfLayout == "" { - return "" +func (c *cachedContent) IsZero() bool { + return len(c.parseInfo.itemsStep2) == 0 +} + +func (c *cachedContent) parseContentFile(source []byte) error { + if source == nil || c.openSource == nil { + return nil } - return p.selfLayout + f.Name -} -type rawPageContent struct { - hasSummaryDivider bool - - // The AST of the parsed page. Contains information about: - // shortcodes, front matter, summary indicators. - parsed pageparser.Result - - // Returns the position in bytes after any front matter. - posMainContent int - - // These are set if we're able to determine this from the source. - posSummaryEnd int - posBodyStart int -} - -type pageContentReplacement struct { - val []byte - - source pageparser.Item -} - -type pageContentMap struct { - - // If not, we can skip any pre-rendering of shortcodes. - hasMarkdownShortcode bool - - // Indicates whether we must do placeholder replacements. - hasNonMarkdownShortcode bool - - // *shortcode, pageContentReplacement or pageparser.Item - items []any -} - -func (p *pageContentMap) AddBytes(item pageparser.Item) { - p.items = append(p.items, item) -} - -func (p *pageContentMap) AddReplacement(val []byte, source pageparser.Item) { - p.items = append(p.items, pageContentReplacement{val: val, source: source}) -} - -func (p *pageContentMap) AddShortcode(s *shortcode) { - p.items = append(p.items, s) - if s.insertPlaceholder() { - p.hasNonMarkdownShortcode = true - } else { - p.hasMarkdownShortcode = true + items, err := pageparser.ParseBytes( + source, + pageparser.Config{}, + ) + if err != nil { + return err } + + c.parseInfo.itemsStep1 = items + + return c.parseInfo.mapItems(source, c.shortcodeState) +} + +func (c *contentParseInfo) parseFrontMatter(it pageparser.Item, iter *pageparser.Iterator, source []byte) error { + if c.frontMatter != nil { + return nil + } + + f := pageparser.FormatFromFrontMatterType(it.Type) + var err error + c.frontMatter, err = metadecoders.Default.UnmarshalToMap(it.Val(source), f) + if err != nil { + if fe, ok := err.(herrors.FileError); ok { + pos := fe.Position() + + // Offset the starting position of front matter. 
+ offset := iter.LineNumber(source) - 1 + if f == metadecoders.YAML { + offset -= 1 + } + pos.LineNumber += offset + + fe.UpdatePosition(pos) + fe.SetFilename("") // It will be set later. + + return fe + } else { + return err + } + } + + return nil +} + +func (rn *contentParseInfo) mapItems( + source []byte, + s *shortcodeHandler, +) error { + if len(rn.itemsStep1) == 0 { + return nil + } + + fail := func(err error, i pageparser.Item) error { + if fe, ok := err.(herrors.FileError); ok { + return fe + } + + pos := posFromInput("", source, i.Pos()) + + return herrors.NewFileErrorFromPos(err, pos) + } + + iter := pageparser.NewIterator(rn.itemsStep1) + + // the parser is guaranteed to return items in proper order or fail, so … + // … it's safe to keep some "global" state + var ordinal int + +Loop: + for { + it := iter.Next() + + switch { + case it.Type == pageparser.TypeIgnore: + case it.IsFrontMatter(): + if err := rn.parseFrontMatter(it, iter, source); err != nil { + return err + } + next := iter.Peek() + if !next.IsDone() { + rn.posMainContent = next.Pos() + } + case it.Type == pageparser.TypeLeadSummaryDivider: + posBody := -1 + f := func(item pageparser.Item) bool { + if posBody == -1 && !item.IsDone() { + posBody = item.Pos() + } + + if item.IsNonWhitespace(source) { + rn.summaryTruncated = true + + // Done + return false + } + return true + } + iter.PeekWalk(f) + + rn.hasSummaryDivider = true + + // The content may be rendered by Goldmark or similar, + // and we need to track the summary. + rn.AddReplacement(internalSummaryDividerPre, it) + + // Handle shortcode + case it.IsLeftShortcodeDelim(): + // let extractShortcode handle left delim (will do so recursively) + iter.Backup() + + currShortcode, err := s.extractShortcode(ordinal, 0, source, iter) + if err != nil { + return fail(err, it) + } + + currShortcode.pos = it.Pos() + currShortcode.length = iter.Current().Pos() - it.Pos() + if currShortcode.placeholder == "" { + currShortcode.placeholder = createShortcodePlaceholder("s", rn.pid, currShortcode.ordinal) + } + + if currShortcode.name != "" { + s.addName(currShortcode.name) + } + + if currShortcode.params == nil { + var s []string + currShortcode.params = s + } + + currShortcode.placeholder = createShortcodePlaceholder("s", rn.pid, ordinal) + ordinal++ + s.shortcodes = append(s.shortcodes, currShortcode) + + rn.AddShortcode(currShortcode) + + case it.IsEOF(): + break Loop + case it.IsError(): + return fail(it.Err, it) + default: + rn.AddBytes(it) + } + } + + return nil +} + +func (c *cachedContent) mustSource() []byte { + source, err := c.contentSource() + if err != nil { + panic(err) + } + return source +} + +func (c *cachedContent) contentSource() ([]byte, error) { + key := c.cacheBaseKey + v, err := c.pm.cacheContentSource.GetOrCreate(key, func(string) (*resources.StaleValue[[]byte], error) { + b, err := c.readSourceAll() + if err != nil { + return nil, err + } + + return &resources.StaleValue[[]byte]{ + Value: b, + IsStaleFunc: func() bool { + return c.IsStale() + }, + }, nil + }) + if err != nil { + return nil, err + } + + return v.Value, nil +} + +func (c *cachedContent) readSourceAll() ([]byte, error) { + if c.openSource == nil { + return []byte{}, nil + } + r, err := c.openSource() + if err != nil { + return nil, err + } + defer r.Close() + + return io.ReadAll(r) +} + +type contentTableOfContents struct { + // For Goldmark we split Parse and Render. 
+ astDoc any + + tableOfContents *tableofcontents.Fragments + tableOfContentsHTML template.HTML + + // Temporary storage of placeholders mapped to their content. + // These are shortcodes etc. Some of these will need to be replaced + // after any markup is rendered, so they share a common prefix. + contentPlaceholders map[string]shortcodeRenderer + + contentToRender []byte +} + +type contentSummary struct { + content template.HTML + summary template.HTML + summaryTruncated bool +} + +type contentPlainPlainWords struct { + plain string + plainWords []string + + summary template.HTML + summaryTruncated bool + + wordCount int + fuzzyWordCount int + readingTime int +} + +func (c *cachedContent) contentRendered(ctx context.Context, cp *pageContentOutput) (contentSummary, error) { + ctx = tpl.Context.DependencyScope.Set(ctx, pageDependencyScopeGlobal) + key := c.cacheBaseKey + "/" + cp.po.f.Name + versionv := cp.contentRenderedVersion + + v, err := c.pm.cacheContentRendered.GetOrCreate(key, func(string) (*resources.StaleValue[contentSummary], error) { + cp.po.p.s.Log.Trace(logg.StringFunc(func() string { + return fmt.Sprintln("contentRendered", key) + })) + + cp.po.p.s.h.contentRenderCounter.Add(1) + cp.contentRendered = true + po := cp.po + + ct, err := c.contentToC(ctx, cp) + if err != nil { + return nil, err + } + + rs := &resources.StaleValue[contentSummary]{ + IsStaleFunc: func() bool { + return c.IsStale() || cp.contentRenderedVersion != versionv + }, + } + + if len(c.parseInfo.itemsStep2) == 0 { + // Nothing to do. + return rs, nil + } + + var b []byte + + if ct.astDoc != nil { + // The content is parsed, but not rendered. + r, ok, err := po.contentRenderer.RenderContent(ctx, ct.contentToRender, ct.astDoc) + if err != nil { + return nil, err + } + if !ok { + return nil, errors.New("invalid state: astDoc is set but RenderContent returned false") + } + + b = r.Bytes() + + } else { + // Copy the content to be rendered. + b = make([]byte, len(ct.contentToRender)) + copy(b, ct.contentToRender) + } + + // There are one or more replacement tokens to be replaced. + var hasShortcodeVariants bool + tokenHandler := func(ctx context.Context, token string) ([]byte, error) { + if token == tocShortcodePlaceholder { + return []byte(ct.tableOfContentsHTML), nil + } + renderer, found := ct.contentPlaceholders[token] + if found { + repl, more, err := renderer.renderShortcode(ctx) + if err != nil { + return nil, err + } + hasShortcodeVariants = hasShortcodeVariants || more + return repl, nil + } + // This should never happen. + panic(fmt.Errorf("unknown shortcode token %q (number of tokens: %d)", token, len(ct.contentPlaceholders))) + } + + b, err = expandShortcodeTokens(ctx, b, tokenHandler) + if err != nil { + return nil, err + } + if hasShortcodeVariants { + cp.po.p.pageOutputTemplateVariationsState.Add(1) + } + + var result contentSummary // hasVariants bool + + if c.parseInfo.hasSummaryDivider { + isHTML := cp.po.p.m.markup == "html" + if isHTML { + // Use the summary sections as provided by the user. 
+ i := bytes.Index(b, internalSummaryDividerPre) + result.summary = helpers.BytesToHTML(b[:i]) + b = b[i+len(internalSummaryDividerPre):] + + } else { + summary, content, err := splitUserDefinedSummaryAndContent(cp.po.p.m.markup, b) + if err != nil { + cp.po.p.s.Log.Errorf("Failed to set user defined summary for page %q: %s", cp.po.p.pathOrTitle(), err) + } else { + b = content + result.summary = helpers.BytesToHTML(summary) + } + } + result.summaryTruncated = c.parseInfo.summaryTruncated + } + result.content = helpers.BytesToHTML(b) + rs.Value = result + + return rs, nil + }) + if err != nil { + return contentSummary{}, cp.po.p.wrapError(err) + } + + return v.Value, nil +} + +func (c *cachedContent) mustContentToC(ctx context.Context, cp *pageContentOutput) contentTableOfContents { + ct, err := c.contentToC(ctx, cp) + if err != nil { + panic(err) + } + return ct +} + +var setGetContentCallbackInContext = hcontext.NewContextDispatcher[func(*pageContentOutput, contentTableOfContents)]("contentCallback") + +func (c *cachedContent) contentToC(ctx context.Context, cp *pageContentOutput) (contentTableOfContents, error) { + key := c.cacheBaseKey + "/" + cp.po.f.Name + versionv := cp.contentRenderedVersion + + v, err := c.pm.contentTableOfContents.GetOrCreate(key, func(string) (*resources.StaleValue[contentTableOfContents], error) { + source, err := c.contentSource() + if err != nil { + return nil, err + } + + var ct contentTableOfContents + if err := cp.initRenderHooks(); err != nil { + return nil, err + } + f := cp.po.f + po := cp.po + p := po.p + ct.contentPlaceholders, err = c.shortcodeState.prepareShortcodesForPage(ctx, p, f, false) + if err != nil { + return nil, err + } + + // Callback called from above (e.g. in .RenderString) + ctxCallback := func(cp2 *pageContentOutput, ct2 contentTableOfContents) { + // Merge content placeholders + for k, v := range ct2.contentPlaceholders { + ct.contentPlaceholders[k] = v + } + + if p.s.conf.Internal.Watch { + for _, s := range cp2.po.p.content.shortcodeState.shortcodes { + for _, templ := range s.templs { + cp.trackDependency(templ.(identity.IdentityProvider)) + } + } + } + + // Transfer shortcode names so HasShortcode works for shortcodes from included pages. + cp.po.p.content.shortcodeState.transferNames(cp2.po.p.content.shortcodeState) + if cp2.po.p.pageOutputTemplateVariationsState.Load() > 0 { + cp.po.p.pageOutputTemplateVariationsState.Add(1) + } + } + + ctx = setGetContentCallbackInContext.Set(ctx, ctxCallback) + + var hasVariants bool + ct.contentToRender, hasVariants, err = c.parseInfo.contentToRender(ctx, source, ct.contentPlaceholders) + if err != nil { + return nil, err + } + + if hasVariants { + p.pageOutputTemplateVariationsState.Add(1) + } + + isHTML := cp.po.p.m.markup == "html" + + if !isHTML { + createAndSetToC := func(tocProvider converter.TableOfContentsProvider) { + cfg := p.s.ContentSpec.Converters.GetMarkupConfig() + ct.tableOfContents = tocProvider.TableOfContents() + ct.tableOfContentsHTML = template.HTML( + ct.tableOfContents.ToHTML( + cfg.TableOfContents.StartLevel, + cfg.TableOfContents.EndLevel, + cfg.TableOfContents.Ordered, + ), + ) + } + + // If the converter supports doing the parsing separately, we do that. + parseResult, ok, err := po.contentRenderer.ParseContent(ctx, ct.contentToRender) + if err != nil { + return nil, err + } + if ok { + // This is Goldmark. + // Store away the parse result for later use. + createAndSetToC(parseResult) + + ct.astDoc = parseResult.Doc() + + } else { + + // This is Asciidoctor etc. 
+ r, err := po.contentRenderer.ParseAndRenderContent(ctx, ct.contentToRender, true) + if err != nil { + return nil, err + } + + ct.contentToRender = r.Bytes() + + if tocProvider, ok := r.(converter.TableOfContentsProvider); ok { + createAndSetToC(tocProvider) + } else { + tmpContent, tmpTableOfContents := helpers.ExtractTOC(ct.contentToRender) + ct.tableOfContentsHTML = helpers.BytesToHTML(tmpTableOfContents) + ct.tableOfContents = tableofcontents.Empty + ct.contentToRender = tmpContent + } + } + } + + return &resources.StaleValue[contentTableOfContents]{ + Value: ct, + IsStaleFunc: func() bool { + return c.IsStale() || cp.contentRenderedVersion != versionv + }, + }, nil + }) + if err != nil { + return contentTableOfContents{}, err + } + + return v.Value, nil +} + +func (c *cachedContent) contentPlain(ctx context.Context, cp *pageContentOutput) (contentPlainPlainWords, error) { + key := c.cacheBaseKey + "/" + cp.po.f.Name + + versionv := cp.contentRenderedVersion + + v, err := c.pm.cacheContentPlain.GetOrCreateWitTimeout(key, cp.po.p.s.Conf.Timeout(), func(string) (*resources.StaleValue[contentPlainPlainWords], error) { + var result contentPlainPlainWords + rs := &resources.StaleValue[contentPlainPlainWords]{ + IsStaleFunc: func() bool { + return c.IsStale() || cp.contentRenderedVersion != versionv + }, + } + + rendered, err := c.contentRendered(ctx, cp) + if err != nil { + return nil, err + } + + result.plain = tpl.StripHTML(string(rendered.content)) + result.plainWords = strings.Fields(result.plain) + + isCJKLanguage := cp.po.p.m.isCJKLanguage + + if isCJKLanguage { + result.wordCount = 0 + for _, word := range result.plainWords { + runeCount := utf8.RuneCountInString(word) + if len(word) == runeCount { + result.wordCount++ + } else { + result.wordCount += runeCount + } + } + } else { + result.wordCount = helpers.TotalWords(result.plain) + } + + // TODO(bep) is set in a test. Fix that. + if result.fuzzyWordCount == 0 { + result.fuzzyWordCount = (result.wordCount + 100) / 100 * 100 + } + + if isCJKLanguage { + result.readingTime = (result.wordCount + 500) / 501 + } else { + result.readingTime = (result.wordCount + 212) / 213 + } + + if rendered.summary != "" { + result.summary = rendered.summary + result.summaryTruncated = rendered.summaryTruncated + } else if cp.po.p.m.summary != "" { + b, err := cp.po.contentRenderer.ParseAndRenderContent(ctx, []byte(cp.po.p.m.summary), false) + if err != nil { + return nil, err + } + html := cp.po.p.s.ContentSpec.TrimShortHTML(b.Bytes()) + result.summary = helpers.BytesToHTML(html) + } else { + var summary string + var truncated bool + if isCJKLanguage { + summary, truncated = cp.po.p.s.ContentSpec.TruncateWordsByRune(result.plainWords) + } else { + summary, truncated = cp.po.p.s.ContentSpec.TruncateWordsToWholeSentence(result.plain) + } + result.summary = template.HTML(summary) + result.summaryTruncated = truncated + } + + rs.Value = result + + return rs, nil + }) + if err != nil { + if herrors.IsTimeoutError(err) { + err = fmt.Errorf("timed out rendering the page content. 
You may have a circular loop in a shortcode, or your site may have resources that take longer to build than the `timeout` limit in your Hugo config file: %w", err) + } + return contentPlainPlainWords{}, err + } + return v.Value, nil } diff --git a/hugolib/page__data.go b/hugolib/page__data.go index ad6ba126e..9712f1b4a 100644 --- a/hugolib/page__data.go +++ b/hugolib/page__data.go @@ -14,6 +14,7 @@ package hugolib import ( + "strings" "sync" "github.com/gohugoio/hugo/resources/kinds" @@ -37,23 +38,18 @@ func (p *pageData) Data() any { switch p.Kind() { case kinds.KindTerm: - b := p.treeRef.n - name := b.viewInfo.name - termKey := b.viewInfo.termKey - - taxonomy := p.s.Taxonomies()[name.plural].Get(termKey) - - p.data[name.singular] = taxonomy + path := p.Path() + name := p.s.pageMap.cfg.getTaxonomyConfig(path) + term := p.s.Taxonomies()[name.plural].Get(strings.TrimPrefix(path, name.pluralTreeKey)) + p.data[name.singular] = term p.data["Singular"] = name.singular p.data["Plural"] = name.plural - p.data["Term"] = b.viewInfo.term() + p.data["Term"] = p.Title() case kinds.KindTaxonomy: - b := p.treeRef.n - name := b.viewInfo.name - - p.data["Singular"] = name.singular - p.data["Plural"] = name.plural - p.data["Terms"] = p.s.Taxonomies()[name.plural] + viewCfg := p.s.pageMap.cfg.getTaxonomyConfig(p.Path()) + p.data["Singular"] = viewCfg.singular + p.data["Plural"] = viewCfg.plural + p.data["Terms"] = p.s.Taxonomies()[viewCfg.plural] // keep the following just for legacy reasons p.data["OrderedIndex"] = p.data["Terms"] p.data["Index"] = p.data["Terms"] diff --git a/hugolib/page__fragments_test.go b/hugolib/page__fragments_test.go index cce006e9f..c30fa829e 100644 --- a/hugolib/page__fragments_test.go +++ b/hugolib/page__fragments_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -65,7 +65,6 @@ Fragments : {{ $p1.Fragments.Identifiers }} b.AssertFileContent("public/en/p1/index.html", "HTML") b.AssertFileContent("public/en/p1/index.json", "ToC: \nFragments : [heading-1-fr]") - } // Issue #10866 @@ -108,5 +107,4 @@ Fragments: {{ .Fragments.Identifiers }}| b.AssertFileContent("public/p1/index.html", "Fragments: [heading-p1-1 heading-p2-1 heading-p2-2]|") b.AssertFileContent("public/p2/index.html", "Fragments: [heading-p2-1 heading-p2-2]|") - } diff --git a/hugolib/page__meta.go b/hugolib/page__meta.go index eb1559fb1..0ffdb0b84 100644 --- a/hugolib/page__meta.go +++ b/hugolib/page__meta.go @@ -14,28 +14,26 @@ package hugolib import ( + "context" "fmt" - "path" "path/filepath" "regexp" "strings" - "sync" "time" - "github.com/gohugoio/hugo/langs" - "github.com/gobuffalo/flect" + "github.com/gohugoio/hugo/identity" + "github.com/gohugoio/hugo/langs" "github.com/gohugoio/hugo/markup/converter" - - "github.com/gohugoio/hugo/hugofs/files" - - "github.com/gohugoio/hugo/common/hugo" + xmaps "golang.org/x/exp/maps" "github.com/gohugoio/hugo/related" "github.com/gohugoio/hugo/source" + "github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/common/maps" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/helpers" @@ -50,79 +48,76 @@ import ( var cjkRe = regexp.MustCompile(`\p{Han}|\p{Hangul}|\p{Hiragana}|\p{Katakana}`) type pageMeta struct { - // kind is the discriminator that identifies the different page types - // in the different page collections. This can, as an example, be used - // to to filter regular pages, find sections etc. - // Kind will, for the pages available to the templates, be one of: - // page, home, section, taxonomy and term. - // It is of string type to make it easy to reason about in - // the templates. - kind string + kind string // Page kind. + term string // Set for kind == KindTerm. + singular string // Set for kind == KindTerm and kind == KindTaxonomy. - // This is a standalone page not part of any page collection. These - // include sitemap, robotsTXT and similar. It will have no pageOutputs, but - // a fixed pageOutput. - standalone bool + resource.Staler + pageMetaParams - draft bool // Only published when running with -D flag - buildConfig pagemeta.BuildConfig + pageMetaFrontMatter - bundleType files.ContentClass + // Set for standalone pages, e.g. robotsTXT. + standaloneOutputFormat output.Format - // Params contains configuration defined in the params section of page frontmatter. - params map[string]any + resourcePath string // Set for bundled pages; path relative to its bundle root. + bundled bool // Set if this page is bundled inside another. - title string - linkTitle string + pathInfo *paths.Path // Always set. This the canonical path to the Page. + f *source.File - summary string + s *Site // The site this page belongs to. +} - resourcePath string +// Prepare for a rebuild of the data passed in from front matter. +func (m *pageMeta) setMetaPostPrepareRebuild() { + params := xmaps.Clone[map[string]any](m.paramsOriginal) + m.pageMetaParams.params = params + m.pageMetaFrontMatter = pageMetaFrontMatter{} +} - weight int +type pageMetaParams struct { + setMetaPostCount int + setMetaPostCascadeChanged bool - markup string - contentType string + params map[string]any // Params contains configuration defined in the params section of page frontmatter. + cascade map[page.PageMatcher]maps.Params // cascade contains default configuration to be cascaded downwards. 
- // whether the content is in a CJK language. - isCJKLanguage bool + // These are only set in watch mode. + datesOriginal pageMetaDates + paramsOriginal map[string]any // contains the original params as defined in the front matter. + cascadeOriginal map[page.PageMatcher]maps.Params // contains the original cascade as defined in the front matter. +} - layout string +// From page front matter. +type pageMetaFrontMatter struct { + draft bool // Only published when running with -D flag + title string + linkTitle string + summary string + weight int + markup string + contentType string // type in front matter. + isCJKLanguage bool // whether the content is in a CJK language. + layout string + aliases []string + description string + keywords []string + translationKey string // maps to translation(s) of this page. - aliases []string + buildConfig pagemeta.BuildConfig + configuredOutputFormats output.Formats // outputs defiend in front matter. + pageMetaDates // The 4 front matter dates that Hugo cares about. + resourcesMetadata []map[string]any // Raw front matter metadata that is going to be assigned to the page resources. + sitemap config.SitemapConfig // Sitemap overrides from front matter. + urlPaths pagemeta.URLPath +} - description string - keywords []string - - urlPaths pagemeta.URLPath - - resource.Dates - - // Set if this page is bundled inside another. - bundled bool - - // A key that maps to translation(s) of this page. This value is fetched - // from the page front matter. - translationKey string - - // From front matter. - configuredOutputFormats output.Formats - - // This is the raw front matter metadata that is going to be assigned to - // the Resources above. - resourcesMetadata []map[string]any - - f source.File - - sections []string - - // Sitemap overrides from front matter. - sitemap config.SitemapConfig - - s *Site - - contentConverterInit sync.Once - contentConverter converter.Converter +func (m *pageMetaParams) init(preserveOringal bool) { + if preserveOringal { + m.paramsOriginal = xmaps.Clone[maps.Params](m.params) + m.cascadeOriginal = xmaps.Clone[map[page.PageMatcher]maps.Params](m.cascade) + } } func (p *pageMeta) Aliases() []string { @@ -144,8 +139,15 @@ func (p *pageMeta) Authors() page.AuthorList { return nil } -func (p *pageMeta) BundleType() files.ContentClass { - return p.bundleType +func (p *pageMeta) BundleType() string { + switch p.pathInfo.BundleType() { + case paths.PathTypeLeaf: + return "leaf" + case paths.PathTypeBranch: + return "branch" + default: + return "" + } } func (p *pageMeta) Description() string { @@ -160,7 +162,7 @@ func (p *pageMeta) Draft() bool { return p.draft } -func (p *pageMeta) File() source.File { +func (p *pageMeta) File() *source.File { return p.f } @@ -192,6 +194,9 @@ func (p *pageMeta) Name() string { if p.resourcePath != "" { return p.resourcePath } + if p.kind == kinds.KindTerm { + return p.pathInfo.Unmormalized().BaseNameNoIdentifier() + } return p.Title() } @@ -217,28 +222,11 @@ func (p *pageMeta) Params() maps.Params { } func (p *pageMeta) Path() string { - if !p.File().IsZero() { - const example = ` - {{ $path := "" }} - {{ with .File }} - {{ $path = .Path }} - {{ else }} - {{ $path = .Path }} - {{ end }} -` - p.s.Log.Warnln(".Path when the page is backed by a file is deprecated. We plan to use Path for a canonical source path and you probably want to check the source is a file. 
To get the current behaviour, you can use a construct similar to the one below:\n" + example) - - } - - return p.Pathc() + return p.pathInfo.Base() } -// This is just a bridge method, use Path in templates. -func (p *pageMeta) Pathc() string { - if !p.File().IsZero() { - return p.File().Path() - } - return p.SectionsPath() +func (p *pageMeta) PathInfo() *paths.Path { + return p.pathInfo } // RelatedKeywords implements the related.Document interface needed for fast page searches. @@ -256,31 +244,7 @@ func (p *pageMeta) IsSection() bool { } func (p *pageMeta) Section() string { - if p.IsHome() { - return "" - } - - if p.IsNode() { - if len(p.sections) == 0 { - // May be a sitemap or similar. - return "" - } - return p.sections[0] - } - - if !p.File().IsZero() { - return p.File().Section() - } - - panic("invalid page state") -} - -func (p *pageMeta) SectionsEntries() []string { - return p.sections -} - -func (p *pageMeta) SectionsPath() string { - return path.Join(p.SectionsEntries()...) + return p.pathInfo.Section() } func (p *pageMeta) Sitemap() config.SitemapConfig { @@ -309,79 +273,114 @@ func (p *pageMeta) Weight() int { return p.weight } -func (pm *pageMeta) mergeBucketCascades(b1, b2 *pagesMapBucket) { - if b1.cascade == nil { - b1.cascade = make(map[page.PageMatcher]maps.Params) - } - - if b2 != nil && b2.cascade != nil { - for k, v := range b2.cascade { - - vv, found := b1.cascade[k] - if !found { - b1.cascade[k] = v - } else { - // Merge - for ck, cv := range v { - if _, found := vv[ck]; !found { - vv[ck] = cv - } - } - } - } - } -} - -func (pm *pageMeta) setMetadata(parentBucket *pagesMapBucket, p *pageState, frontmatter map[string]any) error { - pm.params = make(maps.Params) - - if frontmatter == nil && (parentBucket == nil || parentBucket.cascade == nil) { - return nil - } +func (ps *pageState) setMetaPre() error { + pm := ps.m + p := ps + frontmatter := p.content.parseInfo.frontMatter + watching := p.s.watching() if frontmatter != nil { // Needed for case insensitive fetching of params values maps.PrepareParams(frontmatter) - if p.bucket != nil { + pm.pageMetaParams.params = frontmatter + if p.IsNode() { // Check for any cascade define on itself. if cv, found := frontmatter["cascade"]; found { var err error - p.bucket.cascade, err = page.DecodeCascade(cv) + cascade, err := page.DecodeCascade(cv) if err != nil { return err } + pm.pageMetaParams.cascade = cascade + } } - } else { - frontmatter = make(map[string]any) + } else if pm.pageMetaParams.params == nil { + pm.pageMetaParams.params = make(maps.Params) } - var cascade map[page.PageMatcher]maps.Params + pm.pageMetaParams.init(watching) - if p.bucket != nil { - if parentBucket != nil { - // Merge missing keys from parent into this. - pm.mergeBucketCascades(p.bucket, parentBucket) + return nil +} + +func (ps *pageState) setMetaPost(cascade map[page.PageMatcher]maps.Params) error { + ps.m.setMetaPostCount++ + var cascadeHashPre uint64 + if ps.m.setMetaPostCount > 1 { + cascadeHashPre = identity.HashUint64(ps.m.cascade) + ps.m.cascade = xmaps.Clone[map[page.PageMatcher]maps.Params](ps.m.cascadeOriginal) + + } + + // Apply cascades first so they can be overriden later. 
+ if cascade != nil { + if ps.m.cascade != nil { + for k, v := range cascade { + vv, found := ps.m.cascade[k] + if !found { + ps.m.cascade[k] = v + } else { + // Merge + for ck, cv := range v { + if _, found := vv[ck]; !found { + vv[ck] = cv + } + } + } + } + cascade = ps.m.cascade + } else { + ps.m.cascade = cascade } - cascade = p.bucket.cascade - } else if parentBucket != nil { - cascade = parentBucket.cascade } + if cascade == nil { + cascade = ps.m.cascade + } + + if ps.m.setMetaPostCount > 1 { + ps.m.setMetaPostCascadeChanged = cascadeHashPre != identity.HashUint64(ps.m.cascade) + if !ps.m.setMetaPostCascadeChanged { + // No changes, restore any value that may be changed by aggregation. + ps.m.dates = ps.m.datesOriginal.dates + return nil + } + ps.m.setMetaPostPrepareRebuild() + + } + + // Cascade is also applied to itself. for m, v := range cascade { - if !m.Matches(p) { + if !m.Matches(ps) { continue } for kk, vv := range v { - if _, found := frontmatter[kk]; !found { - frontmatter[kk] = vv + if _, found := ps.m.params[kk]; !found { + ps.m.params[kk] = vv } } } + if err := ps.setMetaPostParams(); err != nil { + return err + } + + if err := ps.m.applyDefaultValues(); err != nil { + return err + } + + // Store away any original values that may be changed from aggregation. + ps.m.datesOriginal = ps.m.pageMetaDates + + return nil +} + +func (p *pageState) setMetaPostParams() error { + pm := p.m var mtime time.Time var contentBaseName string - if !p.File().IsZero() { + if p.File() != nil { contentBaseName = p.File().ContentBaseName() if p.File().FileInfo() != nil { mtime = p.File().FileInfo().ModTime() @@ -393,10 +392,12 @@ func (pm *pageMeta) setMetadata(parentBucket *pagesMapBucket, p *pageState, fron gitAuthorDate = p.gitInfo.AuthorDate } + pm.pageMetaDates = pageMetaDates{} + pm.urlPaths = pagemeta.URLPath{} + descriptor := &pagemeta.FrontMatterDescriptor{ - Frontmatter: frontmatter, Params: pm.params, - Dates: &pm.Dates, + Dates: &pm.pageMetaDates.dates, PageURLs: &pm.urlPaths, BaseFilename: contentBaseName, ModTime: mtime, @@ -412,7 +413,7 @@ func (pm *pageMeta) setMetadata(parentBucket *pagesMapBucket, p *pageState, fron p.s.Log.Errorf("Failed to handle dates for page %q: %s", p.pathOrTitle(), err) } - pm.buildConfig, err = pagemeta.DecodeBuildConfig(frontmatter["_build"]) + pm.buildConfig, err = pagemeta.DecodeBuildConfig(pm.params["_build"]) if err != nil { return err } @@ -420,7 +421,7 @@ func (pm *pageMeta) setMetadata(parentBucket *pagesMapBucket, p *pageState, fron var sitemapSet bool var draft, published, isCJKLanguage *bool - for k, v := range frontmatter { + for k, v := range pm.params { loki := strings.ToLower(k) if loki == "published" { // Intentionally undocumented @@ -458,15 +459,6 @@ func (pm *pageMeta) setMetadata(parentBucket *pagesMapBucket, p *pageState, fron if strings.HasPrefix(url, "http://") || strings.HasPrefix(url, "https://") { return fmt.Errorf("URLs with protocol (http*) not supported: %q. In page %q", url, p.pathOrTitle()) } - lang := p.s.GetLanguagePrefix() - if lang != "" && !strings.HasPrefix(url, "/") && strings.HasPrefix(url, lang+"/") { - if strings.HasPrefix(hugo.CurrentVersion.String(), "0.55") { - // We added support for page relative URLs in Hugo 0.55 and - // this may get its language path added twice. - // TODO(bep) eventually remove this. - p.s.Log.Warnf(`Front matter in %q with the url %q with no leading / has what looks like the language prefix added. 
In Hugo 0.55 we added support for page relative URLs in front matter, no language prefix needed. Check the URL and consider to either add a leading / or remove the language prefix.`, p.pathOrTitle(), url) - } - } pm.urlPaths.URL = url pm.params[loki] = url case "type": @@ -615,8 +607,8 @@ func (pm *pageMeta) setMetadata(parentBucket *pagesMapBucket, p *pageState, fron if isCJKLanguage != nil { pm.isCJKLanguage = *isCJKLanguage - } else if p.s.conf.HasCJKLanguage && p.source.parsed != nil { - if cjkRe.Match(p.source.parsed.Input()) { + } else if p.s.conf.HasCJKLanguage && p.content.openSource != nil { + if cjkRe.Match(p.content.mustSource()) { pm.isCJKLanguage = true } else { pm.isCJKLanguage = false @@ -628,28 +620,39 @@ func (pm *pageMeta) setMetadata(parentBucket *pagesMapBucket, p *pageState, fron return nil } -func (p *pageMeta) noListAlways() bool { - return p.buildConfig.List != pagemeta.Always +// shouldList returns whether this page should be included in the list of pages. +// glogal indicates site.Pages etc. +func (p *pageMeta) shouldList(global bool) bool { + if p.isStandalone() { + // Never list 404, sitemap and similar. + return false + } + + switch p.buildConfig.List { + case pagemeta.Always: + return true + case pagemeta.Never: + return false + case pagemeta.ListLocally: + return !global + } + return false } -func (p *pageMeta) getListFilter(local bool) contentTreeNodeCallback { - return newContentTreeFilter(func(n *contentNode) bool { - if n == nil { - return true - } +func (p *pageMeta) shouldListAny() bool { + return p.shouldList(true) || p.shouldList(false) +} - var shouldList bool - switch n.p.m.buildConfig.List { - case pagemeta.Always: - shouldList = true - case pagemeta.Never: - shouldList = false - case pagemeta.ListLocally: - shouldList = local - } +func (p *pageMeta) isStandalone() bool { + return !p.standaloneOutputFormat.IsZero() +} - return !shouldList - }) +func (p *pageMeta) shouldBeCheckedForMenuDefinitions() bool { + if !p.shouldList(false) { + return false + } + + return p.kind == kinds.KindHome || p.kind == kinds.KindSection || p.kind == kinds.KindPage } func (p *pageMeta) noRender() bool { @@ -660,17 +663,17 @@ func (p *pageMeta) noLink() bool { return p.buildConfig.Render == pagemeta.Never } -func (p *pageMeta) applyDefaultValues(n *contentNode) error { +func (p *pageMeta) applyDefaultValues() error { if p.buildConfig.IsZero() { p.buildConfig, _ = pagemeta.DecodeBuildConfig(nil) } - if !p.s.isEnabled(p.Kind()) { + if !p.s.conf.IsKindEnabled(p.Kind()) { (&p.buildConfig).Disable() } if p.markup == "" { - if !p.File().IsZero() { + if p.File() != nil { // Fall back to file extension p.markup = p.s.ContentSpec.ResolveMarkup(p.File().Ext()) } @@ -679,43 +682,26 @@ func (p *pageMeta) applyDefaultValues(n *contentNode) error { } } - if p.title == "" && p.f.IsZero() { + if p.title == "" && p.f == nil { switch p.Kind() { case kinds.KindHome: p.title = p.s.Title() case kinds.KindSection: - var sectionName string - if n != nil { - sectionName = n.rootSection() - } else { - sectionName = p.sections[0] - } + sectionName := p.pathInfo.Unmormalized().BaseNameNoIdentifier() if p.s.conf.PluralizeListTitles { sectionName = flect.Pluralize(sectionName) } p.title = p.s.conf.C.CreateTitle(sectionName) case kinds.KindTerm: - // TODO(bep) improve - key := p.sections[len(p.sections)-1] - p.title = strings.Replace(p.s.conf.C.CreateTitle(key), "-", " ", -1) - case kinds.KindTaxonomy: - p.title = p.s.conf.C.CreateTitle(p.sections[0]) - case kinds.Kind404: - p.title = "404 Page not 
found" - - } - } - - if p.IsNode() { - p.bundleType = files.ContentClassBranch - } else { - source := p.File() - if fi, ok := source.(*fileInfo); ok { - class := fi.FileInfo().Meta().Classifier - switch class { - case files.ContentClassBranch, files.ContentClassLeaf: - p.bundleType = class + if p.term != "" { + p.title = p.s.conf.C.CreateTitle(p.term) + } else { + panic("term not set") } + case kinds.KindTaxonomy: + p.title = strings.Replace(p.s.conf.C.CreateTitle(p.pathInfo.Unmormalized().BaseNameNoIdentifier()), "-", " ", -1) + case kinds.KindStatus404: + p.title = "404 Page not found" } } @@ -734,12 +720,12 @@ func (p *pageMeta) newContentConverter(ps *pageState, markup string) (converter. var id string var filename string var path string - if !p.f.IsZero() { + if p.f != nil { id = p.f.UniqueID() filename = p.f.Filename() path = p.f.Path() } else { - path = p.Pathc() + path = p.Path() } cpp, err := cp.New( @@ -803,3 +789,89 @@ func getParam(m resource.ResourceParamsProvider, key string, stringToLower bool) func getParamToLower(m resource.ResourceParamsProvider, key string) any { return getParam(m, key, true) } + +type pageMetaDates struct { + dates resource.Dates +} + +func (d *pageMetaDates) Date() time.Time { + return d.dates.Date() +} + +func (d *pageMetaDates) Lastmod() time.Time { + return d.dates.Lastmod() +} + +func (d *pageMetaDates) PublishDate() time.Time { + return d.dates.PublishDate() +} + +func (d *pageMetaDates) ExpiryDate() time.Time { + return d.dates.ExpiryDate() +} + +func (ps *pageState) initLazyProviders() error { + ps.init.Add(func(ctx context.Context) (any, error) { + pp, err := newPagePaths(ps) + if err != nil { + return nil, err + } + + var outputFormatsForPage output.Formats + var renderFormats output.Formats + + if ps.m.standaloneOutputFormat.IsZero() { + outputFormatsForPage = ps.m.outputFormats() + renderFormats = ps.s.h.renderFormats + } else { + // One of the fixed output format pages, e.g. 404. + outputFormatsForPage = output.Formats{ps.m.standaloneOutputFormat} + renderFormats = outputFormatsForPage + } + + // Prepare output formats for all sites. + // We do this even if this page does not get rendered on + // its own. It may be referenced via one of the site collections etc. + // it will then need an output format. + ps.pageOutputs = make([]*pageOutput, len(renderFormats)) + created := make(map[string]*pageOutput) + shouldRenderPage := !ps.m.noRender() + + for i, f := range renderFormats { + + if po, found := created[f.Name]; found { + ps.pageOutputs[i] = po + continue + } + + render := shouldRenderPage + if render { + _, render = outputFormatsForPage.GetByName(f.Name) + } + + po := newPageOutput(ps, pp, f, render) + + // Create a content provider for the first, + // we may be able to reuse it. 
+ if i == 0 { + contentProvider, err := newPageContentOutput(po) + if err != nil { + return nil, err + } + po.setContentProvider(contentProvider) + } + + ps.pageOutputs[i] = po + created[f.Name] = po + + } + + if err := ps.initCommonProviders(pp); err != nil { + return nil, err + } + + return nil, nil + }) + + return nil +} diff --git a/hugolib/page__new.go b/hugolib/page__new.go index 108e5717f..89eeb2e0e 100644 --- a/hugolib/page__new.go +++ b/hugolib/page__new.go @@ -14,207 +14,173 @@ package hugolib import ( - "context" - "html/template" - "strings" + "fmt" + "sync" + "sync/atomic" - "go.uber.org/atomic" - - "github.com/gohugoio/hugo/common/hugo" + "github.com/gohugoio/hugo/identity" + "github.com/gohugoio/hugo/resources" "github.com/gohugoio/hugo/common/maps" - "github.com/gohugoio/hugo/output" - "github.com/gohugoio/hugo/lazy" + "github.com/gohugoio/hugo/resources/kinds" "github.com/gohugoio/hugo/resources/page" ) -var pageIdCounter atomic.Int64 +var pageIDCounter atomic.Uint64 -func newPageBase(metaProvider *pageMeta) (*pageState, error) { - if metaProvider.s == nil { - panic("must provide a Site") - } - - id := int(pageIdCounter.Add(1)) - - s := metaProvider.s - - ps := &pageState{ - id: id, - pageOutput: nopPageOutput, - pageOutputTemplateVariationsState: atomic.NewUint32(0), - pageCommon: &pageCommon{ - FileProvider: metaProvider, - AuthorProvider: metaProvider, - Scratcher: maps.NewScratcher(), - store: maps.NewScratch(), - Positioner: page.NopPage, - InSectionPositioner: page.NopPage, - ResourceMetaProvider: metaProvider, - ResourceParamsProvider: metaProvider, - PageMetaProvider: metaProvider, - RelatedKeywordsProvider: metaProvider, - OutputFormatsProvider: page.NopPage, - ResourceTypeProvider: pageTypesProvider, - MediaTypeProvider: pageTypesProvider, - RefProvider: page.NopPage, - ShortcodeInfoProvider: page.NopPage, - LanguageProvider: s, - pagePages: &pagePages{}, - - InternalDependencies: s, - init: lazy.New(), - m: metaProvider, - s: s, - sWrapped: page.WrapSite(s), - }, - } - - ps.shortcodeState = newShortcodeHandler(ps, ps.s) - - siteAdapter := pageSiteAdapter{s: s, p: ps} - - ps.pageMenus = &pageMenus{p: ps} - ps.PageMenusProvider = ps.pageMenus - ps.GetPageProvider = siteAdapter - ps.GitInfoProvider = ps - ps.TranslationsProvider = ps - ps.ResourceDataProvider = &pageData{pageState: ps} - ps.RawContentProvider = ps - ps.ChildCareProvider = ps - ps.TreeProvider = pageTree{p: ps} - ps.Eqer = ps - ps.TranslationKeyProvider = ps - ps.ShortcodeInfoProvider = ps - ps.AlternativeOutputFormatsProvider = ps - - return ps, nil -} - -func newPageBucket(p *pageState) *pagesMapBucket { - return &pagesMapBucket{owner: p, pagesMapBucketPages: &pagesMapBucketPages{}} -} - -func newPageFromMeta( - n *contentNode, - parentBucket *pagesMapBucket, - meta map[string]any, - metaProvider *pageMeta) (*pageState, error) { - if metaProvider.f == nil { - metaProvider.f = page.NewZeroFile(metaProvider.s.Log) - } - - ps, err := newPageBase(metaProvider) - if err != nil { - return nil, err - } - - bucket := parentBucket - - if ps.IsNode() { - ps.bucket = newPageBucket(ps) - } - - if meta != nil || parentBucket != nil { - if err := metaProvider.setMetadata(bucket, ps, meta); err != nil { - return nil, ps.wrapError(err) +func (h *HugoSites) newPage(m *pageMeta) (*pageState, error) { + if m.pathInfo == nil { + if m.f != nil { + m.pathInfo = m.f.FileInfo().Meta().PathInfo + } + if m.pathInfo == nil { + panic(fmt.Sprintf("missing pathInfo in %v", m)) } } - if err := 
metaProvider.applyDefaultValues(n); err != nil { - return nil, err - } + m.Staler = &resources.AtomicStaler{} - ps.init.Add(func(context.Context) (any, error) { - pp, err := newPagePaths(metaProvider.s, ps, metaProvider) - if err != nil { - return nil, err - } - - makeOut := func(f output.Format, render bool) *pageOutput { - return newPageOutput(ps, pp, f, render) - } - - shouldRenderPage := !ps.m.noRender() - - if ps.m.standalone { - ps.pageOutput = makeOut(ps.m.outputFormats()[0], shouldRenderPage) - } else { - outputFormatsForPage := ps.m.outputFormats() - - // Prepare output formats for all sites. - // We do this even if this page does not get rendered on - // its own. It may be referenced via .Site.GetPage and - // it will then need an output format. - ps.pageOutputs = make([]*pageOutput, len(ps.s.h.renderFormats)) - created := make(map[string]*pageOutput) - for i, f := range ps.s.h.renderFormats { - po, found := created[f.Name] - if !found { - render := shouldRenderPage - if render { - _, render = outputFormatsForPage.GetByName(f.Name) - } - po = makeOut(f, render) - created[f.Name] = po + ps, err := func() (*pageState, error) { + if m.s == nil { + // Identify the Site/language to associate this Page with. + var lang string + if m.f != nil { + meta := m.f.FileInfo().Meta() + lang = meta.Lang + m.s = h.Sites[meta.LangIndex] + } else { + lang = m.pathInfo.Lang() + } + var found bool + for _, ss := range h.Sites { + if ss.Lang() == lang { + m.s = ss + found = true + break } - ps.pageOutputs[i] = po + } + if !found { + return nil, fmt.Errorf("no site found for language %q", lang) + } + + } + + // Identify Page Kind. + if m.kind == "" { + m.kind = kinds.KindSection + if m.pathInfo.Base() == "/" { + m.kind = kinds.KindHome + } else if m.pathInfo.IsBranchBundle() { + // A section, taxonomy or term. + tc := m.s.pageMap.cfg.getTaxonomyConfig(m.Path()) + if !tc.IsZero() { + // Either a taxonomy or a term. + if tc.pluralTreeKey == m.Path() { + m.kind = kinds.KindTaxonomy + } else { + m.kind = kinds.KindTerm + } + } + } else if m.f != nil { + m.kind = kinds.KindPage } } - if err := ps.initCommonProviders(pp); err != nil { - return nil, err + if m.kind == kinds.KindPage && !m.s.conf.IsKindEnabled(m.kind) { + return nil, nil } - return nil, nil - }) + pid := pageIDCounter.Add(1) + + // Parse page content. 
+ cachedContent, err := newCachedContent(m, pid) + if err != nil { + return nil, m.wrapError(err) + } + + var dependencyManager identity.Manager = identity.NopManager + + if m.s.conf.Internal.Watch { + dependencyManager = identity.NewManager(m.Path()) + } + + ps := &pageState{ + pid: pid, + pageOutput: nopPageOutput, + pageOutputTemplateVariationsState: &atomic.Uint32{}, + resourcesPublishInit: &sync.Once{}, + Staler: m, + dependencyManager: dependencyManager, + pageCommon: &pageCommon{ + content: cachedContent, + FileProvider: m, + AuthorProvider: m, + Scratcher: maps.NewScratcher(), + store: maps.NewScratch(), + Positioner: page.NopPage, + InSectionPositioner: page.NopPage, + ResourceNameTitleProvider: m, + ResourceParamsProvider: m, + PageMetaProvider: m, + RelatedKeywordsProvider: m, + OutputFormatsProvider: page.NopPage, + ResourceTypeProvider: pageTypesProvider, + MediaTypeProvider: pageTypesProvider, + RefProvider: page.NopPage, + ShortcodeInfoProvider: page.NopPage, + LanguageProvider: m.s, + + InternalDependencies: m.s, + init: lazy.New(), + m: m, + s: m.s, + sWrapped: page.WrapSite(m.s), + }, + } + + if m.f != nil { + gi, err := m.s.h.gitInfoForPage(ps) + if err != nil { + return nil, fmt.Errorf("failed to load Git data: %w", err) + } + ps.gitInfo = gi + owners, err := m.s.h.codeownersForPage(ps) + if err != nil { + return nil, fmt.Errorf("failed to load CODEOWNERS: %w", err) + } + ps.codeowners = owners + } + + ps.pageMenus = &pageMenus{p: ps} + ps.PageMenusProvider = ps.pageMenus + ps.GetPageProvider = pageSiteAdapter{s: m.s, p: ps} + ps.GitInfoProvider = ps + ps.TranslationsProvider = ps + ps.ResourceDataProvider = &pageData{pageState: ps} + ps.RawContentProvider = ps + ps.ChildCareProvider = ps + ps.TreeProvider = pageTree{p: ps} + ps.Eqer = ps + ps.TranslationKeyProvider = ps + ps.ShortcodeInfoProvider = ps + ps.AlternativeOutputFormatsProvider = ps + + if err := ps.setMetaPre(); err != nil { + return nil, ps.wrapError(err) + } + + if err := ps.initLazyProviders(); err != nil { + return nil, ps.wrapError(err) + } + return ps, nil + }() + // Make sure to evict any cached and now stale data. + if err != nil { + m.MarkStale() + } return ps, err } - -// Used by the legacy 404, sitemap and robots.txt rendering -func newPageStandalone(m *pageMeta, f output.Format) (*pageState, error) { - m.configuredOutputFormats = output.Formats{f} - m.standalone = true - p, err := newPageFromMeta(nil, nil, nil, m) - if err != nil { - return nil, err - } - - if err := p.initPage(); err != nil { - return nil, err - } - - return p, nil -} - -type pageDeprecatedWarning struct { - p *pageState -} - -func (p *pageDeprecatedWarning) IsDraft() bool { return p.p.m.draft } -func (p *pageDeprecatedWarning) Hugo() hugo.HugoInfo { return p.p.s.Hugo() } -func (p *pageDeprecatedWarning) LanguagePrefix() string { return p.p.s.GetLanguagePrefix() } -func (p *pageDeprecatedWarning) GetParam(key string) any { - return p.p.m.params[strings.ToLower(key)] -} - -func (p *pageDeprecatedWarning) RSSLink() template.URL { - f := p.p.OutputFormats().Get("RSS") - if f == nil { - return "" - } - return template.URL(f.Permalink()) -} - -func (p *pageDeprecatedWarning) URL() string { - if p.p.IsPage() && p.p.m.urlPaths.URL != "" { - // This is the url set in front matter - return p.p.m.urlPaths.URL - } - // Fall back to the relative permalink. 
- return p.p.RelPermalink() -} diff --git a/hugolib/page__output.go b/hugolib/page__output.go index 21f58e795..6fae10740 100644 --- a/hugolib/page__output.go +++ b/hugolib/page__output.go @@ -14,6 +14,7 @@ package hugolib import ( + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/output" "github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/resources/resource" @@ -23,7 +24,8 @@ func newPageOutput( ps *pageState, pp pagePaths, f output.Format, - render bool) *pageOutput { + render bool, +) *pageOutput { var targetPathsProvider targetPathsHolder var linksProvider resource.ResourceLinksProvider @@ -43,6 +45,11 @@ func newPageOutput( paginatorProvider = pag } + var dependencyManager identity.Manager = identity.NopManager + if ps.s.conf.Internal.Watch { + dependencyManager = identity.NewManager(ps.Path() + "/" + f.Name) + } + providers := struct { page.PaginatorProvider resource.ResourceLinksProvider @@ -54,6 +61,7 @@ func newPageOutput( } po := &pageOutput{ + p: ps, f: f, pagePerOutputProviders: providers, ContentProvider: page.NopPage, @@ -61,6 +69,7 @@ func newPageOutput( TableOfContentsProvider: page.NopPage, render: render, paginator: pag, + dependencyManagerOutput: dependencyManager, } return po @@ -69,6 +78,8 @@ func newPageOutput( // We create a pageOutput for every output format combination, even if this // particular page isn't configured to be rendered to that format. type pageOutput struct { + p *pageState + // Set if this page isn't configured to be rendered to this format. render bool @@ -89,10 +100,39 @@ type pageOutput struct { page.RenderShortcodesProvider // May be nil. - cp *pageContentOutput + pco *pageContentOutput + + dependencyManagerOutput identity.Manager + + renderState int // Reset when it needs to be rendered again. + renderOnce bool // To make sure we at least try to render it once. } -func (p *pageOutput) initContentProvider(cp *pageContentOutput) { +func (po *pageOutput) incrRenderState() { + po.renderState++ + po.renderOnce = true +} + +// isRendered reports whether this output format or its content has been rendered. +func (po *pageOutput) isRendered() bool { + if po.renderState > 0 { + return true + } + if po.pco != nil && po.pco.contentRendered { + return true + } + return false +} + +func (po *pageOutput) IdentifierBase() string { + return po.p.f.Name +} + +func (po *pageOutput) GetDependencyManager() identity.Manager { + return po.dependencyManagerOutput +} + +func (p *pageOutput) setContentProvider(cp *pageContentOutput) { if cp == nil { return } @@ -101,12 +141,5 @@ func (p *pageOutput) initContentProvider(cp *pageContentOutput) { p.PageRenderProvider = cp p.TableOfContentsProvider = cp p.RenderShortcodesProvider = cp - p.cp = cp - -} - -func (p *pageOutput) enablePlaceholders() { - if p.cp != nil { - p.cp.enablePlaceholders() - } + p.pco = cp } diff --git a/hugolib/page__paginator.go b/hugolib/page__paginator.go index 6a1b4bfab..b6a778a21 100644 --- a/hugolib/page__paginator.go +++ b/hugolib/page__paginator.go @@ -16,7 +16,6 @@ package hugolib import ( "sync" - "github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/resources/kinds" "github.com/gohugoio/hugo/resources/page" ) @@ -71,8 +70,6 @@ func (p *pagePaginator) Paginate(seq any, options ...any) (*page.Pager, error) { } func (p *pagePaginator) Paginator(options ...any) (*page.Pager, error) { - defer herrors.Recover() - var initErr error p.init.Do(func() { pagerSize, err := page.ResolvePagerSize(p.source.s.Conf, options...) 
diff --git a/hugolib/page__paths.go b/hugolib/page__paths.go index 9a6caa05e..6e7980a6d 100644 --- a/hugolib/page__paths.go +++ b/hugolib/page__paths.go @@ -17,29 +17,34 @@ import ( "net/url" "strings" - "github.com/gohugoio/hugo/helpers" - "github.com/gohugoio/hugo/hugofs/files" + "github.com/gohugoio/hugo/output" "github.com/gohugoio/hugo/resources/kinds" "github.com/gohugoio/hugo/resources/page" ) -func newPagePaths( - s *Site, - p page.Page, - pm *pageMeta) (pagePaths, error) { - targetPathDescriptor, err := createTargetPathDescriptor(s, p, pm) +func newPagePaths(ps *pageState) (pagePaths, error) { + s := ps.s + pm := ps.m + + targetPathDescriptor, err := createTargetPathDescriptor(ps) if err != nil { return pagePaths{}, err } - outputFormats := pm.outputFormats() - if len(outputFormats) == 0 { - return pagePaths{}, nil - } + var outputFormats output.Formats - if pm.noRender() { - outputFormats = outputFormats[:1] + if ps.m.isStandalone() { + outputFormats = output.Formats{ps.m.standaloneOutputFormat} + } else { + outputFormats = pm.outputFormats() + if len(outputFormats) == 0 { + return pagePaths{}, nil + } + + if pm.noRender() { + outputFormats = outputFormats[:1] + } } pageOutputFormats := make(page.OutputFormats, len(outputFormats)) @@ -102,46 +107,35 @@ func (l pagePaths) OutputFormats() page.OutputFormats { return l.outputFormats } -func createTargetPathDescriptor(s *Site, p page.Page, pm *pageMeta) (page.TargetPathDescriptor, error) { - var ( - dir string - baseName string - contentBaseName string - ) - +func createTargetPathDescriptor(p *pageState) (page.TargetPathDescriptor, error) { + s := p.s d := s.Deps - var classifier files.ContentClass - - if !p.File().IsZero() { - dir = p.File().Dir() - baseName = p.File().TranslationBaseName() - contentBaseName = p.File().ContentBaseName() - classifier = p.File().FileInfo().Meta().Classifier - } - - if classifier == files.ContentClassLeaf { - // See https://github.com/gohugoio/hugo/issues/4870 - // A leaf bundle - dir = strings.TrimSuffix(dir, contentBaseName+helpers.FilePathSeparator) - baseName = contentBaseName - } - + pm := p.m alwaysInSubDir := p.Kind() == kinds.KindSitemap + pageInfoPage := p.PathInfo() + pageInfoCurrentSection := p.CurrentSection().PathInfo() + if p.s.Conf.DisablePathToLower() { + pageInfoPage = pageInfoPage.Unmormalized() + pageInfoCurrentSection = pageInfoCurrentSection.Unmormalized() + } + desc := page.TargetPathDescriptor{ PathSpec: d.PathSpec, Kind: p.Kind(), - Sections: p.SectionsEntries(), + Path: pageInfoPage, + Section: pageInfoCurrentSection, UglyURLs: s.h.Conf.IsUglyURLs(p.Section()), ForcePrefix: s.h.Conf.IsMultihost() || alwaysInSubDir, - Dir: dir, URL: pm.urlPaths.URL, } if pm.Slug() != "" { desc.BaseName = pm.Slug() + } else if pm.isStandalone() && pm.standaloneOutputFormat.BaseName != "" { + desc.BaseName = pm.standaloneOutputFormat.BaseName } else { - desc.BaseName = baseName + desc.BaseName = pageInfoPage.BaseNameNoIdentifier() } desc.PrefixFilePath = s.getLanguageTargetPathLang(alwaysInSubDir) @@ -162,10 +156,10 @@ func createTargetPathDescriptor(s *Site, p page.Page, pm *pageMeta) (page.Target desc.ExpandedPermalink = opath - if !p.File().IsZero() { + if p.File() != nil { s.Log.Debugf("Set expanded permalink path for %s %s to %#v", p.Kind(), p.File().Path(), opath) } else { - s.Log.Debugf("Set expanded permalink path for %s in %v to %#v", p.Kind(), desc.Sections, opath) + s.Log.Debugf("Set expanded permalink path for %s in %v to %#v", p.Kind(), desc.Section.Path(), opath) } } diff --git 
a/hugolib/page__per_output.go b/hugolib/page__per_output.go index e806ca339..3d86cdece 100644 --- a/hugolib/page__per_output.go +++ b/hugolib/page__per_output.go @@ -16,13 +16,11 @@ package hugolib import ( "bytes" "context" + "errors" "fmt" "html/template" "strings" "sync" - "unicode/utf8" - - "errors" "github.com/gohugoio/hugo/common/text" "github.com/gohugoio/hugo/common/types/hstring" @@ -37,8 +35,6 @@ import ( "github.com/gohugoio/hugo/markup/converter" - "github.com/gohugoio/hugo/lazy" - bp "github.com/gohugoio/hugo/bufferpool" "github.com/gohugoio/hugo/tpl" @@ -70,235 +66,11 @@ var ( } ) -var pageContentOutputDependenciesID = identity.KeyValueIdentity{Key: "pageOutput", Value: "dependencies"} - -func newPageContentOutput(p *pageState, po *pageOutput) (*pageContentOutput, error) { - parent := p.init - - var dependencyTracker identity.Manager - if p.s.watching() { - dependencyTracker = identity.NewManager(pageContentOutputDependenciesID) - } - +func newPageContentOutput(po *pageOutput) (*pageContentOutput, error) { cp := &pageContentOutput{ - dependencyTracker: dependencyTracker, - p: p, - f: po.f, - renderHooks: &renderHooks{}, + po: po, + renderHooks: &renderHooks{}, } - - initToC := func(ctx context.Context) (err error) { - if p.cmap == nil { - // Nothing to do. - return nil - } - - if err := po.cp.initRenderHooks(); err != nil { - return err - } - - f := po.f - cp.contentPlaceholders, err = p.shortcodeState.prepareShortcodesForPage(ctx, p, f) - if err != nil { - return err - } - - ctxCallback := func(cp2 *pageContentOutput) { - cp.p.cmap.hasNonMarkdownShortcode = cp.p.cmap.hasNonMarkdownShortcode || cp2.p.cmap.hasNonMarkdownShortcode - // Merge content placeholders - for k, v := range cp2.contentPlaceholders { - cp.contentPlaceholders[k] = v - } - - if p.s.watching() { - for _, s := range cp2.p.shortcodeState.shortcodes { - for _, templ := range s.templs { - dependencyTracker.Add(templ.(identity.Manager)) - } - } - } - - // Transfer shortcode names so HasShortcode works for shortcodes from included pages. - cp.p.shortcodeState.transferNames(cp2.p.shortcodeState) - if cp2.p.pageOutputTemplateVariationsState.Load() == 2 { - cp.p.pageOutputTemplateVariationsState.Store(2) - } - } - - ctx = tpl.SetCallbackFunctionInContext(ctx, ctxCallback) - - var hasVariants bool - cp.workContent, hasVariants, err = p.contentToRender(ctx, p.source.parsed, p.cmap, cp.contentPlaceholders) - if err != nil { - return err - } - if hasVariants { - p.pageOutputTemplateVariationsState.Store(2) - } - - isHTML := cp.p.m.markup == "html" - - if !isHTML { - createAndSetToC := func(tocProvider converter.TableOfContentsProvider) { - cfg := p.s.ContentSpec.Converters.GetMarkupConfig() - cp.tableOfContents = tocProvider.TableOfContents() - cp.tableOfContentsHTML = template.HTML( - cp.tableOfContents.ToHTML( - cfg.TableOfContents.StartLevel, - cfg.TableOfContents.EndLevel, - cfg.TableOfContents.Ordered, - ), - ) - } - // If the converter supports doing the parsing separately, we do that. - parseResult, ok, err := po.contentRenderer.ParseContent(ctx, cp.workContent) - if err != nil { - return err - } - if ok { - // This is Goldmark. - // Store away the parse result for later use. - createAndSetToC(parseResult) - cp.astDoc = parseResult.Doc() - - return nil - } - - // This is Asciidoctor etc. 
- r, err := po.contentRenderer.ParseAndRenderContent(ctx, cp.workContent, true) - if err != nil { - return err - } - - cp.workContent = r.Bytes() - - if tocProvider, ok := r.(converter.TableOfContentsProvider); ok { - createAndSetToC(tocProvider) - } else { - tmpContent, tmpTableOfContents := helpers.ExtractTOC(cp.workContent) - cp.tableOfContentsHTML = helpers.BytesToHTML(tmpTableOfContents) - cp.tableOfContents = tableofcontents.Empty - cp.workContent = tmpContent - } - } - - return nil - - } - - initContent := func(ctx context.Context) (err error) { - - p.s.h.IncrContentRender() - - if p.cmap == nil { - // Nothing to do. - return nil - } - - if cp.astDoc != nil { - // The content is parsed, but not rendered. - r, ok, err := po.contentRenderer.RenderContent(ctx, cp.workContent, cp.astDoc) - if err != nil { - return err - } - if !ok { - return errors.New("invalid state: astDoc is set but RenderContent returned false") - } - - cp.workContent = r.Bytes() - } - - if p.cmap.hasNonMarkdownShortcode || cp.placeholdersEnabled { - // There are one or more replacement tokens to be replaced. - var hasShortcodeVariants bool - tokenHandler := func(ctx context.Context, token string) ([]byte, error) { - if token == tocShortcodePlaceholder { - // The Page's TableOfContents was accessed in a shortcode. - if cp.tableOfContentsHTML == "" { - cp.p.s.initInit(ctx, cp.initToC, cp.p) - } - return []byte(cp.tableOfContentsHTML), nil - } - renderer, found := cp.contentPlaceholders[token] - if found { - repl, more, err := renderer.renderShortcode(ctx) - if err != nil { - return nil, err - } - hasShortcodeVariants = hasShortcodeVariants || more - return repl, nil - } - // This should never happen. - return nil, fmt.Errorf("unknown shortcode token %q", token) - } - - cp.workContent, err = expandShortcodeTokens(ctx, cp.workContent, tokenHandler) - if err != nil { - return err - } - if hasShortcodeVariants { - p.pageOutputTemplateVariationsState.Store(2) - } - } - - if cp.p.source.hasSummaryDivider { - isHTML := cp.p.m.markup == "html" - if isHTML { - src := p.source.parsed.Input() - - // Use the summary sections as they are provided by the user. - if p.source.posSummaryEnd != -1 { - cp.summary = helpers.BytesToHTML(src[p.source.posMainContent:p.source.posSummaryEnd]) - } - - if cp.p.source.posBodyStart != -1 { - cp.workContent = src[cp.p.source.posBodyStart:] - } - - } else { - summary, content, err := splitUserDefinedSummaryAndContent(cp.p.m.markup, cp.workContent) - if err != nil { - cp.p.s.Log.Errorf("Failed to set user defined summary for page %q: %s", cp.p.pathOrTitle(), err) - } else { - cp.workContent = content - cp.summary = helpers.BytesToHTML(summary) - } - } - } else if cp.p.m.summary != "" { - b, err := po.contentRenderer.ParseAndRenderContent(ctx, []byte(cp.p.m.summary), false) - if err != nil { - return err - } - html := cp.p.s.ContentSpec.TrimShortHTML(b.Bytes()) - cp.summary = helpers.BytesToHTML(html) - } - - cp.content = helpers.BytesToHTML(cp.workContent) - - return nil - } - - cp.initToC = parent.Branch(func(ctx context.Context) (any, error) { - return nil, initToC(ctx) - }) - - // There may be recursive loops in shortcodes and render hooks. 
- cp.initMain = cp.initToC.BranchWithTimeout(p.s.conf.C.Timeout, func(ctx context.Context) (any, error) { - return nil, initContent(ctx) - }) - - cp.initPlain = cp.initMain.Branch(func(context.Context) (any, error) { - cp.plain = tpl.StripHTML(string(cp.content)) - cp.plainWords = strings.Fields(cp.plain) - cp.setWordCounts(p.m.isCJKLanguage) - - if err := cp.setAutoSummary(); err != nil { - return err, nil - } - - return nil, nil - }) - return cp, nil } @@ -309,86 +81,51 @@ type renderHooks struct { // pageContentOutput represents the Page content for a given output format. type pageContentOutput struct { - f output.Format + po *pageOutput - p *pageState - - // Lazy load dependencies - initToC *lazy.Init - initMain *lazy.Init - initPlain *lazy.Init - - placeholdersEnabled bool - placeholdersEnabledInit sync.Once + contentRenderedVersion int // Incremented on reset. + contentRendered bool // Set on content render. // Renders Markdown hooks. renderHooks *renderHooks - - workContent []byte - dependencyTracker identity.Manager // Set in server mode. - - // Temporary storage of placeholders mapped to their content. - // These are shortcodes etc. Some of these will need to be replaced - // after any markup is rendered, so they share a common prefix. - contentPlaceholders map[string]shortcodeRenderer - - // Content sections - content template.HTML - summary template.HTML - tableOfContents *tableofcontents.Fragments - tableOfContentsHTML template.HTML - // For Goldmark we split Parse and Render. - astDoc any - - truncated bool - - plainWords []string - plain string - fuzzyWordCount int - wordCount int - readingTime int } -func (p *pageContentOutput) trackDependency(id identity.Provider) { - if p.dependencyTracker != nil { - p.dependencyTracker.Add(id) +func (pco *pageContentOutput) trackDependency(idp identity.IdentityProvider) { + pco.po.p.dependencyManagerOutput.AddIdentity(idp.GetIdentity()) +} + +func (pco *pageContentOutput) Reset() { + if pco == nil { + return + } + pco.contentRenderedVersion++ + pco.contentRendered = false + pco.renderHooks = &renderHooks{} +} + +func (pco *pageContentOutput) Fragments(ctx context.Context) *tableofcontents.Fragments { + return pco.po.p.content.mustContentToC(ctx, pco).tableOfContents +} + +func (pco *pageContentOutput) RenderShortcodes(ctx context.Context) (template.HTML, error) { + content := pco.po.p.content + source, err := content.contentSource() + if err != nil { + return "", err + } + ct, err := content.contentToC(ctx, pco) + if err != nil { + return "", err } -} - -func (p *pageContentOutput) Reset() { - if p.dependencyTracker != nil { - p.dependencyTracker.Reset() - } - p.initToC.Reset() - p.initMain.Reset() - p.initPlain.Reset() - p.renderHooks = &renderHooks{} -} - -func (p *pageContentOutput) Fragments(ctx context.Context) *tableofcontents.Fragments { - p.p.s.initInit(ctx, p.initToC, p.p) - if p.tableOfContents == nil { - return tableofcontents.Empty - } - return p.tableOfContents -} - -func (p *pageContentOutput) RenderShortcodes(ctx context.Context) (template.HTML, error) { - p.p.s.initInit(ctx, p.initToC, p.p) - source := p.p.source.parsed.Input() - renderedShortcodes := p.contentPlaceholders var insertPlaceholders bool var hasVariants bool - var cb func(*pageContentOutput) - if v := tpl.GetCallbackFunctionFromContext(ctx); v != nil { - if fn, ok := v.(func(*pageContentOutput)); ok { - insertPlaceholders = true - cb = fn - } + cb := setGetContentCallbackInContext.Get(ctx) + if cb != nil { + insertPlaceholders = true } c := make([]byte, 0, 
len(source)+(len(source)/10)) - for _, it := range p.p.cmap.items { + for _, it := range content.parseInfo.itemsStep2 { switch v := it.(type) { case pageparser.Item: c = append(c, source[v.Pos():v.Pos()+len(v.Val(source))]...) @@ -397,7 +134,7 @@ func (p *pageContentOutput) RenderShortcodes(ctx context.Context) (template.HTML case *shortcode: if !insertPlaceholders || !v.insertPlaceholder() { // Insert the rendered shortcode. - renderedShortcode, found := renderedShortcodes[v.placeholder] + renderedShortcode, found := ct.contentPlaceholders[v.placeholder] if !found { // This should never happen. panic(fmt.Sprintf("rendered shortcode %q not found", v.placeholder)) @@ -421,73 +158,78 @@ func (p *pageContentOutput) RenderShortcodes(ctx context.Context) (template.HTML } if hasVariants { - p.p.pageOutputTemplateVariationsState.Store(2) + pco.po.p.pageOutputTemplateVariationsState.Add(1) } if cb != nil { - cb(p) + cb(pco, ct) } return helpers.BytesToHTML(c), nil } -func (p *pageContentOutput) TableOfContents(ctx context.Context) template.HTML { - p.p.s.initInit(ctx, p.initToC, p.p) - return p.tableOfContentsHTML +func (pco *pageContentOutput) Content(ctx context.Context) (any, error) { + r, err := pco.po.p.content.contentRendered(ctx, pco) + return r.content, err } -func (p *pageContentOutput) Content(ctx context.Context) (any, error) { - p.p.s.initInit(ctx, p.initMain, p.p) - return p.content, nil -} - -func (p *pageContentOutput) FuzzyWordCount(ctx context.Context) int { - p.p.s.initInit(ctx, p.initPlain, p.p) - return p.fuzzyWordCount +func (pco *pageContentOutput) TableOfContents(ctx context.Context) template.HTML { + return pco.po.p.content.mustContentToC(ctx, pco).tableOfContentsHTML } func (p *pageContentOutput) Len(ctx context.Context) int { - p.p.s.initInit(ctx, p.initMain, p.p) - return len(p.content) + return len(p.mustContentRendered(ctx).content) } -func (p *pageContentOutput) Plain(ctx context.Context) string { - p.p.s.initInit(ctx, p.initPlain, p.p) - return p.plain -} - -func (p *pageContentOutput) PlainWords(ctx context.Context) []string { - p.p.s.initInit(ctx, p.initPlain, p.p) - return p.plainWords -} - -func (p *pageContentOutput) ReadingTime(ctx context.Context) int { - p.p.s.initInit(ctx, p.initPlain, p.p) - return p.readingTime -} - -func (p *pageContentOutput) Summary(ctx context.Context) template.HTML { - p.p.s.initInit(ctx, p.initMain, p.p) - if !p.p.source.hasSummaryDivider { - p.p.s.initInit(ctx, p.initPlain, p.p) +func (pco *pageContentOutput) mustContentRendered(ctx context.Context) contentSummary { + r, err := pco.po.p.content.contentRendered(ctx, pco) + if err != nil { + pco.fail(err) } - return p.summary + return r } -func (p *pageContentOutput) Truncated(ctx context.Context) bool { - if p.p.truncated { - return true +func (pco *pageContentOutput) mustContentPlain(ctx context.Context) contentPlainPlainWords { + r, err := pco.po.p.content.contentPlain(ctx, pco) + if err != nil { + pco.fail(err) } - p.p.s.initInit(ctx, p.initPlain, p.p) - return p.truncated + return r } -func (p *pageContentOutput) WordCount(ctx context.Context) int { - p.p.s.initInit(ctx, p.initPlain, p.p) - return p.wordCount +func (pco *pageContentOutput) fail(err error) { + pco.po.p.s.h.FatalError(pco.po.p.wrapError(err)) } -func (p *pageContentOutput) RenderString(ctx context.Context, args ...any) (template.HTML, error) { +func (pco *pageContentOutput) Plain(ctx context.Context) string { + return pco.mustContentPlain(ctx).plain +} + +func (pco *pageContentOutput) PlainWords(ctx 
context.Context) []string { + return pco.mustContentPlain(ctx).plainWords +} + +func (pco *pageContentOutput) ReadingTime(ctx context.Context) int { + return pco.mustContentPlain(ctx).readingTime +} + +func (pco *pageContentOutput) WordCount(ctx context.Context) int { + return pco.mustContentPlain(ctx).wordCount +} + +func (pco *pageContentOutput) FuzzyWordCount(ctx context.Context) int { + return pco.mustContentPlain(ctx).fuzzyWordCount +} + +func (pco *pageContentOutput) Summary(ctx context.Context) template.HTML { + return pco.mustContentPlain(ctx).summary +} + +func (pco *pageContentOutput) Truncated(ctx context.Context) bool { + return pco.mustContentPlain(ctx).summaryTruncated +} + +func (pco *pageContentOutput) RenderString(ctx context.Context, args ...any) (template.HTML, error) { if len(args) < 1 || len(args) > 2 { return "", errors.New("want 1 or 2 arguments") } @@ -523,71 +265,67 @@ func (p *pageContentOutput) RenderString(ctx context.Context, args ...any) (temp return "", err } - if err = p.initRenderHooks(); err != nil { + if err = pco.initRenderHooks(); err != nil { return "", err } - conv := p.p.getContentConverter() - if opts.Markup != "" && opts.Markup != p.p.m.markup { + conv := pco.po.p.getContentConverter() + if opts.Markup != "" && opts.Markup != pco.po.p.m.markup { var err error - // TODO(bep) consider cache - conv, err = p.p.m.newContentConverter(p.p, opts.Markup) + conv, err = pco.po.p.m.newContentConverter(pco.po.p, opts.Markup) if err != nil { - return "", p.p.wrapError(err) + return "", pco.po.p.wrapError(err) } } var rendered []byte + parseInfo := &contentParseInfo{ + pid: pco.po.p.pid, + } + if pageparser.HasShortcode(contentToRender) { + contentToRenderb := []byte(contentToRender) // String contains a shortcode. - parsed, err := pageparser.ParseMain(strings.NewReader(contentToRender), pageparser.Config{}) - if err != nil { - return "", err - } - pm := &pageContentMap{ - items: make([]any, 0, 20), - } - s := newShortcodeHandler(p.p, p.p.s) - - if err := p.p.mapContentForResult( - parsed, - s, - pm, - opts.Markup, - nil, - ); err != nil { - return "", err - } - - placeholders, err := s.prepareShortcodesForPage(ctx, p.p, p.f) + parseInfo.itemsStep1, err = pageparser.ParseBytesMain(contentToRenderb, pageparser.Config{}) if err != nil { return "", err } - contentToRender, hasVariants, err := p.p.contentToRender(ctx, parsed, pm, placeholders) + s := newShortcodeHandler(pco.po.p.pathOrTitle(), pco.po.p.s) + if err := parseInfo.mapItems(contentToRenderb, s); err != nil { + return "", err + } + + placeholders, err := s.prepareShortcodesForPage(ctx, pco.po.p, pco.po.f, true) + if err != nil { + return "", err + } + + contentToRender, hasVariants, err := parseInfo.contentToRender(ctx, contentToRenderb, placeholders) if err != nil { return "", err } if hasVariants { - p.p.pageOutputTemplateVariationsState.Store(2) + pco.po.p.pageOutputTemplateVariationsState.Add(1) } - b, err := p.renderContentWithConverter(ctx, conv, contentToRender, false) + b, err := pco.renderContentWithConverter(ctx, conv, contentToRender, false) if err != nil { - return "", p.p.wrapError(err) + return "", pco.po.p.wrapError(err) } rendered = b.Bytes() - if pm.hasNonMarkdownShortcode || p.placeholdersEnabled { + if parseInfo.hasNonMarkdownShortcode { var hasShortcodeVariants bool tokenHandler := func(ctx context.Context, token string) ([]byte, error) { if token == tocShortcodePlaceholder { - // The Page's TableOfContents was accessed in a shortcode. 
- if p.tableOfContentsHTML == "" { - p.p.s.initInit(ctx, p.initToC, p.p) + toc, err := pco.po.p.content.contentToC(ctx, pco) + if err != nil { + return nil, err } - return []byte(p.tableOfContentsHTML), nil + // The Page's TableOfContents was accessed in a shortcode. + return []byte(toc.tableOfContentsHTML), nil } renderer, found := placeholders[token] if found { @@ -607,17 +345,17 @@ func (p *pageContentOutput) RenderString(ctx context.Context, args ...any) (temp return "", err } if hasShortcodeVariants { - p.p.pageOutputTemplateVariationsState.Store(2) + pco.po.p.pageOutputTemplateVariationsState.Add(1) } } // We need a consolidated view in $page.HasShortcode - p.p.shortcodeState.transferNames(s) + pco.po.p.content.shortcodeState.transferNames(s) } else { - c, err := p.renderContentWithConverter(ctx, conv, []byte(contentToRender), false) + c, err := pco.renderContentWithConverter(ctx, conv, []byte(contentToRender), false) if err != nil { - return "", p.p.wrapError(err) + return "", pco.po.p.wrapError(err) } rendered = c.Bytes() @@ -626,48 +364,41 @@ func (p *pageContentOutput) RenderString(ctx context.Context, args ...any) (temp if opts.Display == "inline" { // We may have to rethink this in the future when we get other // renderers. - rendered = p.p.s.ContentSpec.TrimShortHTML(rendered) + rendered = pco.po.p.s.ContentSpec.TrimShortHTML(rendered) } return template.HTML(string(rendered)), nil } -func (p *pageContentOutput) RenderWithTemplateInfo(ctx context.Context, info tpl.Info, layout ...string) (template.HTML, error) { - p.p.addDependency(info) - return p.Render(ctx, layout...) -} - -func (p *pageContentOutput) Render(ctx context.Context, layout ...string) (template.HTML, error) { +func (pco *pageContentOutput) Render(ctx context.Context, layout ...string) (template.HTML, error) { if len(layout) == 0 { return "", errors.New("no layout given") } - templ, found, err := p.p.resolveTemplate(layout...) + templ, found, err := pco.po.p.resolveTemplate(layout...) if err != nil { - return "", p.p.wrapError(err) + return "", pco.po.p.wrapError(err) } if !found { return "", nil } - p.p.addDependency(templ.(tpl.Info)) - // Make sure to send the *pageState and not the *pageContentOutput to the template. 
- res, err := executeToString(ctx, p.p.s.Tmpl(), templ, p.p) + res, err := executeToString(ctx, pco.po.p.s.Tmpl(), templ, pco.po.p) if err != nil { - return "", p.p.wrapError(fmt.Errorf("failed to execute template %s: %w", templ.Name(), err)) + return "", pco.po.p.wrapError(fmt.Errorf("failed to execute template %s: %w", templ.Name(), err)) } return template.HTML(res), nil } -func (p *pageContentOutput) initRenderHooks() error { - if p == nil { +func (pco *pageContentOutput) initRenderHooks() error { + if pco == nil { return nil } - p.renderHooks.init.Do(func() { - if p.p.pageOutputTemplateVariationsState.Load() == 0 { - p.p.pageOutputTemplateVariationsState.Store(1) + pco.renderHooks.init.Do(func() { + if pco.po.p.pageOutputTemplateVariationsState.Load() == 0 { + pco.po.p.pageOutputTemplateVariationsState.Store(1) } type cacheKey struct { @@ -680,14 +411,15 @@ func (p *pageContentOutput) initRenderHooks() error { var renderCacheMu sync.Mutex resolvePosition := func(ctx any) text.Position { + source := pco.po.p.content.mustSource() var offset int switch v := ctx.(type) { case hooks.CodeblockContext: - offset = bytes.Index(p.p.source.parsed.Input(), []byte(v.Inner())) + offset = bytes.Index(source, []byte(v.Inner())) } - pos := p.p.posFromInput(p.p.source.parsed.Input(), offset) + pos := pco.po.p.posFromInput(source, offset) if pos.LineNumber > 0 { // Move up to the code fence delimiter. @@ -698,16 +430,16 @@ func (p *pageContentOutput) initRenderHooks() error { return pos } - p.renderHooks.getRenderer = func(tp hooks.RendererType, id any) any { + pco.renderHooks.getRenderer = func(tp hooks.RendererType, id any) any { renderCacheMu.Lock() defer renderCacheMu.Unlock() - key := cacheKey{tp: tp, id: id, f: p.f} + key := cacheKey{tp: tp, id: id, f: pco.po.f} if r, ok := renderCache[key]; ok { return r } - layoutDescriptor := p.p.getLayoutDescriptor() + layoutDescriptor := pco.po.p.getLayoutDescriptor() layoutDescriptor.RenderingHook = true layoutDescriptor.LayoutOverride = false layoutDescriptor.Layout = "" @@ -733,19 +465,19 @@ func (p *pageContentOutput) initRenderHooks() error { } getHookTemplate := func(f output.Format) (tpl.Template, bool) { - templ, found, err := p.p.s.Tmpl().LookupLayout(layoutDescriptor, f) + templ, found, err := pco.po.p.s.Tmpl().LookupLayout(layoutDescriptor, f) if err != nil { panic(err) } return templ, found } - templ, found1 := getHookTemplate(p.f) + templ, found1 := getHookTemplate(pco.po.f) - if p.p.reusePageOutputContent() { + if pco.po.p.reusePageOutputContent() { // Check if some of the other output formats would give a different template. - for _, f := range p.p.s.renderFormats { - if f.Name == p.f.Name { + for _, f := range pco.po.p.s.renderFormats { + if f.Name == pco.po.f.Name { continue } templ2, found2 := getHookTemplate(f) @@ -757,7 +489,7 @@ func (p *pageContentOutput) initRenderHooks() error { } if templ != templ2 { - p.p.pageOutputTemplateVariationsState.Store(2) + pco.po.p.pageOutputTemplateVariationsState.Add(1) break } } @@ -765,8 +497,8 @@ func (p *pageContentOutput) initRenderHooks() error { } if !found1 { if tp == hooks.CodeBlockRendererType { - // No user provided tempplate for code blocks, so we use the native Go code version -- which is also faster. - r := p.p.s.ContentSpec.Converters.GetHighlighter() + // No user provided template for code blocks, so we use the native Go version -- which is also faster. 
+ r := pco.po.p.s.ContentSpec.Converters.GetHighlighter() renderCache[key] = r return r } @@ -774,8 +506,7 @@ func (p *pageContentOutput) initRenderHooks() error { } r := hookRendererTemplate{ - templateHandler: p.p.s.Tmpl(), - SearchProvider: templ.(identity.SearchProvider), + templateHandler: pco.po.p.s.Tmpl(), templ: templ, resolvePosition: resolvePosition, } @@ -787,31 +518,11 @@ func (p *pageContentOutput) initRenderHooks() error { return nil } -func (p *pageContentOutput) setAutoSummary() error { - if p.p.source.hasSummaryDivider || p.p.m.summary != "" { - return nil - } - - var summary string - var truncated bool - - if p.p.m.isCJKLanguage { - summary, truncated = p.p.s.ContentSpec.TruncateWordsByRune(p.plainWords) - } else { - summary, truncated = p.p.s.ContentSpec.TruncateWordsToWholeSentence(p.plain) - } - p.summary = template.HTML(summary) - - p.truncated = truncated - - return nil -} - -func (cp *pageContentOutput) getContentConverter() (converter.Converter, error) { - if err := cp.initRenderHooks(); err != nil { +func (pco *pageContentOutput) getContentConverter() (converter.Converter, error) { + if err := pco.initRenderHooks(); err != nil { return nil, err } - return cp.p.getContentConverter(), nil + return pco.po.p.getContentConverter(), nil } func (cp *pageContentOutput) ParseAndRenderContent(ctx context.Context, content []byte, renderTOC bool) (converter.ResultRender, error) { @@ -822,8 +533,8 @@ func (cp *pageContentOutput) ParseAndRenderContent(ctx context.Context, content return cp.renderContentWithConverter(ctx, c, content, renderTOC) } -func (cp *pageContentOutput) ParseContent(ctx context.Context, content []byte) (converter.ResultParse, bool, error) { - c, err := cp.getContentConverter() +func (pco *pageContentOutput) ParseContent(ctx context.Context, content []byte) (converter.ResultParse, bool, error) { + c, err := pco.getContentConverter() if err != nil { return nil, false, err } @@ -835,14 +546,14 @@ func (cp *pageContentOutput) ParseContent(ctx context.Context, content []byte) ( Ctx: ctx, Src: content, RenderTOC: true, - GetRenderer: cp.renderHooks.getRenderer, + GetRenderer: pco.renderHooks.getRenderer, } r, err := p.Parse(rctx) return r, ok, err - } -func (cp *pageContentOutput) RenderContent(ctx context.Context, content []byte, doc any) (converter.ResultRender, bool, error) { - c, err := cp.getContentConverter() + +func (pco *pageContentOutput) RenderContent(ctx context.Context, content []byte, doc any) (converter.ResultRender, bool, error) { + c, err := pco.getContentConverter() if err != nil { return nil, false, err } @@ -854,75 +565,23 @@ func (cp *pageContentOutput) RenderContent(ctx context.Context, content []byte, Ctx: ctx, Src: content, RenderTOC: true, - GetRenderer: cp.renderHooks.getRenderer, + GetRenderer: pco.renderHooks.getRenderer, } r, err := p.Render(rctx, doc) - if err == nil { - if ids, ok := r.(identity.IdentitiesProvider); ok { - for _, v := range ids.GetIdentities() { - cp.trackDependency(v) - } - } - } - return r, ok, err } -func (cp *pageContentOutput) renderContentWithConverter(ctx context.Context, c converter.Converter, content []byte, renderTOC bool) (converter.ResultRender, error) { +func (pco *pageContentOutput) renderContentWithConverter(ctx context.Context, c converter.Converter, content []byte, renderTOC bool) (converter.ResultRender, error) { r, err := c.Convert( converter.RenderContext{ Ctx: ctx, Src: content, RenderTOC: renderTOC, - GetRenderer: cp.renderHooks.getRenderer, + GetRenderer: pco.renderHooks.getRenderer, }) - - if 
err == nil { - if ids, ok := r.(identity.IdentitiesProvider); ok { - for _, v := range ids.GetIdentities() { - cp.trackDependency(v) - } - } - } - return r, err } -func (p *pageContentOutput) setWordCounts(isCJKLanguage bool) { - if isCJKLanguage { - p.wordCount = 0 - for _, word := range p.plainWords { - runeCount := utf8.RuneCountInString(word) - if len(word) == runeCount { - p.wordCount++ - } else { - p.wordCount += runeCount - } - } - } else { - p.wordCount = helpers.TotalWords(p.plain) - } - - // TODO(bep) is set in a test. Fix that. - if p.fuzzyWordCount == 0 { - p.fuzzyWordCount = (p.wordCount + 100) / 100 * 100 - } - - if isCJKLanguage { - p.readingTime = (p.wordCount + 500) / 501 - } else { - p.readingTime = (p.wordCount + 212) / 213 - } -} - -// A callback to signal that we have inserted a placeholder into the rendered -// content. This avoids doing extra replacement work. -func (p *pageContentOutput) enablePlaceholders() { - p.placeholdersEnabledInit.Do(func() { - p.placeholdersEnabled = true - }) -} - // these will be shifted out when rendering a given output format. type pagePerOutputProviders interface { targetPather diff --git a/hugolib/page__tree.go b/hugolib/page__tree.go index 8b02667f1..e54d596bc 100644 --- a/hugolib/page__tree.go +++ b/hugolib/page__tree.go @@ -14,169 +14,121 @@ package hugolib import ( - "path" + "context" + "fmt" "strings" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/common/types" + "github.com/gohugoio/hugo/hugolib/doctree" "github.com/gohugoio/hugo/resources/kinds" "github.com/gohugoio/hugo/resources/page" ) +// pageTree holds the treen navigational method for a Page. type pageTree struct { p *pageState } -func (pt pageTree) IsAncestor(other any) (bool, error) { - if pt.p == nil { - return false, nil - } - - tp, ok := other.(treeRefProvider) +func (pt pageTree) IsAncestor(other any) bool { + n, ok := other.(contentNodeI) if !ok { - return false, nil + return false } - ref1, ref2 := pt.p.getTreeRef(), tp.getTreeRef() - if ref1 != nil && ref2 != nil && ref1.key == ref2.key { - return false, nil + if n.Path() == pt.p.Path() { + return false } - if ref1 != nil && ref1.key == "/" { - return true, nil + return strings.HasPrefix(n.Path(), paths.AddTrailingSlash(pt.p.Path())) +} + +func (pt pageTree) IsDescendant(other any) bool { + n, ok := other.(contentNodeI) + if !ok { + return false } - if ref1 == nil || ref2 == nil { - if ref1 == nil { - // A 404 or other similar standalone page. 
- return false, nil - } - - return ref1.n.p.IsHome(), nil + if n.Path() == pt.p.Path() { + return false } - if strings.HasPrefix(ref2.key, ref1.key) { - return true, nil - } - - return strings.HasPrefix(ref2.key, ref1.key+cmBranchSeparator), nil + return strings.HasPrefix(pt.p.Path(), paths.AddTrailingSlash(n.Path())) } func (pt pageTree) CurrentSection() page.Page { - p := pt.p - - if p.IsHome() || p.IsSection() { - return p + if kinds.IsBranch(pt.p.Kind()) { + return pt.p } - return p.Parent() -} - -func (pt pageTree) IsDescendant(other any) (bool, error) { - if pt.p == nil { - return false, nil + dir := pt.p.m.pathInfo.Dir() + if dir == "/" { + return pt.p.s.home } - tp, ok := other.(treeRefProvider) - if !ok { - return false, nil + _, n := pt.p.s.pageMap.treePages.LongestPrefix(dir, true, func(n contentNodeI) bool { return n.isContentNodeBranch() }) + if n != nil { + return n.(page.Page) } - ref1, ref2 := pt.p.getTreeRef(), tp.getTreeRef() - if ref1 != nil && ref2 != nil && ref1.key == ref2.key { - return false, nil - } - - if ref2 != nil && ref2.key == "/" { - return true, nil - } - - if ref1 == nil || ref2 == nil { - if ref2 == nil { - // A 404 or other similar standalone page. - return false, nil - } - - return ref2.n.p.IsHome(), nil - } - - if strings.HasPrefix(ref1.key, ref2.key) { - return true, nil - } - - return strings.HasPrefix(ref1.key, ref2.key+cmBranchSeparator), nil + panic(fmt.Sprintf("CurrentSection not found for %q in lang %s", pt.p.Path(), pt.p.Lang())) } func (pt pageTree) FirstSection() page.Page { - ref := pt.p.getTreeRef() - if ref == nil { + s := pt.p.m.pathInfo.Dir() + if s == "/" { return pt.p.s.home } - key := ref.key - if !ref.isSection() { - key = path.Dir(key) - } - - _, b := ref.m.getFirstSection(key) - if b == nil { - return nil - } - return b.p -} - -func (pt pageTree) InSection(other any) (bool, error) { - if pt.p == nil || types.IsNil(other) { - return false, nil - } - - tp, ok := other.(treeRefProvider) - if !ok { - return false, nil - } - - ref1, ref2 := pt.p.getTreeRef(), tp.getTreeRef() - - if ref1 == nil || ref2 == nil { - if ref1 == nil { - // A 404 or other similar standalone page. 
- return false, nil + for { + k, n := pt.p.s.pageMap.treePages.LongestPrefix(s, true, func(n contentNodeI) bool { return n.isContentNodeBranch() }) + if n == nil { + return nil } - return ref1.n.p.IsHome(), nil + + // /blog + if strings.Count(k, "/") < 2 { + return n.(page.Page) + } + + if s == "" { + return nil + } + + s = paths.Dir(s) + } - - s1, _ := ref1.getCurrentSection() - s2, _ := ref2.getCurrentSection() - - return s1 == s2, nil } -func (pt pageTree) Page() page.Page { - return pt.p +func (pt pageTree) InSection(other any) bool { + if pt.p == nil || types.IsNil(other) { + return false + } + + p, ok := other.(page.Page) + if !ok { + return false + } + + return pt.CurrentSection() == p.CurrentSection() } func (pt pageTree) Parent() page.Page { - p := pt.p - - if p.parent != nil { - return p.parent - } - if pt.p.IsHome() { return nil } - tree := p.getTreeRef() + dir := pt.p.m.pathInfo.ContainerDir() - if tree == nil || pt.p.Kind() == kinds.KindTaxonomy { + if dir == "" { return pt.p.s.home } - _, b := tree.getSection() - if b == nil { - return nil + _, n := pt.p.s.pageMap.treePages.LongestPrefix(dir, true, nil) + if n != nil { + return n.(page.Page) } - - return b.p + return nil } func (pt pageTree) Ancestors() page.Pages { @@ -190,9 +142,57 @@ func (pt pageTree) Ancestors() page.Pages { } func (pt pageTree) Sections() page.Pages { - if pt.p.bucket == nil { - return nil + var ( + pages page.Pages + currentBranchPrefix string + s = pt.p.Path() + prefix = paths.AddTrailingSlash(s) + tree = pt.p.s.pageMap.treePages + ) + + w := &doctree.NodeShiftTreeWalker[contentNodeI]{ + Tree: tree, + Prefix: prefix, + } + w.Handle = func(ss string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + if !n.isContentNodeBranch() { + return false, nil + } + if currentBranchPrefix == "" || !strings.HasPrefix(ss, currentBranchPrefix) { + if p, ok := n.(*pageState); ok && p.IsSection() && p.m.shouldList(false) && p.Parent() == pt.p { + pages = append(pages, p) + } else { + w.SkipPrefix(ss + "/") + } + } + currentBranchPrefix = ss + "/" + return false, nil } - return pt.p.bucket.getSections() + if err := w.Walk(context.Background()); err != nil { + panic(err) + } + + page.SortByDefault(pages) + return pages +} + +func (pt pageTree) Page() page.Page { + return pt.p +} + +func (p pageTree) SectionsEntries() []string { + sp := p.SectionsPath() + if sp == "/" { + return nil + } + entries := strings.Split(sp[1:], "/") + if len(entries) == 0 { + return nil + } + return entries +} + +func (p pageTree) SectionsPath() string { + return p.CurrentSection().Path() } diff --git a/hugolib/page_test.go b/hugolib/page_test.go index ca6164d2c..f5ff95f3c 100644 --- a/hugolib/page_test.go +++ b/hugolib/page_test.go @@ -447,6 +447,44 @@ func TestPageWithDelimiterForMarkdownThatCrossesBorder(t *testing.T) { } } +func TestPageDatesTerms(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "http://example.com/" +-- content/p1.md -- +--- +title: p1 +date: 2022-01-15 +lastMod: 2022-01-16 +tags: ["a", "b"] +categories: ["c", "d"] +--- +p1 +-- content/p2.md -- +--- +title: p2 +date: 2017-01-16 +lastMod: 2017-01-17 +tags: ["a", "c"] +categories: ["c", "e"] +--- +p2 +-- layouts/_default/list.html -- +{{ .Title }}|Date: {{ .Date.Format "2006-01-02" }}|Lastmod: {{ .Lastmod.Format "2006-01-02" }}| + +` + b := Test(t, files) + + b.AssertFileContent("public/categories/index.html", "Categories|Date: 2022-01-15|Lastmod: 2022-01-16|") + b.AssertFileContent("public/categories/c/index.html", "C|Date: 
2022-01-15|Lastmod: 2022-01-16|") + b.AssertFileContent("public/categories/e/index.html", "E|Date: 2017-01-16|Lastmod: 2017-01-17|") + b.AssertFileContent("public/tags/index.html", "Tags|Date: 2022-01-15|Lastmod: 2022-01-16|") + b.AssertFileContent("public/tags/a/index.html", "A|Date: 2022-01-15|Lastmod: 2022-01-16|") + b.AssertFileContent("public/tags/c/index.html", "C|Date: 2017-01-16|Lastmod: 2017-01-17|") +} + func TestPageDatesAllKinds(t *testing.T) { t.Parallel() @@ -469,10 +507,12 @@ categories: ["cool stuff"] s := b.H.Sites[0] checkDate := func(t time.Time, msg string) { + b.Helper() b.Assert(t.Year(), qt.Equals, 2017, qt.Commentf(msg)) } checkDated := func(d resource.Dated, msg string) { + b.Helper() checkDate(d.Date(), "date: "+msg) checkDate(d.Lastmod(), "lastmod: "+msg) } @@ -533,10 +573,10 @@ date: 2012-01-12 b.Assert(p.Lastmod().Year(), qt.Equals, year) } - checkDate(s.getPage("/"), 2018) - checkDate(s.getPage("/no-index"), 2017) - b.Assert(s.getPage("/with-index-no-date").Date().IsZero(), qt.Equals, true) - checkDate(s.getPage("/with-index-date"), 2018) + checkDate(s.getPageOldVersion("/"), 2018) + checkDate(s.getPageOldVersion("/no-index"), 2017) + b.Assert(s.getPageOldVersion("/with-index-no-date").Date().IsZero(), qt.Equals, true) + checkDate(s.getPageOldVersion("/with-index-date"), 2018) b.Assert(s.Site().LastChange().Year(), qt.Equals, 2018) } @@ -713,6 +753,91 @@ func TestPageWithMoreTag(t *testing.T) { testAllMarkdownEnginesForPages(t, assertFunc, nil, simplePageWithSummaryDelimiterSameLine) } +func TestSummaryInFrontMatter(t *testing.T) { + t.Parallel() + Test(t, ` +-- hugo.toml -- +-- content/simple.md -- +--- +title: Simple +summary: "Front **matter** summary" +--- +Simple Page +-- layouts/_default/single.html -- +Summary: {{ .Summary }}|Truncated: {{ .Truncated }}| + +`).AssertFileContent("public/simple/index.html", "Summary: Front matter summary|", "Truncated: false") +} + +func TestSummaryManualSplit(t *testing.T) { + t.Parallel() + Test(t, ` +-- hugo.toml -- +-- content/simple.md -- +--- +title: Simple +--- +This is **summary**. + +This is **content**. +-- layouts/_default/single.html -- +Summary: {{ .Summary }}|Truncated: {{ .Truncated }}| +Content: {{ .Content }}| + +`).AssertFileContent("public/simple/index.html", + "Summary:

<p>This is <strong>summary</strong>.</p>|", + "Truncated: true|", + "Content: <p>This is <strong>summary</strong>.</p>\n<p>This is <strong>content</strong>.</p>
|", + ) +} + +func TestSummaryManualSplitHTML(t *testing.T) { + t.Parallel() + Test(t, ` +-- hugo.toml -- +-- content/simple.html -- +--- +title: Simple +--- +
+<div> +This is summary. +</div> +<!--more--> + +<div> +This is content. +</div>
+-- layouts/_default/single.html -- +Summary: {{ .Summary }}|Truncated: {{ .Truncated }}| +Content: {{ .Content }}| + +`).AssertFileContent("public/simple/index.html", "Summary:
<div>\nThis is summary.\n</div>\n|Truncated: true|\nContent: \n\n<div>\nThis is content.\n</div>
|") +} + +func TestSummaryAuto(t *testing.T) { + t.Parallel() + Test(t, ` +-- hugo.toml -- +summaryLength = 10 +-- content/simple.md -- +--- +title: Simple +--- +This is **summary**. +This is **more summary**. +This is *even more summary**. +This is **more summary**. + +This is **content**. +-- layouts/_default/single.html -- +Summary: {{ .Summary }}|Truncated: {{ .Truncated }}| +Content: {{ .Content }}| + +`).AssertFileContent("public/simple/index.html", + "Summary: This is summary. This is more summary. This is even more summary*.|", + "Truncated: true|", + "Content:

This is summary.") +} + // #2973 func TestSummaryWithHTMLTagsOnNextLine(t *testing.T) { assertFunc := func(t *testing.T, ext string, pages page.Pages) { @@ -1190,26 +1315,89 @@ func TestPagePaths(t *testing.T) { } func TestTranslationKey(t *testing.T) { - t.Parallel() - c := qt.New(t) - cfg, fs := newTestCfg() - configs, err := loadTestConfigFromProvider(cfg) - c.Assert(err, qt.IsNil) + files := ` +-- hugo.toml -- +disableKinds = ["taxonomy", "term"] +defaultContentLanguage = "en" +defaultContentLanguageInSubdir = true +[languages] +[languages.en] +weight = 1 +[languages.nn] +weight = 2 +-- content/sect/p1.en.md -- +--- +translationkey: "adfasdf" +title: "p1 en" +--- +-- content/sect/p1.nn.md -- +--- +translationkey: "adfasdf" +title: "p1 nn" +--- +-- layouts/_default/single.html -- +Title: {{ .Title }}|TranslationKey: {{ .TranslationKey }}| +Translations: {{ range .Translations }}{{ .Language.Lang }}|{{ end }}| +AllTranslations: {{ range .AllTranslations }}{{ .Language.Lang }}|{{ end }}| - writeSource(t, fs, filepath.Join("content", filepath.FromSlash("sect/simple.no.md")), "---\ntitle: \"A1\"\ntranslationKey: \"k1\"\n---\nContent\n") - writeSource(t, fs, filepath.Join("content", filepath.FromSlash("sect/simple.en.md")), "---\ntitle: \"A2\"\n---\nContent\n") +` + b := Test(t, files) + b.AssertFileContent("public/en/sect/p1/index.html", + "TranslationKey: adfasdf|", + "AllTranslations: en|nn||", + "Translations: nn||", + ) - s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Configs: configs}, BuildCfg{SkipRender: true}) + b.AssertFileContent("public/nn/sect/p1/index.html", + "TranslationKey: adfasdf|", + "Translations: en||", + "AllTranslations: en|nn||", + ) +} - c.Assert(len(s.RegularPages()), qt.Equals, 2) +// Issue #11540. +func TestTranslationKeyResourceSharing(t *testing.T) { + files := ` +-- hugo.toml -- +disableKinds = ["taxonomy", "term"] +defaultContentLanguage = "en" +defaultContentLanguageInSubdir = true +[languages] +[languages.en] +weight = 1 +[languages.nn] +weight = 2 +-- content/sect/mybundle_en/index.en.md -- +--- +translationkey: "adfasdf" +title: "mybundle en" +--- +-- content/sect/mybundle_en/f1.txt -- +f1.en +-- content/sect/mybundle_en/f2.txt -- +f2.en +-- content/sect/mybundle_nn/index.nn.md -- +--- +translationkey: "adfasdf" +title: "mybundle nn" +--- +-- content/sect/mybundle_nn/f2.nn.txt -- +f2.nn +-- layouts/_default/single.html -- +Title: {{ .Title }}|TranslationKey: {{ .TranslationKey }}| +Resources: {{ range .Resources }}{{ .RelPermalink }}|{{ .Content }}|{{ end }}| - home := s.Home() - c.Assert(home, qt.Not(qt.IsNil)) - c.Assert(home.TranslationKey(), qt.Equals, "home") - c.Assert(s.RegularPages()[0].TranslationKey(), qt.Equals, "page/k1") - p2 := s.RegularPages()[1] +` + b := Test(t, files) + b.AssertFileContent("public/en/sect/mybundle_en/index.html", + "TranslationKey: adfasdf|", + "Resources: /en/sect/mybundle_en/f1.txt|f1.en|/en/sect/mybundle_en/f2.txt|f2.en||", + ) - c.Assert(p2.TranslationKey(), qt.Equals, "page/sect/simple") + b.AssertFileContent("public/nn/sect/mybundle_nn/index.html", + "TranslationKey: adfasdf|", + "Title: mybundle nn|TranslationKey: adfasdf|\nResources: /en/sect/mybundle_en/f1.txt|f1.en|/nn/sect/mybundle_nn/f2.nn.txt|f2.nn||", + ) } func TestChompBOM(t *testing.T) { @@ -1383,12 +1571,6 @@ Content:{{ .Content }} ) } -// https://github.com/gohugoio/hugo/issues/5781 -func TestPageWithZeroFile(t *testing.T) { - newTestSitesBuilder(t).WithLogger(loggers.NewDefault()).WithSimpleConfigFile(). 
- WithTemplatesAdded("index.html", "{{ .File.Filename }}{{ with .File }}{{ .Dir }}{{ end }}").Build(BuildCfg{}) -} - func TestHomePageWithNoTitle(t *testing.T) { b := newTestSitesBuilder(t).WithConfigFile("toml", ` title = "Site Title" @@ -1499,93 +1681,45 @@ func TestShouldBuildWithClock(t *testing.T) { } } -// "dot" in path: #1885 and #2110 -// disablePathToLower regression: #3374 -func TestPathIssues(t *testing.T) { - for _, disablePathToLower := range []bool{false, true} { - for _, uglyURLs := range []bool{false, true} { - disablePathToLower := disablePathToLower - uglyURLs := uglyURLs - t.Run(fmt.Sprintf("disablePathToLower=%t,uglyURLs=%t", disablePathToLower, uglyURLs), func(t *testing.T) { - t.Parallel() - cfg, fs := newTestCfg() - c := qt.New(t) - - cfg.Set("permalinks", map[string]string{ - "post": ":section/:title", - }) - - cfg.Set("uglyURLs", uglyURLs) - cfg.Set("disablePathToLower", disablePathToLower) - cfg.Set("paginate", 1) - th, configs := newTestHelperFromProvider(cfg, fs, t) - - writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), "{{.Content}}") - writeSource(t, fs, filepath.Join("layouts", "_default", "list.html"), - "P{{.Paginator.PageNumber}}|URL: {{.Paginator.URL}}|{{ if .Paginator.HasNext }}Next: {{.Paginator.Next.URL }}{{ end }}") - - for i := 0; i < 3; i++ { - writeSource(t, fs, filepath.Join("content", "post", fmt.Sprintf("doc%d.md", i)), - fmt.Sprintf(`--- -title: "test%d.dot" -tags: -- ".net" +// See https://github.com/gohugoio/hugo/issues/9171 +// We redefined disablePathToLower in v0.121.0. +func TestPagePathDisablePathToLower(t *testing.T) { + files := ` +-- hugo.toml -- +baseURL = "http://example.com" +disablePathToLower = true +[permalinks] +sect2 = "/:section/:filename/" +sect3 = "/:section/:title/" +-- content/sect/p1.md -- --- -# doc1 -*some content*`, i)) - } - - writeSource(t, fs, filepath.Join("content", "Blog", "Blog1.md"), - fmt.Sprintf(`--- -title: "testBlog" -tags: -- "Blog" +title: "Page1" --- -# doc1 -*some blog content*`)) - - s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Configs: configs}, BuildCfg{}) - - c.Assert(len(s.RegularPages()), qt.Equals, 4) - - pathFunc := func(s string) string { - if uglyURLs { - return strings.Replace(s, "/index.html", ".html", 1) - } - return s - } - - blog := "blog" - - if disablePathToLower { - blog = "Blog" - } - - th.assertFileContent(pathFunc("public/"+blog+"/"+blog+"1/index.html"), "some blog content") - - th.assertFileContent(pathFunc("public/post/test0.dot/index.html"), "some content") - - if uglyURLs { - th.assertFileContent("public/post/page/1.html", `canonical" href="/post.html"`) - th.assertFileContent("public/post.html", `P1|URL: /post.html|Next: /post/page/2.html`) - th.assertFileContent("public/post/page/2.html", `P2|URL: /post/page/2.html|Next: /post/page/3.html`) - } else { - th.assertFileContent("public/post/page/1/index.html", `canonical" href="/post/"`) - th.assertFileContent("public/post/index.html", `P1|URL: /post/|Next: /post/page/2/`) - th.assertFileContent("public/post/page/2/index.html", `P2|URL: /post/page/2/|Next: /post/page/3/`) - th.assertFileContent("public/tags/.net/index.html", `P1|URL: /tags/.net/|Next: /tags/.net/page/2/`) - - } - - p := s.RegularPages()[0] - if uglyURLs { - c.Assert(p.RelPermalink(), qt.Equals, "/post/test0.dot.html") - } else { - c.Assert(p.RelPermalink(), qt.Equals, "/post/test0.dot/") - } - }) - } - } +p1. +-- content/sect/p2.md -- +--- +title: "Page2" +slug: "PaGe2" +--- +p2. 
+-- content/sect2/PaGe3.md -- +--- +title: "Page3" +--- +-- content/seCt3/p4.md -- +--- +title: "Pag.E4" +slug: "PaGe4" +--- +p4. +-- layouts/_default/single.html -- +Single: {{ .Title}}|{{ .RelPermalink }}|{{ .Path }}| +` + b := Test(t, files) + b.AssertFileContent("public/sect/p1/index.html", "Single: Page1|/sect/p1/|/sect/p1") + b.AssertFileContent("public/sect/PaGe2/index.html", "Single: Page2|/sect/PaGe2/|/sect/p2") + b.AssertFileContent("public/sect2/page3/index.html", "Single: Page3|/sect2/page3/|/sect2/page3|") + b.AssertFileContent("public/sect3/Pag.E4/index.html", "Single: Pag.E4|/sect3/Pag.E4/|/sect3/p4|") } // https://github.com/gohugoio/hugo/issues/4675 @@ -1711,50 +1845,6 @@ title: Scratch Me! b.AssertFileContent("public/scratchme/index.html", "C: cv") } -func TestScratchRebuild(t *testing.T) { - t.Parallel() - - files := ` --- config.toml -- --- content/p1.md -- ---- -title: "p1" ---- -{{< scratchme >}} --- layouts/shortcodes/foo.html -- -notused --- layouts/shortcodes/scratchme.html -- -{{ .Page.Scratch.Set "scratch" "foo" }} -{{ .Page.Store.Set "scratch" "bar" }} --- layouts/_default/single.html -- -{{ .Content }} -Scratch: {{ .Scratch.Get "scratch" }}| -Store: {{ .Store.Get "scratch" }}| -` - - b := NewIntegrationTestBuilder( - IntegrationTestConfig{ - T: t, - TxtarString: files, - Running: true, - }, - ).Build() - - b.AssertFileContent("public/p1/index.html", ` -Scratch: foo| -Store: bar| - `) - - b.EditFiles("layouts/shortcodes/foo.html", "edit") - - b.Build() - - b.AssertFileContent("public/p1/index.html", ` -Scratch: | -Store: bar| - `) -} - func TestPageParam(t *testing.T) { t.Parallel() @@ -1879,27 +1969,6 @@ Link with URL as text `) } -func TestPageCaseIssues(t *testing.T) { - t.Parallel() - - b := newTestSitesBuilder(t) - b.WithConfigFile("toml", `defaultContentLanguage = "no" -[languages] -[languages.NO] -title = "Norsk" -`) - b.WithContent("a/B/C/Page1.md", "---\ntitle: Page1\n---") - b.WithTemplates("index.html", ` -{{ $p1 := site.GetPage "a/B/C/Page1" }} -Lang: {{ .Lang }} -Page1: {{ $p1.Path }} -`) - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/index.html", "Lang: no", filepath.FromSlash("Page1: a/B/C/Page1.md")) -} - func TestPageHashString(t *testing.T) { files := ` -- config.toml -- @@ -1930,6 +1999,8 @@ title: "p2" p2 := b.H.Sites[0].RegularPages()[1] sites := p1.Sites() + b.Assert(p1, qt.Not(qt.Equals), p2) + b.Assert(identity.HashString(p1), qt.Not(qt.Equals), identity.HashString(p2)) b.Assert(identity.HashString(sites[0]), qt.Not(qt.Equals), identity.HashString(sites[1])) } diff --git a/hugolib/page_unwrap.go b/hugolib/page_unwrap.go index c3e1ce8dd..c22ff2174 100644 --- a/hugolib/page_unwrap.go +++ b/hugolib/page_unwrap.go @@ -16,6 +16,7 @@ package hugolib import ( "fmt" + "github.com/gohugoio/hugo/common/types" "github.com/gohugoio/hugo/resources/page" ) @@ -31,6 +32,8 @@ func unwrapPage(in any) (page.Page, error) { return v, nil case pageWrapper: return v.page(), nil + case types.Unwrapper: + return unwrapPage(v.Unwrapv()) case page.Page: return v, nil case nil: diff --git a/hugolib/pagebundler_test.go b/hugolib/pagebundler_test.go index 64d329832..123d752e0 100644 --- a/hugolib/pagebundler_test.go +++ b/hugolib/pagebundler_test.go @@ -15,21 +15,14 @@ package hugolib import ( "fmt" - "io" "os" - "path" "path/filepath" - "regexp" + "testing" "github.com/gohugoio/hugo/common/loggers" - "strings" - "testing" - "github.com/gohugoio/hugo/config" - "github.com/gohugoio/hugo/hugofs/files" - "github.com/gohugoio/hugo/helpers" 
"github.com/gohugoio/hugo/hugofs" @@ -44,454 +37,180 @@ import ( qt "github.com/frankban/quicktest" ) -func TestPageBundlerSiteRegular(t *testing.T) { - c := qt.New(t) - baseBaseURL := "https://example.com" +func TestPageBundlerBundleInRoot(t *testing.T) { + t.Parallel() + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["taxonomy", "term"] +-- content/root/index.md -- +--- +title: "Root" +--- +-- layouts/_default/single.html -- +Basic: {{ .Title }}|{{ .Kind }}|{{ .BundleType }}|{{ .RelPermalink }}| +Tree: Section: {{ .Section }}|CurrentSection: {{ .CurrentSection.RelPermalink }}|Parent: {{ .Parent.RelPermalink }}|FirstSection: {{ .FirstSection.RelPermalink }} +` + b := Test(t, files) - for _, baseURLPath := range []string{"", "/hugo"} { - for _, canonify := range []bool{false, true} { - for _, ugly := range []bool{false, true} { - baseURLPathId := baseURLPath - if baseURLPathId == "" { - baseURLPathId = "NONE" - } - ugly := ugly - canonify := canonify - c.Run(fmt.Sprintf("ugly=%t,canonify=%t,path=%s", ugly, canonify, baseURLPathId), - func(c *qt.C) { - c.Parallel() - baseURL := baseBaseURL + baseURLPath - relURLBase := baseURLPath - if canonify { - relURLBase = "" - } - fs, cfg := newTestBundleSources(c) - cfg.Set("baseURL", baseURL) - cfg.Set("canonifyURLs", canonify) - cfg.Set("defaultContentLanguageInSubdir", false) - - cfg.Set("permalinks", map[string]string{ - "a": ":sections/:filename", - "b": ":year/:slug/", - "c": ":sections/:slug", - "/": ":filename/", - }) - - cfg.Set("outputFormats", map[string]any{ - "CUSTOMO": map[string]any{ - "mediaType": "text/html", - "baseName": "cindex", - "path": "cpath", - "permalinkable": true, - }, - }) - - cfg.Set("outputs", map[string]any{ - "home": []string{"HTML", "CUSTOMO"}, - "page": []string{"HTML", "CUSTOMO"}, - "section": []string{"HTML", "CUSTOMO"}, - }) - - cfg.Set("uglyURLs", ugly) - configs, err := loadTestConfigFromProvider(cfg) - - c.Assert(err, qt.IsNil) - - b := newTestSitesBuilderFromDepsCfg(c, deps.DepsCfg{Fs: fs, Configs: configs}).WithNothingAdded() - - b.Build(BuildCfg{}) - - s := b.H.Sites[0] - - c.Assert(len(s.RegularPages()), qt.Equals, 8) - - singlePage := s.getPage(kinds.KindPage, "a/1.md") - c.Assert(singlePage.BundleType(), qt.Equals, files.ContentClass("")) - - c.Assert(singlePage, qt.Not(qt.IsNil)) - c.Assert(s.getPage("page", "a/1"), qt.Equals, singlePage) - c.Assert(s.getPage("page", "1"), qt.Equals, singlePage) - - c.Assert(content(singlePage), qt.Contains, "TheContent") - - relFilename := func(basePath, outBase string) (string, string) { - rel := basePath - if ugly { - rel = strings.TrimSuffix(basePath, "/") + ".html" - } - - var filename string - if !ugly { - filename = path.Join(basePath, outBase) - } else { - filename = rel - } - - rel = fmt.Sprintf("%s%s", relURLBase, rel) - - return rel, filename - } - - // Check both output formats - rel, filename := relFilename("/a/1/", "index.html") - b.AssertFileContent(filepath.Join("public", filename), - "TheContent", - "Single RelPermalink: "+rel, - ) - - rel, filename = relFilename("/cpath/a/1/", "cindex.html") - - b.AssertFileContent(filepath.Join("public", filename), - "TheContent", - "Single RelPermalink: "+rel, - ) - - b.AssertFileContent(filepath.FromSlash("public/images/hugo-logo.png"), "content") - - // This should be just copied to destination. 
- b.AssertFileContent(filepath.FromSlash("public/assets/pic1.png"), "content") - - leafBundle1 := s.getPage(kinds.KindPage, "b/my-bundle/index.md") - c.Assert(leafBundle1, qt.Not(qt.IsNil)) - c.Assert(leafBundle1.BundleType(), qt.Equals, files.ContentClassLeaf) - c.Assert(leafBundle1.Section(), qt.Equals, "b") - sectionB := s.getPage(kinds.KindSection, "b") - c.Assert(sectionB, qt.Not(qt.IsNil)) - home := s.Home() - c.Assert(home.BundleType(), qt.Equals, files.ContentClassBranch) - - // This is a root bundle and should live in the "home section" - // See https://github.com/gohugoio/hugo/issues/4332 - rootBundle := s.getPage(kinds.KindPage, "root") - c.Assert(rootBundle, qt.Not(qt.IsNil)) - c.Assert(rootBundle.Parent().IsHome(), qt.Equals, true) - if !ugly { - b.AssertFileContent(filepath.FromSlash("public/root/index.html"), "Single RelPermalink: "+relURLBase+"/root/") - b.AssertFileContent(filepath.FromSlash("public/cpath/root/cindex.html"), "Single RelPermalink: "+relURLBase+"/cpath/root/") - } - - leafBundle2 := s.getPage(kinds.KindPage, "a/b/index.md") - c.Assert(leafBundle2, qt.Not(qt.IsNil)) - unicodeBundle := s.getPage(kinds.KindPage, "c/bundle/index.md") - c.Assert(unicodeBundle, qt.Not(qt.IsNil)) - - pageResources := leafBundle1.Resources().ByType(pageResourceType) - c.Assert(len(pageResources), qt.Equals, 2) - firstPage := pageResources[0].(page.Page) - secondPage := pageResources[1].(page.Page) - - c.Assert(firstPage.File().Filename(), qt.Equals, filepath.FromSlash("/work/base/b/my-bundle/1.md")) - c.Assert(content(firstPage), qt.Contains, "TheContent") - c.Assert(len(leafBundle1.Resources()), qt.Equals, 6) - - // Verify shortcode in bundled page - c.Assert(content(secondPage), qt.Contains, filepath.FromSlash("MyShort in b/my-bundle/2.md")) - - // https://github.com/gohugoio/hugo/issues/4582 - c.Assert(firstPage.Parent(), qt.Equals, leafBundle1) - c.Assert(secondPage.Parent(), qt.Equals, leafBundle1) - - c.Assert(pageResources.GetMatch("1*"), qt.Equals, firstPage) - c.Assert(pageResources.GetMatch("2*"), qt.Equals, secondPage) - c.Assert(pageResources.GetMatch("doesnotexist*"), qt.IsNil) - - imageResources := leafBundle1.Resources().ByType("image") - c.Assert(len(imageResources), qt.Equals, 3) - - c.Assert(leafBundle1.OutputFormats().Get("CUSTOMO"), qt.Not(qt.IsNil)) - - relPermalinker := func(s string) string { - return fmt.Sprintf(s, relURLBase) - } - - permalinker := func(s string) string { - return fmt.Sprintf(s, baseURL) - } - - if ugly { - b.AssertFileContent("public/2017/pageslug.html", - relPermalinker("Single RelPermalink: %s/2017/pageslug.html"), - permalinker("Single Permalink: %s/2017/pageslug.html"), - relPermalinker("Sunset RelPermalink: %s/2017/pageslug/sunset1.jpg"), - permalinker("Sunset Permalink: %s/2017/pageslug/sunset1.jpg")) - } else { - b.AssertFileContent("public/2017/pageslug/index.html", - relPermalinker("Sunset RelPermalink: %s/2017/pageslug/sunset1.jpg"), - permalinker("Sunset Permalink: %s/2017/pageslug/sunset1.jpg")) - - b.AssertFileContent("public/cpath/2017/pageslug/cindex.html", - relPermalinker("Single RelPermalink: %s/cpath/2017/pageslug/"), - relPermalinker("Short Sunset RelPermalink: %s/cpath/2017/pageslug/sunset2.jpg"), - relPermalinker("Sunset RelPermalink: %s/cpath/2017/pageslug/sunset1.jpg"), - permalinker("Sunset Permalink: %s/cpath/2017/pageslug/sunset1.jpg"), - ) - } - - b.AssertFileContent(filepath.FromSlash("public/2017/pageslug/c/logo.png"), "content") - b.AssertFileContent(filepath.FromSlash("public/cpath/2017/pageslug/c/logo.png"), 
"content") - c.Assert(b.CheckExists("public/cpath/cpath/2017/pageslug/c/logo.png"), qt.Equals, false) - - // Custom media type defined in site config. - c.Assert(len(leafBundle1.Resources().ByType("bepsays")), qt.Equals, 1) - - if ugly { - b.AssertFileContent(filepath.FromSlash("public/2017/pageslug.html"), - "TheContent", - relPermalinker("Sunset RelPermalink: %s/2017/pageslug/sunset1.jpg"), - permalinker("Sunset Permalink: %s/2017/pageslug/sunset1.jpg"), - "Thumb Width: 123", - "Thumb Name: my-sunset-1", - relPermalinker("Short Sunset RelPermalink: %s/2017/pageslug/sunset2.jpg"), - "Short Thumb Width: 56", - "1: Image Title: Sunset Galore 1", - "1: Image Params: map[myparam:My Sunny Param]", - relPermalinker("1: Image RelPermalink: %s/2017/pageslug/sunset1.jpg"), - "2: Image Title: Sunset Galore 2", - "2: Image Params: map[myparam:My Sunny Param]", - "1: Image myParam: Lower: My Sunny Param Caps: My Sunny Param", - "0: Page Title: Bundle Galore", - ) - - // https://github.com/gohugoio/hugo/issues/5882 - b.AssertFileContent( - filepath.FromSlash("public/2017/pageslug.html"), "0: Page RelPermalink: |") - - b.AssertFileContent(filepath.FromSlash("public/cpath/2017/pageslug.html"), "TheContent") - - // 은행 - b.AssertFileContent(filepath.FromSlash("public/c/은행/logo-은행.png"), "은행 PNG") - - } else { - b.AssertFileContent(filepath.FromSlash("public/2017/pageslug/index.html"), "TheContent") - b.AssertFileContent(filepath.FromSlash("public/cpath/2017/pageslug/cindex.html"), "TheContent") - b.AssertFileContent(filepath.FromSlash("public/2017/pageslug/index.html"), "Single Title") - b.AssertFileContent(filepath.FromSlash("public/root/index.html"), "Single Title") - - } - }) - } - } - } + b.AssertFileContent("public/root/index.html", + "Basic: Root|page|leaf|/root/|", + "Tree: Section: |CurrentSection: /|Parent: /|FirstSection: /", + ) } -func TestPageBundlerSiteMultilingual(t *testing.T) { +func TestPageBundlerShortcodeInBundledPage(t *testing.T) { + t.Parallel() + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["taxonomy", "term"] +-- content/section/mybundle/index.md -- +--- +title: "Mybundle" +--- +-- content/section/mybundle/p1.md -- +--- +title: "P1" +--- + +P1 content. + +{{< myShort >}} + +-- layouts/_default/single.html -- +Bundled page: {{ .RelPermalink}}|{{ with .Resources.Get "p1.md" }}Title: {{ .Title }}|Content: {{ .Content }}{{ end }}| +-- layouts/shortcodes/myShort.html -- +MyShort. + +` + b := Test(t, files) + + b.AssertFileContent("public/section/mybundle/index.html", + "Bundled page: /section/mybundle/|Title: P1|Content:

<p>P1 content.</p>
\nMyShort.", + ) +} + +func TestPageBundlerResourceMultipleOutputFormatsWithDifferentPaths(t *testing.T) { + t.Parallel() + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["taxonomy", "term"] +[outputformats] +[outputformats.cpath] +mediaType = "text/html" +path = "cpath" +-- content/section/mybundle/index.md -- +--- +title: "My Bundle" +outputs: ["html", "cpath"] +--- +-- content/section/mybundle/hello.txt -- +Hello. +-- content/section/mybundle/p1.md -- +--- +title: "P1" +--- +P1. + +{{< hello >}} + +-- layouts/shortcodes/hello.html -- +Hello HTML. +-- layouts/_default/single.html -- +Basic: {{ .Title }}|{{ .Kind }}|{{ .BundleType }}|{{ .RelPermalink }}| +Resources: {{ range .Resources }}RelPermalink: {{ .RelPermalink }}|Content: {{ .Content }}|{{ end }}| +-- layouts/shortcodes/hello.cpath -- +Hello CPATH. +-- layouts/_default/single.cpath -- +Basic: {{ .Title }}|{{ .Kind }}|{{ .BundleType }}|{{ .RelPermalink }}| +Resources: {{ range .Resources }}RelPermalink: {{ .RelPermalink }}|Content: {{ .Content }}|{{ end }}| +` + + b := Test(t, files) + + b.AssertFileContent("public/section/mybundle/index.html", + "Basic: My Bundle|page|leaf|/section/mybundle/|", + "Resources: RelPermalink: |Content:

<p>P1.</p>
\nHello HTML.\n|RelPermalink: /section/mybundle/hello.txt|Content: Hello.||", + ) + + b.AssertFileContent("public/cpath/section/mybundle/index.html", "Basic: My Bundle|page|leaf|/section/mybundle/|\nResources: RelPermalink: |Content:

<p>P1.</p>
\nHello CPATH.\n|RelPermalink: /section/mybundle/hello.txt|Content: Hello.||") +} + +func TestPageBundlerMultilingualTextResource(t *testing.T) { t.Parallel() - for _, ugly := range []bool{false, true} { - ugly := ugly - t.Run(fmt.Sprintf("ugly=%t", ugly), - func(t *testing.T) { - t.Parallel() - c := qt.New(t) - fs, cfg := newTestBundleSourcesMultilingual(t) - cfg.Set("uglyURLs", ugly) - configs, err := loadTestConfigFromProvider(cfg) - c.Assert(err, qt.IsNil) + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["taxonomy", "term"] +defaultContentLanguage = "en" +defaultContentLanguageInSubdir = true +[languages] +[languages.en] +weight = 1 +[languages.nn] +weight = 2 +-- content/mybundle/index.md -- +--- +title: "My Bundle" +--- +-- content/mybundle/index.nn.md -- +--- +title: "My Bundle NN" +--- +-- content/mybundle/f1.txt -- +F1 +-- content/mybundle/f2.txt -- +F2 +-- content/mybundle/f2.nn.txt -- +F2 nn. +-- layouts/_default/single.html -- +{{ .Title }}|{{ .RelPermalink }}|{{ .Lang }}| +Resources: {{ range .Resources }}RelPermalink: {{ .RelPermalink }}|Content: {{ .Content }}|{{ end }}| - b := newTestSitesBuilderFromDepsCfg(t, deps.DepsCfg{Fs: fs, Configs: configs}).WithNothingAdded() - b.Build(BuildCfg{}) +` + b := Test(t, files) - sites := b.H - - c.Assert(len(sites.Sites), qt.Equals, 2) - - s := sites.Sites[0] - - c.Assert(len(s.RegularPages()), qt.Equals, 8) - c.Assert(len(s.Pages()), qt.Equals, 16) - // dumpPages(s.AllPages()...) - - c.Assert(len(s.AllPages()), qt.Equals, 31) - - bundleWithSubPath := s.getPage(kinds.KindPage, "lb/index") - c.Assert(bundleWithSubPath, qt.Not(qt.IsNil)) - - // See https://github.com/gohugoio/hugo/issues/4312 - // Before that issue: - // A bundle in a/b/index.en.md - // a/b/index.en.md => OK - // a/b/index => OK - // index.en.md => ambiguous, but OK. - // With bundles, the file name has little meaning, the folder it lives in does. So this should also work: - // a/b - // and probably also just b (aka "my-bundle") - // These may also be translated, so we also need to test that. - // "bf", "my-bf-bundle", "index.md + nn - bfBundle := s.getPage(kinds.KindPage, "bf/my-bf-bundle/index") - c.Assert(bfBundle, qt.Not(qt.IsNil)) - c.Assert(bfBundle.Language().Lang, qt.Equals, "en") - c.Assert(s.getPage(kinds.KindPage, "bf/my-bf-bundle/index.md"), qt.Equals, bfBundle) - c.Assert(s.getPage(kinds.KindPage, "bf/my-bf-bundle"), qt.Equals, bfBundle) - c.Assert(s.getPage(kinds.KindPage, "my-bf-bundle"), qt.Equals, bfBundle) - - nnSite := sites.Sites[1] - c.Assert(len(nnSite.RegularPages()), qt.Equals, 7) - - bfBundleNN := nnSite.getPage(kinds.KindPage, "bf/my-bf-bundle/index") - c.Assert(bfBundleNN, qt.Not(qt.IsNil)) - c.Assert(bfBundleNN.Language().Lang, qt.Equals, "nn") - c.Assert(nnSite.getPage(kinds.KindPage, "bf/my-bf-bundle/index.nn.md"), qt.Equals, bfBundleNN) - c.Assert(nnSite.getPage(kinds.KindPage, "bf/my-bf-bundle"), qt.Equals, bfBundleNN) - c.Assert(nnSite.getPage(kinds.KindPage, "my-bf-bundle"), qt.Equals, bfBundleNN) - - // See https://github.com/gohugoio/hugo/issues/4295 - // Every resource should have its Name prefixed with its base folder. 
- cBundleResources := bundleWithSubPath.Resources().Match("c/**") - c.Assert(len(cBundleResources), qt.Equals, 4) - bundlePage := bundleWithSubPath.Resources().GetMatch("c/page*") - c.Assert(bundlePage, qt.Not(qt.IsNil)) - - bcBundleNN, _ := nnSite.getPageNew(nil, "bc") - c.Assert(bcBundleNN, qt.Not(qt.IsNil)) - bcBundleEN, _ := s.getPageNew(nil, "bc") - c.Assert(bcBundleNN.Language().Lang, qt.Equals, "nn") - c.Assert(bcBundleEN.Language().Lang, qt.Equals, "en") - c.Assert(len(bcBundleNN.Resources()), qt.Equals, 3) - c.Assert(len(bcBundleEN.Resources()), qt.Equals, 3) - b.AssertFileContent("public/en/bc/data1.json", "data1") - b.AssertFileContent("public/en/bc/data2.json", "data2") - b.AssertFileContent("public/en/bc/logo-bc.png", "logo") - b.AssertFileContent("public/nn/bc/data1.nn.json", "data1.nn") - b.AssertFileContent("public/nn/bc/data2.json", "data2") - b.AssertFileContent("public/nn/bc/logo-bc.png", "logo") - }) - } + b.AssertFileContent("public/en/mybundle/index.html", "My Bundle|/en/mybundle/|en|\nResources: RelPermalink: /en/mybundle/f1.txt|Content: F1|RelPermalink: /en/mybundle/f2.txt|Content: F2||") + b.AssertFileContent("public/nn/mybundle/index.html", "My Bundle NN|/nn/mybundle/|nn|\nResources: RelPermalink: /en/mybundle/f1.txt|Content: F1|RelPermalink: /nn/mybundle/f2.nn.txt|Content: F2 nn.||") } func TestMultilingualDisableLanguage(t *testing.T) { t.Parallel() - c := qt.New(t) - fs, cfg := newTestBundleSourcesMultilingual(t) - cfg.Set("disableLanguages", []string{"nn"}) - configs, err := loadTestConfigFromProvider(cfg) - c.Assert(err, qt.IsNil) - - b := newTestSitesBuilderFromDepsCfg(t, deps.DepsCfg{Fs: fs, Configs: configs}).WithNothingAdded() - b.Build(BuildCfg{}) - sites := b.H - - c.Assert(len(sites.Sites), qt.Equals, 1) - - s := sites.Sites[0] - - c.Assert(len(s.RegularPages()), qt.Equals, 8) - c.Assert(len(s.Pages()), qt.Equals, 16) - // No nn pages - c.Assert(len(s.AllPages()), qt.Equals, 16) - s.pageMap.withEveryBundlePage(func(p *pageState) bool { - c.Assert(p.Language().Lang != "nn", qt.Equals, true) - return false - }) -} - -func TestPageBundlerSiteWitSymbolicLinksInContent(t *testing.T) { - skipSymlink(t) - - wd, _ := os.Getwd() - defer func() { - os.Chdir(wd) - }() - - c := qt.New(t) - - // We need to use the OS fs for this. - workingDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugosym") - c.Assert(err, qt.IsNil) - cfg := config.New() - cfg.Set("workingDir", workingDir) - cfg.Set("publishDir", "public") - fs := hugofs.NewFromOld(hugofs.Os, cfg) - - contentDirName := "content" - - contentDir := filepath.Join(workingDir, contentDirName) - c.Assert(os.MkdirAll(filepath.Join(contentDir, "a"), 0777), qt.IsNil) - - for i := 1; i <= 3; i++ { - c.Assert(os.MkdirAll(filepath.Join(workingDir, fmt.Sprintf("symcontent%d", i)), 0777), qt.IsNil) - } - - c.Assert(os.MkdirAll(filepath.Join(workingDir, "symcontent2", "a1"), 0777), qt.IsNil) - - // Symlinked sections inside content. - os.Chdir(contentDir) - for i := 1; i <= 3; i++ { - c.Assert(os.Symlink(filepath.FromSlash(fmt.Sprintf(("../symcontent%d"), i)), fmt.Sprintf("symbolic%d", i)), qt.IsNil) - } - - c.Assert(os.Chdir(filepath.Join(contentDir, "a")), qt.IsNil) - - // Create a symlink to one single content file - c.Assert(os.Symlink(filepath.FromSlash("../../symcontent2/a1/page.md"), "page_s.md"), qt.IsNil) - - c.Assert(os.Chdir(filepath.FromSlash("../../symcontent3")), qt.IsNil) - - // Create a circular symlink. Will print some warnings. 
- c.Assert(os.Symlink(filepath.Join("..", contentDirName), filepath.FromSlash("circus")), qt.IsNil) - - c.Assert(os.Chdir(workingDir), qt.IsNil) - - defer clean() - - cfg.Set("workingDir", workingDir) - cfg.Set("contentDir", contentDirName) - cfg.Set("baseURL", "https://example.com") - configs, err := loadTestConfigFromProvider(cfg) - c.Assert(err, qt.IsNil) - - layout := `{{ .Title }}|{{ .Content }}` - pageContent := `--- -slug: %s -date: 2017-10-09 + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["taxonomy", "term"] +defaultContentLanguage = "en" +defaultContentLanguageInSubdir = true +disableLanguages = ["nn"] +[languages] +[languages.en] +weight = 1 +[languages.nn] +weight = 2 +-- content/p1.md -- --- +title: "P1" +--- +P1 +-- content/p1.nn.md -- +--- +title: "P1nn" +--- +P1nn +-- layouts/_default/single.html -- +{{ .Title }}|{{ .Content }}|{{ .Lang }}| -TheContent. ` + b := Test(t, files) - b := newTestSitesBuilderFromDepsCfg(t, deps.DepsCfg{ - Fs: fs, - Configs: configs, - }) - - b.WithTemplates( - "_default/single.html", layout, - "_default/list.html", layout, - ) - - b.WithContent( - "a/regular.md", fmt.Sprintf(pageContent, "a1"), - ) - - b.WithSourceFile( - "symcontent1/s1.md", fmt.Sprintf(pageContent, "s1"), - "symcontent1/s2.md", fmt.Sprintf(pageContent, "s2"), - // Regular files inside symlinked folder. - "symcontent1/s1.md", fmt.Sprintf(pageContent, "s1"), - "symcontent1/s2.md", fmt.Sprintf(pageContent, "s2"), - - // A bundle - "symcontent2/a1/index.md", fmt.Sprintf(pageContent, ""), - "symcontent2/a1/page.md", fmt.Sprintf(pageContent, "page"), - "symcontent2/a1/logo.png", "image", - - // Assets - "symcontent3/s1.png", "image", - "symcontent3/s2.png", "image", - ) - - b.Build(BuildCfg{}) - s := b.H.Sites[0] - - c.Assert(len(s.RegularPages()), qt.Equals, 7) - a1Bundle := s.getPage(kinds.KindPage, "symbolic2/a1/index.md") - c.Assert(a1Bundle, qt.Not(qt.IsNil)) - c.Assert(len(a1Bundle.Resources()), qt.Equals, 2) - c.Assert(len(a1Bundle.Resources().ByType(pageResourceType)), qt.Equals, 1) - - b.AssertFileContent(filepath.FromSlash("public/a/page/index.html"), "TheContent") - b.AssertFileContent(filepath.FromSlash("public/symbolic1/s1/index.html"), "TheContent") - b.AssertFileContent(filepath.FromSlash("public/symbolic2/a1/index.html"), "TheContent") + b.AssertFileContent("public/en/p1/index.html", "P1|

P1

\n|en|") + b.AssertFileExists("public/public/nn/p1/index.html", false) + b.Assert(len(b.H.Sites), qt.Equals, 1) } func TestPageBundlerHeadless(t *testing.T) { @@ -544,10 +263,10 @@ HEADLESS {{< myShort >}} c.Assert(len(s.RegularPages()), qt.Equals, 1) - regular := s.getPage(kinds.KindPage, "a/index") + regular := s.getPageOldVersion(kinds.KindPage, "a/index") c.Assert(regular.RelPermalink(), qt.Equals, "/s1/") - headless := s.getPage(kinds.KindPage, "b/index") + headless := s.getPageOldVersion(kinds.KindPage, "b/index") c.Assert(headless, qt.Not(qt.IsNil)) c.Assert(headless.Title(), qt.Equals, "Headless Bundle in Topless Bar") c.Assert(headless.RelPermalink(), qt.Equals, "") @@ -576,6 +295,7 @@ HEADLESS {{< myShort >}} // No headless bundles here, please. // https://github.com/gohugoio/hugo/issues/6492 c.Assert(s.RegularPages(), qt.HasLen, 1) + c.Assert(s.Pages(), qt.HasLen, 4) c.Assert(s.home.RegularPages(), qt.HasLen, 1) c.Assert(s.home.Pages(), qt.HasLen, 1) } @@ -686,7 +406,6 @@ Single content. b.Build(BuildCfg{}) b.AssertFileContent("public/nn/mybundle/data.yaml", "data nn") - b.AssertFileContent("public/nn/mybundle/forms.yaml", "forms en") b.AssertFileContent("public/mybundle/data.yaml", "data en") b.AssertFileContent("public/mybundle/forms.yaml", "forms en") @@ -701,293 +420,113 @@ Single content. b.AssertFileContent("public/section-not-bundle/single/index.html", "Section Single", "|

Single content.

") } -func newTestBundleSources(t testing.TB) (*hugofs.Fs, config.Provider) { - cfg, fs := newTestCfgBasic() - c := qt.New(t) +func TestBundledResourcesMultilingualDuplicateResourceFiles(t *testing.T) { + t.Parallel() - workDir := "/work" - cfg.Set("workingDir", workDir) - cfg.Set("contentDir", "base") - cfg.Set("baseURL", "https://example.com") - cfg.Set("mediaTypes", map[string]any{ - "bepsays/bep": map[string]any{ - "suffixes": []string{"bep"}, - }, - }) - - pageContent := `--- -title: "Bundle Galore" -slug: pageslug -date: 2017-10-09 + files := ` +-- hugo.toml -- +baseURL = "https://example.com/" +[markup] +[markup.goldmark] +duplicateResourceFiles = true +[languages] +[languages.en] +weight = 1 +[languages.en.permalinks] +"/" = "/enpages/:slug/" +[languages.nn] +weight = 2 +[languages.nn.permalinks] +"/" = "/nnpages/:slug/" +-- content/mybundle/index.md -- --- - -TheContent. -` - - pageContentShortcode := `--- -title: "Bundle Galore" -slug: pageslug -date: 2017-10-09 +title: "My Bundle" --- - -TheContent. - -{{< myShort >}} -` - - pageWithImageShortcodeAndResourceMetadataContent := `--- -title: "Bundle Galore" -slug: pageslug -date: 2017-10-09 -resources: -- src: "*.jpg" - name: "my-sunset-:counter" - title: "Sunset Galore :counter" - params: - myParam: "My Sunny Param" +{{< getresource "f1.txt" >}} +{{< getresource "f2.txt" >}} +-- content/mybundle/index.nn.md -- --- - -TheContent. - -{{< myShort >}} -` - - pageContentNoSlug := `--- -title: "Bundle Galore #2" -date: 2017-10-09 +title: "My Bundle NN" --- - -TheContent. +{{< getresource "f1.txt" >}} +f2.nn.txt is the original name. +{{< getresource "f2.nn.txt" >}} +{{< getresource "f2.txt" >}} +{{< getresource "sub/f3.txt" >}} +-- content/mybundle/f1.txt -- +F1 en. +-- content/mybundle/sub/f3.txt -- +F1 en. +-- content/mybundle/f2.txt -- +F2 en. +-- content/mybundle/f2.nn.txt -- +F2 nn. +-- layouts/shortcodes/getresource.html -- +{{ $r := .Page.Resources.Get (.Get 0)}} +Resource: {{ (.Get 0) }}|{{ with $r }}{{ .RelPermalink }}|{{ .Content }}|{{ else }}Not found.{{ end}} +-- layouts/_default/single.html -- +{{ .Title }}|{{ .RelPermalink }}|{{ .Lang }}|{{ .Content }}| ` + b := Test(t, files) - singleLayout := ` -Single Title: {{ .Title }} -Single RelPermalink: {{ .RelPermalink }} -Single Permalink: {{ .Permalink }} -Content: {{ .Content }} -{{ $sunset := .Resources.GetMatch "my-sunset-1*" }} -{{ with $sunset }} -Sunset RelPermalink: {{ .RelPermalink }} -Sunset Permalink: {{ .Permalink }} -{{ $thumb := .Fill "123x123" }} -Thumb Width: {{ $thumb.Width }} -Thumb Name: {{ $thumb.Name }} -Thumb Title: {{ $thumb.Title }} -Thumb RelPermalink: {{ $thumb.RelPermalink }} -{{ end }} -{{ $types := slice "image" "page" }} -{{ range $types }} -{{ $typeTitle := . | title }} -{{ range $i, $e := $.Resources.ByType . 
}} -{{ $i }}: {{ $typeTitle }} Title: {{ .Title }} -{{ $i }}: {{ $typeTitle }} Name: {{ .Name }} -{{ $i }}: {{ $typeTitle }} RelPermalink: {{ .RelPermalink }}| -{{ $i }}: {{ $typeTitle }} Params: {{ printf "%v" .Params }} -{{ $i }}: {{ $typeTitle }} myParam: Lower: {{ .Params.myparam }} Caps: {{ .Params.MYPARAM }} -{{ end }} -{{ end }} -` - - myShort := ` -MyShort in {{ .Page.File.Path }}: -{{ $sunset := .Page.Resources.GetMatch "my-sunset-2*" }} -{{ with $sunset }} -Short Sunset RelPermalink: {{ .RelPermalink }} -{{ $thumb := .Fill "56x56" }} -Short Thumb Width: {{ $thumb.Width }} -{{ end }} -` - - listLayout := `{{ .Title }}|{{ .Content }}` - - writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "single.html"), singleLayout) - writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "list.html"), listLayout) - writeSource(t, fs, filepath.Join(workDir, "layouts", "shortcodes", "myShort.html"), myShort) - writeSource(t, fs, filepath.Join(workDir, "layouts", "shortcodes", "myShort.customo"), myShort) - - writeSource(t, fs, filepath.Join(workDir, "base", "_index.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "_1.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "_1.png"), pageContent) - - writeSource(t, fs, filepath.Join(workDir, "base", "images", "hugo-logo.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "a", "2.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "a", "1.md"), pageContent) - - writeSource(t, fs, filepath.Join(workDir, "base", "a", "b", "index.md"), pageContentNoSlug) - writeSource(t, fs, filepath.Join(workDir, "base", "a", "b", "ab1.md"), pageContentNoSlug) - - // Mostly plain static assets in a folder with a page in a sub folder thrown in. - writeSource(t, fs, filepath.Join(workDir, "base", "assets", "pic1.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "assets", "pic2.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "assets", "pages", "mypage.md"), pageContent) - - // Bundle - writeSource(t, fs, filepath.Join(workDir, "base", "b", "my-bundle", "index.md"), pageWithImageShortcodeAndResourceMetadataContent) - writeSource(t, fs, filepath.Join(workDir, "base", "b", "my-bundle", "1.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "b", "my-bundle", "2.md"), pageContentShortcode) - writeSource(t, fs, filepath.Join(workDir, "base", "b", "my-bundle", "custom-mime.bep"), "bepsays") - writeSource(t, fs, filepath.Join(workDir, "base", "b", "my-bundle", "c", "logo.png"), "content") - - // Bundle with 은행 slug - // See https://github.com/gohugoio/hugo/issues/4241 - writeSource(t, fs, filepath.Join(workDir, "base", "c", "bundle", "index.md"), `--- -title: "은행 은행" -slug: 은행 -date: 2017-10-09 ---- - -Content for 은행. 
+ // helpers.PrintFs(b.H.Fs.PublishDir, "", os.Stdout) + b.AssertFileContent("public/nn/nnpages/my-bundle-nn/index.html", ` +My Bundle NN +Resource: f1.txt|/nn/nnpages/my-bundle-nn/f1.txt| +Resource: f2.txt|/nn/nnpages/my-bundle-nn/f2.nn.txt|F2 nn.| +Resource: f2.nn.txt|/nn/nnpages/my-bundle-nn/f2.nn.txt|F2 nn.| +Resource: sub/f3.txt|/nn/nnpages/my-bundle-nn/sub/f3.txt|F1 en.| `) - // Bundle in root - writeSource(t, fs, filepath.Join(workDir, "base", "root", "index.md"), pageWithImageShortcodeAndResourceMetadataContent) - writeSource(t, fs, filepath.Join(workDir, "base", "root", "1.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "root", "c", "logo.png"), "content") + b.AssertFileContent("public/enpages/my-bundle/f2.txt", "F2 en.") + b.AssertFileContent("public/nn/nnpages/my-bundle-nn/f2.nn.txt", "F2 nn") - writeSource(t, fs, filepath.Join(workDir, "base", "c", "bundle", "logo-은행.png"), "은행 PNG") + b.AssertFileContent("public/enpages/my-bundle/index.html", ` +Resource: f1.txt|/enpages/my-bundle/f1.txt|F1 en.| +Resource: f2.txt|/enpages/my-bundle/f2.txt|F2 en.| +`) + b.AssertFileContent("public/enpages/my-bundle/f1.txt", "F1 en.") - // Write a real image into one of the bundle above. - src, err := os.Open("testdata/sunset.jpg") - c.Assert(err, qt.IsNil) - - // We need 2 to test https://github.com/gohugoio/hugo/issues/4202 - out, err := fs.Source.Create(filepath.Join(workDir, "base", "b", "my-bundle", "sunset1.jpg")) - c.Assert(err, qt.IsNil) - out2, err := fs.Source.Create(filepath.Join(workDir, "base", "b", "my-bundle", "sunset2.jpg")) - c.Assert(err, qt.IsNil) - - _, err = io.Copy(out, src) - c.Assert(err, qt.IsNil) - out.Close() - src.Seek(0, 0) - _, err = io.Copy(out2, src) - out2.Close() - src.Close() - c.Assert(err, qt.IsNil) - - return fs, cfg -} - -func newTestBundleSourcesMultilingual(t *testing.T) (*hugofs.Fs, config.Provider) { - cfg, fs := newTestCfgBasic() - - workDir := "/work" - cfg.Set("workingDir", workDir) - cfg.Set("contentDir", "base") - cfg.Set("baseURL", "https://example.com") - cfg.Set("defaultContentLanguage", "en") - - langConfig := map[string]any{ - "en": map[string]any{ - "weight": 1, - "languageName": "English", - }, - "nn": map[string]any{ - "weight": 2, - "languageName": "Nynorsk", - }, - } - - cfg.Set("languages", langConfig) - - pageContent := `--- -slug: pageslug -date: 2017-10-09 ---- - -TheContent. 
-` - - layout := `{{ .Title }}|{{ .Content }}|Lang: {{ .Site.Language.Lang }}` - - writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "single.html"), layout) - writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "list.html"), layout) - - writeSource(t, fs, filepath.Join(workDir, "base", "1s", "mypage.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "1s", "mypage.nn.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "1s", "mylogo.png"), "content") - - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "_index.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "_index.nn.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "en.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "_1.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "_1.nn.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "a.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "b.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "b.nn.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "c.nn.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "b", "d.nn.png"), "content") - - writeSource(t, fs, filepath.Join(workDir, "base", "bc", "_index.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bc", "_index.nn.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bc", "page.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bc", "logo-bc.png"), "logo") - writeSource(t, fs, filepath.Join(workDir, "base", "bc", "page.nn.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bc", "data1.json"), "data1") - writeSource(t, fs, filepath.Join(workDir, "base", "bc", "data2.json"), "data2") - writeSource(t, fs, filepath.Join(workDir, "base", "bc", "data1.nn.json"), "data1.nn") - - writeSource(t, fs, filepath.Join(workDir, "base", "bd", "index.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bd", "page.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bd", "page.nn.md"), pageContent) - - writeSource(t, fs, filepath.Join(workDir, "base", "be", "_index.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "be", "page.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "be", "page.nn.md"), pageContent) - - // Bundle leaf, multilingual - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "index.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "index.nn.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "1.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "2.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "2.nn.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "page.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "logo.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "logo.nn.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "one.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "d", "deep.png"), "content") - - // Translated bundle in some sensible sub path. 
- writeSource(t, fs, filepath.Join(workDir, "base", "bf", "my-bf-bundle", "index.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bf", "my-bf-bundle", "index.nn.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bf", "my-bf-bundle", "page.md"), pageContent) - - return fs, cfg + // Should be duplicated to the nn bundle. + b.AssertFileContent("public/nn/nnpages/my-bundle-nn/f1.txt", "F1 en.") } // https://github.com/gohugoio/hugo/issues/5858 func TestBundledResourcesWhenMultipleOutputFormats(t *testing.T) { t.Parallel() - b := newTestSitesBuilder(t).Running().WithConfigFile("toml", ` + files := ` +-- hugo.toml -- baseURL = "https://example.org" +disableKinds = ["taxonomy", "term"] +disableLiveReload = true [outputs] - # This looks odd, but it triggers the behaviour in #5858 - # The total output formats list gets sorted, so CSS before HTML. - home = [ "CSS" ] - -`) - b.WithContent("mybundle/index.md", ` +# This looks odd, but it triggers the behaviour in #5858 +# The total output formats list gets sorted, so CSS before HTML. +home = [ "CSS" ] +-- content/mybundle/index.md -- --- title: Page -date: 2017-01-15 --- -`, - "mybundle/data.json", "MyData", - ) +-- content/mybundle/data.json -- +MyData +-- layouts/_default/single.html -- +{{ range .Resources }} +{{ .ResourceType }}|{{ .Title }}| +{{ end }} +` - b.CreateSites().Build(BuildCfg{}) + b := TestRunning(t, files) b.AssertFileContent("public/mybundle/data.json", "MyData") - // Change the bundled JSON file and make sure it gets republished. - b.EditFiles("content/mybundle/data.json", "My changed data") - - b.Build(BuildCfg{}) + b.EditFileReplaceAll("content/mybundle/data.json", "MyData", "My changed data").Build() b.AssertFileContent("public/mybundle/data.json", "My changed data") } +// https://github.com/gohugoio/hugo/issues/5858 + // https://github.com/gohugoio/hugo/issues/4870 func TestBundleSlug(t *testing.T) { t.Parallel() @@ -1016,191 +555,45 @@ slug: %s c.Assert(b.CheckExists("public/about/services2/this-is-another-slug/index.html"), qt.Equals, true) } -func TestBundleMisc(t *testing.T) { - config := ` -baseURL = "https://example.com" -defaultContentLanguage = "en" -defaultContentLanguageInSubdir = true -ignoreFiles = ["README\\.md", "content/en/ignore"] - -[Languages] -[Languages.en] -weight = 99999 -contentDir = "content/en" -[Languages.nn] -weight = 20 -contentDir = "content/nn" -[Languages.sv] -weight = 30 -contentDir = "content/sv" -[Languages.nb] -weight = 40 -contentDir = "content/nb" - -` - - const pageContent = `--- -title: %q ---- -` - createPage := func(s string) string { - return fmt.Sprintf(pageContent, s) - } - - b := newTestSitesBuilder(t).WithConfigFile("toml", config) - b.WithLogger(loggers.NewDefault()) - - b.WithTemplates("_default/list.html", `{{ range .Site.Pages }} -{{ .Kind }}|{{ .Path }}|{{ with .CurrentSection }}CurrentSection: {{ .Path }}{{ end }}|{{ .RelPermalink }}{{ end }} -`) - - b.WithTemplates("_default/single.html", `Single: {{ .Title }}`) - - b.WithContent("en/sect1/sect2/_index.md", createPage("en: Sect 2")) - b.WithContent("en/sect1/sect2/page.md", createPage("en: Page")) - b.WithContent("en/sect1/sect2/data-branch.json", "mydata") - b.WithContent("nn/sect1/sect2/page.md", createPage("nn: Page")) - b.WithContent("nn/sect1/sect2/data-branch.json", "my nn data") - - // En only - b.WithContent("en/enonly/myen.md", createPage("en: Page")) - b.WithContent("en/enonly/myendata.json", "mydata") - - // Leaf - - b.WithContent("nn/b1/index.md", createPage("nn: leaf")) 
- b.WithContent("en/b1/index.md", createPage("en: leaf")) - b.WithContent("sv/b1/index.md", createPage("sv: leaf")) - b.WithContent("nb/b1/index.md", createPage("nb: leaf")) - - // Should be ignored - b.WithContent("en/ignore/page.md", createPage("en: ignore")) - b.WithContent("en/README.md", createPage("en: ignore")) - - // Both leaf and branch bundle in same dir - b.WithContent("en/b2/index.md", `--- -slug: leaf ---- -`) - b.WithContent("en/b2/_index.md", createPage("en: branch")) - - b.WithContent("en/b1/data1.json", "en: data") - b.WithContent("sv/b1/data1.json", "sv: data") - b.WithContent("sv/b1/data2.json", "sv: data2") - b.WithContent("nb/b1/data2.json", "nb: data2") - - b.WithContent("en/b3/_index.md", createPage("en: branch")) - b.WithContent("en/b3/p1.md", createPage("en: page")) - b.WithContent("en/b3/data1.json", "en: data") - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/en/index.html", - filepath.FromSlash("section|sect1/sect2/_index.md|CurrentSection: sect1/sect2/_index.md"), - "myen.md|CurrentSection: enonly") - - b.AssertFileContentFn("public/en/index.html", func(s string) bool { - // Check ignored files - return !regexp.MustCompile("README|ignore").MatchString(s) - }) - - b.AssertFileContent("public/nn/index.html", filepath.FromSlash("page|sect1/sect2/page.md|CurrentSection: sect1")) - b.AssertFileContentFn("public/nn/index.html", func(s string) bool { - return !strings.Contains(s, "enonly") - }) - - // Check order of inherited data file - b.AssertFileContent("public/nb/b1/data1.json", "en: data") // Default content - b.AssertFileContent("public/nn/b1/data2.json", "sv: data") // First match - - b.AssertFileContent("public/en/enonly/myen/index.html", "Single: en: Page") - b.AssertFileContent("public/en/enonly/myendata.json", "mydata") - - c := qt.New(t) - c.Assert(b.CheckExists("public/sv/enonly/myen/index.html"), qt.Equals, false) - - // Both leaf and branch bundle in same dir - // We log a warning about it, but we keep both. 
- b.AssertFileContent("public/en/b2/index.html", - "/en/b2/leaf/", - filepath.FromSlash("section|sect1/sect2/_index.md|CurrentSection: sect1/sect2/_index.md")) -} - -// Issue 6136 +// See #11663 func TestPageBundlerPartialTranslations(t *testing.T) { - config := ` -baseURL = "https://example.org" + t.Parallel() + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["taxonomy", "term"] defaultContentLanguage = "en" defaultContentLanguageInSubDir = true -disableKinds = ["taxonomy", "term"] [languages] [languages.nn] -languageName = "Nynorsk" weight = 2 -title = "Tittel på Nynorsk" [languages.en] -title = "Title in English" -languageName = "English" weight = 1 +-- content/section/mybundle/index.md -- +--- +title: "Mybundle" +--- +-- content/section/mybundle/bundledpage.md -- +--- +title: "Bundled page en" +--- +-- content/section/mybundle/bundledpage.nn.md -- +--- +title: "Bundled page nn" +--- + +-- layouts/_default/single.html -- +Bundled page: {{ .RelPermalink}}|Len resources: {{ len .Resources }}| + + ` + b := Test(t, files) - pageContent := func(id string) string { - return fmt.Sprintf(` ---- -title: %q ---- -`, id) - } - - dataContent := func(id string) string { - return id - } - - b := newTestSitesBuilder(t).WithConfigFile("toml", config) - - b.WithContent("blog/sect1/_index.nn.md", pageContent("s1.nn")) - b.WithContent("blog/sect1/data.json", dataContent("s1.data")) - - b.WithContent("blog/sect1/b1/index.nn.md", pageContent("s1.b1.nn")) - b.WithContent("blog/sect1/b1/data.json", dataContent("s1.b1.data")) - - b.WithContent("blog/sect2/_index.md", pageContent("s2")) - b.WithContent("blog/sect2/data.json", dataContent("s2.data")) - - b.WithContent("blog/sect2/b1/index.md", pageContent("s2.b1")) - b.WithContent("blog/sect2/b1/data.json", dataContent("s2.b1.data")) - - b.WithContent("blog/sect2/b2/index.md", pageContent("s2.b2")) - b.WithContent("blog/sect2/b2/bp.md", pageContent("s2.b2.bundlecontent")) - - b.WithContent("blog/sect2/b3/index.md", pageContent("s2.b3")) - b.WithContent("blog/sect2/b3/bp.nn.md", pageContent("s2.b3.bundlecontent.nn")) - - b.WithContent("blog/sect2/b4/index.nn.md", pageContent("s2.b4")) - b.WithContent("blog/sect2/b4/bp.nn.md", pageContent("s2.b4.bundlecontent.nn")) - - b.WithTemplates("index.html", ` -Num Pages: {{ len .Site.Pages }} -{{ range .Site.Pages }} -{{ .Kind }}|{{ .RelPermalink }}|Content: {{ .Title }}|Resources: {{ range .Resources }}R: {{ .Title }}|{{ .Content }}|{{ end -}} -{{ end }} -`) - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/nn/index.html", - "Num Pages: 6", - "page|/nn/blog/sect1/b1/|Content: s1.b1.nn|Resources: R: data.json|s1.b1.data|", - "page|/nn/blog/sect2/b3/|Content: s2.b3|Resources: R: s2.b3.bundlecontent.nn|", - "page|/nn/blog/sect2/b4/|Content: s2.b4|Resources: R: s2.b4.bundlecontent.nn", + b.AssertFileContent("public/en/section/mybundle/index.html", + "Bundled page: /en/section/mybundle/|Len resources: 1|", ) - b.AssertFileContent("public/en/index.html", - "Num Pages: 6", - "section|/en/blog/sect2/|Content: s2|Resources: R: data.json|s2.data|", - "page|/en/blog/sect2/b1/|Content: s2.b1|Resources: R: data.json|s2.b1.data|", - "page|/en/blog/sect2/b2/|Content: s2.b2|Resources: R: s2.b2.bundlecontent|", - ) + b.AssertFileExists("public/nn/section/mybundle/index.html", false) } // #6208 @@ -1329,7 +722,7 @@ func TestPageBundlerHome(t *testing.T) { cfg.Set("publishDir", "public") fs := hugofs.NewFromOld(hugofs.Os, cfg) - os.MkdirAll(filepath.Join(workDir, "content"), 0777) + 
os.MkdirAll(filepath.Join(workDir, "content"), 0o777) defer clean() diff --git a/hugolib/pagecollections.go b/hugolib/pagecollections.go index 26da4905e..8e05ad7e6 100644 --- a/hugolib/pagecollections.go +++ b/hugolib/pagecollections.go @@ -1,4 +1,4 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -18,91 +18,65 @@ import ( "path" "path/filepath" "strings" - "sync" - - "github.com/gohugoio/hugo/common/paths" + "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/hugofs/files" - "github.com/gohugoio/hugo/helpers" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/resources/kinds" "github.com/gohugoio/hugo/resources/page" ) -// PageCollections contains the page collections for a site. -type PageCollections struct { +// pageFinder provides ways to find a Page in a Site. +type pageFinder struct { pageMap *pageMap - - // Lazy initialized page collections - pages *lazyPagesFactory - regularPages *lazyPagesFactory - allPages *lazyPagesFactory - allRegularPages *lazyPagesFactory } -// Pages returns all pages. -// This is for the current language only. -func (c *PageCollections) Pages() page.Pages { - return c.pages.get() -} - -// RegularPages returns all the regular pages. -// This is for the current language only. -func (c *PageCollections) RegularPages() page.Pages { - return c.regularPages.get() -} - -// AllPages returns all pages for all languages. -func (c *PageCollections) AllPages() page.Pages { - return c.allPages.get() -} - -// AllRegularPages returns all regular pages for all languages. -func (c *PageCollections) AllRegularPages() page.Pages { - return c.allRegularPages.get() -} - -type lazyPagesFactory struct { - pages page.Pages - - init sync.Once - factory page.PagesFactory -} - -func (l *lazyPagesFactory) get() page.Pages { - l.init.Do(func() { - l.pages = l.factory() - }) - return l.pages -} - -func newLazyPagesFactory(factory page.PagesFactory) *lazyPagesFactory { - return &lazyPagesFactory{factory: factory} -} - -func newPageCollections(m *pageMap) *PageCollections { +func newPageFinder(m *pageMap) *pageFinder { if m == nil { panic("must provide a pageMap") } - - c := &PageCollections{pageMap: m} - - c.pages = newLazyPagesFactory(func() page.Pages { - return m.createListAllPages() - }) - - c.regularPages = newLazyPagesFactory(func() page.Pages { - return c.findPagesByKindIn(kinds.KindPage, c.pages.get()) - }) - + c := &pageFinder{pageMap: m} return c } +// getPageRef resolves a Page from ref/relRef, with a slightly more comprehensive +// search path than getPage. +func (c *pageFinder) getPageRef(context page.Page, ref string) (page.Page, error) { + n, err := c.getContentNode(context, true, ref) + if err != nil { + return nil, err + } + + if p, ok := n.(page.Page); ok { + return p, nil + } + return nil, nil +} + +func (c *pageFinder) getPage(context page.Page, ref string) (page.Page, error) { + n, err := c.getContentNode(context, false, filepath.ToSlash(ref)) + if err != nil { + return nil, err + } + if p, ok := n.(page.Page); ok { + return p, nil + } + return nil, nil +} + +// Only used in tests. +func (c *pageFinder) getPageOldVersion(kind string, sections ...string) page.Page { + refs := append([]string{kind}, path.Join(sections...)) + p, _ := c.getPageForRefs(refs...) 
+ return p +} + // This is an adapter func for the old API with Kind as first argument. // This is invoked when you do .Site.GetPage. We drop the Kind and fails // if there are more than 2 arguments, which would be ambiguous. -func (c *PageCollections) getPageOldVersion(ref ...string) (page.Page, error) { +func (c *pageFinder) getPageForRefs(ref ...string) (page.Page, error) { var refs []string for _, r := range ref { // A common construct in the wild is @@ -141,184 +115,109 @@ func (c *PageCollections) getPageOldVersion(ref ...string) (page.Page, error) { key = "/" + key } - return c.getPageNew(nil, key) + return c.getPage(nil, key) } -// Only used in tests. -func (c *PageCollections) getPage(typ string, sections ...string) page.Page { - refs := append([]string{typ}, path.Join(sections...)) - p, _ := c.getPageOldVersion(refs...) - return p -} - -// getPageRef resolves a Page from ref/relRef, with a slightly more comprehensive -// search path than getPageNew. -func (c *PageCollections) getPageRef(context page.Page, ref string) (page.Page, error) { - n, err := c.getContentNode(context, true, ref) - if err != nil || n == nil || n.p == nil { - return nil, err - } - return n.p, nil -} - -func (c *PageCollections) getPageNew(context page.Page, ref string) (page.Page, error) { - n, err := c.getContentNode(context, false, ref) - if err != nil || n == nil || n.p == nil { - return nil, err - } - return n.p, nil -} - -func (c *PageCollections) getSectionOrPage(ref string) (*contentNode, string) { - var n *contentNode - - pref := helpers.AddTrailingSlash(ref) - s, v, found := c.pageMap.sections.LongestPrefix(pref) - - if found { - n = v.(*contentNode) - } - - if found && s == pref { - // A section - return n, "" - } - - m := c.pageMap - - filename := strings.TrimPrefix(strings.TrimPrefix(ref, s), "/") - langSuffix := "." + m.s.Lang() - - // Trim both extension and any language code. - name := paths.PathNoExt(filename) - name = strings.TrimSuffix(name, langSuffix) - - // These are reserved bundle names and will always be stored by their owning - // folder name. - name = strings.TrimSuffix(name, "/index") - name = strings.TrimSuffix(name, "/_index") - - if !found { - return nil, name - } - - // Check if it's a section with filename provided. - if !n.p.File().IsZero() && n.p.File().LogicalName() == filename { - return n, name - } - - return m.getPage(s, name), name -} - -// For Ref/Reflink and .Site.GetPage do simple name lookups for the potentially ambiguous myarticle.md and /myarticle.md, -// but not when we get ./myarticle*, section/myarticle. -func shouldDoSimpleLookup(ref string) bool { - if ref[0] == '.' 
{ - return false - } - - slashCount := strings.Count(ref, "/") - - if slashCount > 1 { - return false - } - - return slashCount == 0 || ref[0] == '/' -} - -func (c *PageCollections) getContentNode(context page.Page, isReflink bool, ref string) (*contentNode, error) { - ref = filepath.ToSlash(strings.ToLower(strings.TrimSpace(ref))) +const defaultContentExt = ".md" +func (c *pageFinder) getContentNode(context page.Page, isReflink bool, ref string) (contentNodeI, error) { + inRef := ref if ref == "" { ref = "/" } - inRef := ref - navUp := strings.HasPrefix(ref, "..") - var doSimpleLookup bool - if isReflink || context == nil { - doSimpleLookup = shouldDoSimpleLookup(ref) + if paths.HasExt(ref) { + return c.getContentNodeForRef(context, isReflink, true, inRef, ref) } + var refs []string + + // We are always looking for a content file and having an extension greatly simplifies the code that follows, + // even in the case where the extension does not match this one. + if ref == "/" { + refs = append(refs, "/_index"+defaultContentExt) + } else if strings.HasSuffix(ref, "/index") { + refs = append(refs, ref+"/index"+defaultContentExt) + refs = append(refs, ref+defaultContentExt) + } else { + refs = append(refs, ref+defaultContentExt) + } + + for _, ref := range refs { + n, err := c.getContentNodeForRef(context, isReflink, false, inRef, ref) + if n != nil || err != nil { + return n, err + } + } + + return nil, nil +} + +func (c *pageFinder) getContentNodeForRef(context page.Page, isReflink, hadExtension bool, inRef, ref string) (contentNodeI, error) { + s := c.pageMap.s + contentPathParser := s.Conf.PathParser() + if context != nil && !strings.HasPrefix(ref, "/") { - // Try the page-relative path. - var base string - if context.File().IsZero() { - base = context.SectionsPath() - } else { - meta := context.File().FileInfo().Meta() - base = filepath.ToSlash(filepath.Dir(meta.Path)) - if meta.Classifier == files.ContentClassLeaf { - // Bundles are stored in subfolders e.g. blog/mybundle/index.md, - // so if the user has not explicitly asked to go up, - // look on the "blog" level. - if !navUp { - base = path.Dir(base) - } + // Try the page-relative path first. + // Branch pages: /mysection, "./mypage" => /mysection/mypage + // Regular pages: /mysection/mypage.md, Path=/mysection/mypage, "./someotherpage" => /mysection/mypage/../someotherpage + // Regular leaf bundles: /mysection/mypage/index.md, Path=/mysection/mypage, "./someotherpage" => /mysection/mypage/../someotherpage + // Given the above, for regular pages we use the containing folder. + var baseDir string + if pi := context.PathInfo(); pi != nil { + if pi.IsBranchBundle() || (hadExtension) { + baseDir = pi.Dir() + } else { + baseDir = pi.ContainerDir() } } - ref = path.Join("/", strings.ToLower(base), ref) - } - if !strings.HasPrefix(ref, "/") { - ref = "/" + ref - } + rel := path.Join(baseDir, inRef) - m := c.pageMap - - // It's either a section, a page in a section or a taxonomy node. - // Start with the most likely: - n, name := c.getSectionOrPage(ref) - if n != nil { - return n, nil - } - - if !strings.HasPrefix(inRef, "/") { - // Many people will have "post/foo.md" in their content files. - if n, _ := c.getSectionOrPage("/" + inRef); n != nil { - return n, nil + if !hadExtension && !paths.HasExt(rel) { + // See comment above. 
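Editor's note, not part of the patch: getContentNode above first normalizes an extension-less ref into one or more candidate content paths ending in the default content extension before looking them up. A minimal standalone sketch of that candidate generation, with the surrounding pageFinder and PathParser plumbing left out:

package main

import (
	"fmt"
	"path"
	"strings"
)

const defaultContentExt = ".md"

// candidateRefs mirrors the candidate-building step in getContentNode above:
// refs that already carry an extension are used as-is, "/" maps to the home
// bundle, and "x/index" may be either a page named index or a nested bundle.
func candidateRefs(ref string) []string {
	if ref == "" {
		ref = "/"
	}
	if path.Ext(ref) != "" {
		return []string{ref}
	}
	switch {
	case ref == "/":
		return []string{"/_index" + defaultContentExt}
	case strings.HasSuffix(ref, "/index"):
		return []string{ref + "/index" + defaultContentExt, ref + defaultContentExt}
	default:
		return []string{ref + defaultContentExt}
	}
}

func main() {
	for _, ref := range []string{"/", "/mysect/index", "/blog/post1", "/blog/post1.md"} {
		fmt.Printf("%-16s => %v\n", ref, candidateRefs(ref))
	}
}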
+ rel += defaultContentExt } - } - // Check if it's a taxonomy node - pref := helpers.AddTrailingSlash(ref) - s, v, found := m.taxonomies.LongestPrefix(pref) + relPath := contentPathParser.Parse(files.ComponentFolderContent, rel) - if found { - if !m.onSameLevel(pref, s) { - return nil, nil + n, err := c.getContentNodeFromPath(relPath, ref) + if n != nil || err != nil { + return n, err } - return v.(*contentNode), nil - } - getByName := func(s string) (*contentNode, error) { - n := m.pageReverseIndex.Get(s) - if n != nil { - if n == ambiguousContentNode { - return nil, fmt.Errorf("page reference %q is ambiguous", ref) + if hadExtension && context.File() != nil { + if n, err := c.getContentNodeFromRefReverseLookup(inRef, context.File().FileInfo()); n != nil || err != nil { + return n, err } - return n, nil } + } + + if strings.HasPrefix(ref, ".") { + // Page relative, no need to look further. return nil, nil } - var module string - if context != nil && !context.File().IsZero() { - module = context.File().FileInfo().Meta().Module + refPath := contentPathParser.Parse(files.ComponentFolderContent, ref) + + n, err := c.getContentNodeFromPath(refPath, ref) + + if n != nil || err != nil { + return n, err } - if module == "" && !c.pageMap.s.home.File().IsZero() { - module = c.pageMap.s.home.File().FileInfo().Meta().Module - } - - if module != "" { - n, err := getByName(module + ref) - if err != nil { - return nil, err + if hadExtension && s.home != nil && s.home.File() != nil { + if n, err := c.getContentNodeFromRefReverseLookup(inRef, s.home.File().FileInfo()); n != nil || err != nil { + return n, err } - if n != nil { - return n, nil + } + + var doSimpleLookup bool + if isReflink || context == nil { + slashCount := strings.Count(inRef, "/") + if slashCount <= 1 { + doSimpleLookup = slashCount == 0 || ref[0] == '/' } } @@ -326,16 +225,46 @@ func (c *PageCollections) getContentNode(context page.Page, isReflink bool, ref return nil, nil } - // Ref/relref supports this potentially ambiguous lookup. - return getByName(path.Base(name)) + n = c.pageMap.pageReverseIndex.Get(refPath.BaseNameNoIdentifier()) + if n == ambiguousContentNode { + return nil, fmt.Errorf("page reference %q is ambiguous", inRef) + } + + return n, nil } -func (*PageCollections) findPagesByKindIn(kind string, inPages page.Pages) page.Pages { - var pages page.Pages - for _, p := range inPages { - if p.Kind() == kind { - pages = append(pages, p) +func (c *pageFinder) getContentNodeFromRefReverseLookup(ref string, fi hugofs.FileMetaInfo) (contentNodeI, error) { + s := c.pageMap.s + meta := fi.Meta() + dir := meta.Filename + if !fi.IsDir() { + dir = filepath.Dir(meta.Filename) + } + + realFilename := filepath.Join(dir, ref) + + pcs, err := s.BaseFs.Content.ReverseLookup(realFilename) + if err != nil { + return nil, err + } + + // There may be multiple matches, but we will only use the first one. 
+ for _, pc := range pcs { + pi := s.Conf.PathParser().Parse(pc.Component, pc.Path) + if n := c.pageMap.treePages.Get(pi.Base()); n != nil { + return n, nil } } - return pages + return nil, nil +} + +func (c *pageFinder) getContentNodeFromPath(refPath *paths.Path, ref string) (contentNodeI, error) { + s := refPath.Base() + + n := c.pageMap.treePages.Get(s) + if n != nil { + return n, nil + } + + return nil, nil } diff --git a/hugolib/pagecollections_test.go b/hugolib/pagecollections_test.go index b11fc9899..8fd4f0739 100644 --- a/hugolib/pagecollections_test.go +++ b/hugolib/pagecollections_test.go @@ -63,12 +63,12 @@ func BenchmarkGetPage(b *testing.B) { b.ResetTimer() for i := 0; i < b.N; i++ { - home, _ := s.getPageNew(nil, "/") + home, _ := s.getPage(nil, "/") if home == nil { b.Fatal("Home is nil") } - p, _ := s.getPageNew(nil, pagePaths[i]) + p, _ := s.getPage(nil, pagePaths[i]) if p == nil { b.Fatal("Section is nil") } @@ -107,7 +107,7 @@ func TestBenchmarkGetPageRegular(t *testing.T) { for i := 0; i < 10; i++ { pp := path.Join("/", fmt.Sprintf("sect%d", i), fmt.Sprintf("page%d.md", i)) - page, _ := s.getPageNew(nil, pp) + page, _ := s.getPage(nil, pp) c.Assert(page, qt.Not(qt.IsNil), qt.Commentf(pp)) } } @@ -127,7 +127,7 @@ func BenchmarkGetPageRegular(b *testing.B) { b.ResetTimer() for i := 0; i < b.N; i++ { - page, _ := s.getPageNew(nil, pagePaths[i]) + page, _ := s.getPage(nil, pagePaths[i]) c.Assert(page, qt.Not(qt.IsNil)) } }) @@ -147,7 +147,7 @@ func BenchmarkGetPageRegular(b *testing.B) { b.ResetTimer() for i := 0; i < b.N; i++ { - page, _ := s.getPageNew(pages[i], pagePaths[i]) + page, _ := s.getPage(pages[i], pagePaths[i]) c.Assert(page, qt.Not(qt.IsNil)) } }) @@ -226,7 +226,7 @@ func TestGetPage(t *testing.T) { s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Configs: configs}, BuildCfg{SkipRender: true}) - sec3, err := s.getPageNew(nil, "/sect3") + sec3, err := s.getPage(nil, "/sect3") c.Assert(err, qt.IsNil) c.Assert(sec3, qt.Not(qt.IsNil)) @@ -313,15 +313,36 @@ func TestGetPage(t *testing.T) { } } - // test new internal Site.getPageNew + // test new internal Site.getPage for _, ref := range test.pathVariants { - page2, err := s.getPageNew(test.context, ref) + page2, err := s.getPage(test.context, ref) test.check(page2, err, errorMsg, c) } }) } } +// #11664 +func TestGetPageIndexIndex(t *testing.T) { + files := ` +-- hugo.toml -- +disableKinds = ["taxonomy", "term"] +-- content/mysect/index/index.md -- +--- +title: "Mysect Index" +--- +-- layouts/index.html -- +GetPage 1: {{ with site.GetPage "mysect/index/index.md" }}{{ .Title }}|{{ .RelPermalink }}|{{ .Path }}{{ end }}| +GetPage 2: {{ with site.GetPage "mysect/index" }}{{ .Title }}|{{ .RelPermalink }}|{{ .Path }}{{ end }}| +` + + b := Test(t, files) + b.AssertFileContent("public/index.html", + "GetPage 1: Mysect Index|/mysect/index/|/mysect/index|", + "GetPage 2: Mysect Index|/mysect/index/|/mysect/index|", + ) +} + // https://github.com/gohugoio/hugo/issues/6034 func TestGetPageRelative(t *testing.T) { b := newTestSitesBuilder(t) @@ -348,6 +369,172 @@ NOT FOUND b.AssertFileContent("public/who/index.html", `NOT FOUND`) } +func TestGetPageIssue11883(t *testing.T) { + files := ` +-- hugo.toml -- +-- p1/index.md -- +--- +title: p1 +--- +-- p1/p1.xyz -- +xyz. +-- layouts/index.html -- +Home. {{ with .Page.GetPage "p1.xyz" }}{{ else }}OK 1{{ end }} {{ with .Site.GetPage "p1.xyz" }}{{ else }}OK 2{{ end }} +` + + b := Test(t, files) + b.AssertFileContent("public/index.html", "Home. 
OK 1 OK 2") +} + +func TestGetPageBundleToRegular(t *testing.T) { + files := ` +-- hugo.toml -- +-- content/s1/p1/index.md -- +--- +title: p1 +--- +-- content/s1/p2.md -- +--- +title: p2 +--- +-- layouts/_default/single.html -- +{{ with .GetPage "p2" }} + OK: {{ .LinkTitle }} +{{ else }} + Unable to get p2. +{{ end }} +` + + b := Test(t, files) + b.AssertFileContent("public/s1/p1/index.html", "OK: p2") + b.AssertFileContent("public/s1/p2/index.html", "OK: p2") +} + +func TestPageGetPageVariations(t *testing.T) { + files := ` +-- hugo.toml -- +-- content/s1/p1/index.md -- +--- +title: p1 +--- +-- content/s1/p2.md -- +--- +title: p2 +--- +-- content/s2/p3/index.md -- +--- +title: p3 +--- +-- content/p2.md -- +--- +title: p2_root +--- +-- layouts/index.html -- +/s1/p2.md: {{ with .GetPage "/s1/p2.md" }}{{ .Title }}{{ end }}| +/s1/p2: {{ with .GetPage "/s1/p2" }}{{ .Title }}{{ end }}| +/s1/p1/index.md: {{ with .GetPage "/s1/p1/index.md" }}{{ .Title }}{{ end }}| +/s1/p1: {{ with .GetPage "/s1/p1" }}{{ .Title }}{{ end }}| +-- layouts/_default/single.html -- +../p2: {{ with .GetPage "../p2" }}{{ .Title }}{{ end }}| +../p2.md: {{ with .GetPage "../p2.md" }}{{ .Title }}{{ end }}| +p1/index.md: {{ with .GetPage "p1/index.md" }}{{ .Title }}{{ end }}| +../s2/p3/index.md: {{ with .GetPage "../s2/p3/index.md" }}{{ .Title }}{{ end }}| +` + + b := Test(t, files) + + b.AssertFileContent("public/index.html", ` +/s1/p2.md: p2| +/s1/p2: p2| +/s1/p1/index.md: p1| +/s1/p1: p1| +`) + + b.AssertFileContent("public/s1/p1/index.html", ` +../p2: p2_root| +../p2.md: p2| + +`) + + b.AssertFileContent("public/s1/p2/index.html", ` +../p2: p2_root| +../p2.md: p2_root| +p1/index.md: p1| +../s2/p3/index.md: p3| + +`) +} + +func TestPageGetPageMountsReverseLookup(t *testing.T) { + tempDir := t.TempDir() + + files := ` +-- README.md -- +--- +title: README +--- +-- blog/b1.md -- +--- +title: b1 +--- +{{< ref "../docs/d1.md" >}} +-- blog/b2/index.md -- +--- +title: b2 +--- +{{< ref "../../docs/d1.md" >}} +-- docs/d1.md -- +--- +title: d1 +--- +-- hugo.toml -- +baseURL = "https://example.com/" +[module] +[[module.mounts]] +source = "layouts" +target = "layouts" +[[module.mounts]] +source = "README.md" +target = "content/_index.md" +[[module.mounts]] +source = "blog" +target = "content/posts" +[[module.mounts]] +source = "docs" +target = "content/mydocs" +-- layouts/shortcodes/ref.html -- +{{ $ref := .Get 0 }} +.Page.GetPage({{ $ref }}).Title: {{ with .Page.GetPage $ref }}{{ .Title }}{{ end }}| +-- layouts/index.html -- +Home. +/blog/b1.md: {{ with .GetPage "/blog/b1.md" }}{{ .Title }}{{ end }}| +/blog/b2/index.md: {{ with .GetPage "/blog/b2/index.md" }}{{ .Title }}{{ end }}| +/docs/d1.md: {{ with .GetPage "/docs/d1.md" }}{{ .Title }}{{ end }}| +/README.md: {{ with .GetPage "/README.md" }}{{ .Title }}{{ end }}| +-- layouts/_default/single.html -- +Single. 
+/README.md: {{ with .GetPage "/README.md" }}{{ .Title }}{{ end }}| +{{ .Content }} + + +` + b := Test(t, files, TestOptWithConfig(func(cfg *IntegrationTestConfig) { cfg.WorkingDir = tempDir })) + + b.AssertFileContent("public/index.html", + ` +/blog/b1.md: b1| +/blog/b2/index.md: b2| +/docs/d1.md: d1| +/README.md: README +`, + ) + + b.AssertFileContent("public/mydocs/d1/index.html", `README.md: README|`) + + b.AssertFileContent("public/posts/b1/index.html", `.Page.GetPage(../docs/d1.md).Title: d1|`) + b.AssertFileContent("public/posts/b2/index.html", `.Page.GetPage(../../docs/d1.md).Title: d1|`) +} + // https://github.com/gohugoio/hugo/issues/7016 func TestGetPageMultilingual(t *testing.T) { b := newTestSitesBuilder(t) @@ -386,15 +573,6 @@ NOT FOUND b.AssertFileContent("public/en/index.html", `NOT FOUND`) } -func TestShouldDoSimpleLookup(t *testing.T) { - c := qt.New(t) - - c.Assert(shouldDoSimpleLookup("foo.md"), qt.Equals, true) - c.Assert(shouldDoSimpleLookup("/foo.md"), qt.Equals, true) - c.Assert(shouldDoSimpleLookup("./foo.md"), qt.Equals, false) - c.Assert(shouldDoSimpleLookup("docs/foo.md"), qt.Equals, false) -} - func TestRegularPagesRecursive(t *testing.T) { b := newTestSitesBuilder(t) @@ -449,5 +627,4 @@ RegularPagesRecursive: {{ range .RegularPagesRecursive }}{{ .Kind }}:{{ .RelPerm }).Build() b.AssertFileContent("public/index.html", `RegularPagesRecursive: page:/p1/|page:/post/p2/||End.`) - } diff --git a/hugolib/pages_capture.go b/hugolib/pages_capture.go index c57c707de..acdc674e6 100644 --- a/hugolib/pages_capture.go +++ b/hugolib/pages_capture.go @@ -1,4 +1,4 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. +// Copyright 2021 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -15,190 +15,188 @@ package hugolib import ( "context" + "errors" "fmt" - pth "path" + "os" "path/filepath" - "reflect" - - "github.com/gohugoio/hugo/common/herrors" - "github.com/gohugoio/hugo/common/loggers" - "github.com/gohugoio/hugo/common/maps" + "strings" + "sync" + "sync/atomic" + "time" + "github.com/bep/logg" + "github.com/gohugoio/hugo/common/paths" + "github.com/gohugoio/hugo/common/rungroup" + "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/parser/pageparser" - - "github.com/gohugoio/hugo/hugofs/files" + "github.com/spf13/afero" "github.com/gohugoio/hugo/source" + "github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/hugofs" - "github.com/spf13/afero" -) - -const ( - walkIsRootFileMetaKey = "walkIsRootFileMetaKey" ) func newPagesCollector( + ctx context.Context, + h *HugoSites, sp *source.SourceSpec, - contentMap *pageMaps, logger loggers.Logger, - contentTracker *contentChangeMap, - proc pagesCollectorProcessorProvider, filenames ...string) *pagesCollector { + infoLogger logg.LevelLogger, + m *pageMap, + ids []pathChange, +) *pagesCollector { return &pagesCollector{ - fs: sp.SourceFs, - contentMap: contentMap, - proc: proc, + ctx: ctx, + h: h, + fs: sp.BaseFs.Content.Fs, + m: m, sp: sp, logger: logger, - filenames: filenames, - tracker: contentTracker, + infoLogger: infoLogger, + ids: ids, + seenDirs: make(map[string]bool), } } -type contentDirKey struct { - dirname string - filename string - tp bundleDirType -} - -type fileinfoBundle struct { - header hugofs.FileMetaInfo - resources []hugofs.FileMetaInfo -} - -func (b *fileinfoBundle) containsResource(name string) bool { - for _, r := range b.resources { - if r.Name() == name { - return true - } - } - - return false -} - -type pageBundles map[string]*fileinfoBundle - type pagesCollector struct { - sp *source.SourceSpec - fs afero.Fs - logger loggers.Logger + ctx context.Context + h *HugoSites + sp *source.SourceSpec + logger loggers.Logger + infoLogger logg.LevelLogger - contentMap *pageMaps + m *pageMap - // Ordered list (bundle headers first) used in partial builds. - filenames []string + fs afero.Fs - // Content files tracker used in partial builds. - tracker *contentChangeMap + // List of paths that have changed. Used in partial builds. + ids []pathChange + seenDirs map[string]bool - proc pagesCollectorProcessorProvider + g rungroup.Group[hugofs.FileMetaInfo] } -// isCascadingEdit returns whether the dir represents a cascading edit. -// That is, if a front matter cascade section is removed, added or edited. -// If this is the case we must re-evaluate its descendants. -func (c *pagesCollector) isCascadingEdit(dir contentDirKey) (bool, string) { - // This is either a section or a taxonomy node. Find it. - prefix := cleanTreeKey(dir.dirname) +func (c *pagesCollector) copyFile(fim hugofs.FileMetaInfo) error { + meta := fim.Meta() + f, err := meta.Open() + if err != nil { + return fmt.Errorf("copyFile: failed to open: %w", err) + } - section := "/" - var isCascade bool + s := c.m.s - c.contentMap.walkBranchesPrefix(prefix, func(s string, n *contentNode) bool { - if n.fi == nil || dir.filename != n.fi.Meta().Filename { - return false - } + target := filepath.Join(s.PathSpec.GetTargetLanguageBasePath(), meta.PathInfo.Path()) - f, err := n.fi.Meta().Open() - if err != nil { - // File may have been removed, assume a cascading edit. - // Some false positives is not too bad. 
- isCascade = true - return true - } + defer f.Close() - pf, err := pageparser.ParseFrontMatterAndContent(f) - f.Close() - if err != nil { - isCascade = true - return true - } + fs := s.PublishFsStatic - if n.p == nil || n.p.bucket == nil { - return true - } + s.PathSpec.ProcessingStats.Incr(&s.PathSpec.ProcessingStats.Files) - section = s - - maps.PrepareParams(pf.FrontMatter) - cascade1, ok := pf.FrontMatter["cascade"] - hasCascade := n.p.bucket.cascade != nil && len(n.p.bucket.cascade) > 0 - if !ok { - isCascade = hasCascade - - return true - } - - if !hasCascade { - isCascade = true - return true - } - - for _, v := range n.p.bucket.cascade { - isCascade = !reflect.DeepEqual(cascade1, v) - if isCascade { - break - } - } - - return true - }) - - return isCascade, section + return helpers.WriteToDisk(filepath.Clean(target), f, fs) } -// Collect. +// Collect collects content by walking the file system and storing +// it in the content tree. +// It may be restricted by filenames set on the collector (partial build). func (c *pagesCollector) Collect() (collectErr error) { - c.proc.Start(context.Background()) - defer func() { - err := c.proc.Wait() - if collectErr == nil { - collectErr = err + var ( + numWorkers = c.h.numWorkers + numFilesProcessedTotal atomic.Uint64 + numFilesProcessedLast uint64 + fileBatchTimer = time.Now() + fileBatchTimerMu sync.Mutex + ) + + l := c.infoLogger.WithField("substep", "collect") + + logFilesProcessed := func(force bool) { + fileBatchTimerMu.Lock() + if force || time.Since(fileBatchTimer) > 3*time.Second { + numFilesProcessedBatch := numFilesProcessedTotal.Load() - numFilesProcessedLast + numFilesProcessedLast = numFilesProcessedTotal.Load() + loggers.TimeTrackf(l, fileBatchTimer, + logg.Fields{ + logg.Field{Name: "files", Value: numFilesProcessedBatch}, + logg.Field{Name: "files_total", Value: numFilesProcessedTotal.Load()}, + }, + "", + ) + fileBatchTimer = time.Now() } + fileBatchTimerMu.Unlock() + } + + defer func() { + logFilesProcessed(true) }() - if len(c.filenames) == 0 { - // Collect everything. - collectErr = c.collectDir("", false, nil) - } else { - for _, pm := range c.contentMap.pmaps { - pm.cfg.isRebuild = true - } - dirs := make(map[contentDirKey]bool) - for _, filename := range c.filenames { - dir, btype := c.tracker.resolveAndRemove(filename) - dirs[contentDirKey{dir, filename, btype}] = true - } - - for dir := range dirs { - for _, pm := range c.contentMap.pmaps { - pm.s.ResourceSpec.DeleteBySubstring(dir.dirname) - } - - switch dir.tp { - case bundleLeaf: - collectErr = c.collectDir(dir.dirname, true, nil) - case bundleBranch: - isCascading, section := c.isCascadingEdit(dir) - - if isCascading { - c.contentMap.deleteSection(section) + c.g = rungroup.Run[hugofs.FileMetaInfo](c.ctx, rungroup.Config[hugofs.FileMetaInfo]{ + NumWorkers: numWorkers, + Handle: func(ctx context.Context, fi hugofs.FileMetaInfo) error { + if err := c.m.AddFi(fi); err != nil { + if errors.Is(err, pageparser.ErrPlainHTMLDocumentsNotSupported) { + // Reclassify this as a static file. + if err := c.copyFile(fi); err != nil { + return err + } + } else { + return hugofs.AddFileInfoToError(err, fi, c.fs) } - collectErr = c.collectDir(dir.dirname, !isCascading, nil) - default: + } + numFilesProcessedTotal.Add(1) + if numFilesProcessedTotal.Load()%1000 == 0 { + logFilesProcessed(false) + } + return nil + }, + }) + + if c.ids == nil { + // Collect everything. 
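Editor's note, not part of the patch: Collect above fans file infos out to a fixed number of workers through the new common/rungroup package (a Config with NumWorkers and a Handle func, then Enqueue and Wait). That package is not shown in this hunk; the following is only a simplified stand-in built on errgroup to show the shape of the API, with backpressure and error-path draining omitted.

package main

import (
	"context"
	"fmt"

	"golang.org/x/sync/errgroup"
)

// group is a toy, generic worker pool with the same Enqueue/Wait shape as the
// run group used by the pages collector above. It is not the real
// common/rungroup implementation.
type group[T any] struct {
	ch chan T
	eg *errgroup.Group
}

func run[T any](ctx context.Context, numWorkers int, handle func(context.Context, T) error) *group[T] {
	eg, ctx := errgroup.WithContext(ctx)
	g := &group[T]{ch: make(chan T), eg: eg}
	for i := 0; i < numWorkers; i++ {
		eg.Go(func() error {
			for v := range g.ch {
				// Simplification: a real implementation must keep draining the
				// channel after an error so Enqueue never blocks forever.
				if err := handle(ctx, v); err != nil {
					return err
				}
			}
			return nil
		})
	}
	return g
}

func (g *group[T]) Enqueue(v T) { g.ch <- v }

func (g *group[T]) Wait() error {
	close(g.ch)
	return g.eg.Wait()
}

func main() {
	g := run(context.Background(), 4, func(_ context.Context, filename string) error {
		fmt.Println("handled", filename)
		return nil
	})
	for _, f := range []string{"a/index.md", "b/_index.md", "c/page.md"} {
		g.Enqueue(f)
	}
	if err := g.Wait(); err != nil {
		fmt.Println("error:", err)
	}
}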
+ collectErr = c.collectDir(nil, false, nil) + } else { + for _, s := range c.h.Sites { + s.pageMap.cfg.isRebuild = true + } + + for _, id := range c.ids { + if id.p.IsLeafBundle() { + collectErr = c.collectDir( + id.p, + false, + func(fim hugofs.FileMetaInfo) bool { + return true + }, + ) + } else if id.p.IsBranchBundle() { + collectErr = c.collectDir( + id.p, + false, + func(fim hugofs.FileMetaInfo) bool { + if fim.IsDir() { + return true + } + fimp := fim.Meta().PathInfo + if fimp == nil { + return false + } + + return strings.HasPrefix(fimp.Path(), paths.AddTrailingSlash(id.p.Dir())) + }, + ) + } else { // We always start from a directory. - collectErr = c.collectDir(dir.dirname, true, func(fim hugofs.FileMetaInfo) bool { - return dir.filename == fim.Meta().Filename + collectErr = c.collectDir(id.p, id.isDir, func(fim hugofs.FileMetaInfo) bool { + if id.delete || id.isDir { + if id.isDir { + return strings.HasPrefix(fim.Meta().PathInfo.Path(), paths.AddTrailingSlash(id.p.Path())) + } + + return id.p.Dir() == fim.Meta().PathInfo.Dir() + } + return id.p.Path() == fim.Meta().PathInfo.Path() }) } @@ -209,160 +207,51 @@ func (c *pagesCollector) Collect() (collectErr error) { } + werr := c.g.Wait() + if collectErr == nil { + collectErr = werr + } + return } -func (c *pagesCollector) isBundleHeader(fi hugofs.FileMetaInfo) bool { - class := fi.Meta().Classifier - return class == files.ContentClassLeaf || class == files.ContentClassBranch -} - -func (c *pagesCollector) getLang(fi hugofs.FileMetaInfo) string { - lang := fi.Meta().Lang - if lang != "" { - return lang - } - return c.sp.Cfg.DefaultContentLanguage() -} - -func (c *pagesCollector) addToBundle(info hugofs.FileMetaInfo, btyp bundleDirType, bundles pageBundles) error { - getBundle := func(lang string) *fileinfoBundle { - return bundles[lang] - } - - cloneBundle := func(lang string) *fileinfoBundle { - // Every bundled content file needs a content file header. - // Use the default content language if found, else just - // pick one. - var ( - source *fileinfoBundle - found bool - ) - - source, found = bundles[c.sp.Cfg.DefaultContentLanguage()] - if !found { - for _, b := range bundles { - source = b - break - } - } - - if source == nil { - panic(fmt.Sprintf("no source found, %d", len(bundles))) - } - - clone := c.cloneFileInfo(source.header) - clone.Meta().Lang = lang - - return &fileinfoBundle{ - header: clone, - } - } - - lang := c.getLang(info) - bundle := getBundle(lang) - isBundleHeader := c.isBundleHeader(info) - if bundle != nil && isBundleHeader { - // index.md file inside a bundle, see issue 6208. - info.Meta().Classifier = files.ContentClassContent - isBundleHeader = false - } - classifier := info.Meta().Classifier - isContent := classifier == files.ContentClassContent - if bundle == nil { - if isBundleHeader { - bundle = &fileinfoBundle{header: info} - bundles[lang] = bundle +func (c *pagesCollector) collectDir(dirPath *paths.Path, isDir bool, inFilter func(fim hugofs.FileMetaInfo) bool) error { + var dpath string + if dirPath != nil { + if isDir { + dpath = filepath.FromSlash(dirPath.Path()) } else { - if btyp == bundleBranch { - // No special logic for branch bundles. - // Every language needs its own _index.md file. - // Also, we only clone bundle headers for lonesome, bundled, - // content files. 
- return c.handleFiles(info) - } - - if isContent { - bundle = cloneBundle(lang) - bundles[lang] = bundle - } + dpath = filepath.FromSlash(dirPath.Dir()) } } - if !isBundleHeader && bundle != nil { - bundle.resources = append(bundle.resources, info) + if c.seenDirs[dpath] { + return nil } + c.seenDirs[dpath] = true - if classifier == files.ContentClassFile { - translations := info.Meta().Translations - - for lang, b := range bundles { - if !stringSliceContains(lang, translations...) && !b.containsResource(info.Name()) { - - // Clone and add it to the bundle. - clone := c.cloneFileInfo(info) - clone.Meta().Lang = lang - b.resources = append(b.resources, clone) - } - } - } - - return nil -} - -func (c *pagesCollector) cloneFileInfo(fi hugofs.FileMetaInfo) hugofs.FileMetaInfo { - return hugofs.NewFileMetaInfo(fi, hugofs.NewFileMeta()) -} - -func (c *pagesCollector) collectDir(dirname string, partial bool, inFilter func(fim hugofs.FileMetaInfo) bool) error { - fi, err := c.fs.Stat(dirname) + root, err := c.fs.Stat(dpath) if err != nil { - if herrors.IsNotExist(err) { - // May have been deleted. + if os.IsNotExist(err) { return nil } return err } - handleDir := func( - btype bundleDirType, - dir hugofs.FileMetaInfo, - path string, - readdir []hugofs.FileMetaInfo) error { - if btype > bundleNot && c.tracker != nil { - c.tracker.add(path, btype) - } + rootm := root.(hugofs.FileMetaInfo) - if btype == bundleBranch { - if err := c.handleBundleBranch(readdir); err != nil { - return err - } - // A branch bundle is only this directory level, so keep walking. - return nil - } else if btype == bundleLeaf { - if err := c.handleBundleLeaf(dir, path, readdir); err != nil { - return err - } - - return nil - } - - if err := c.handleFiles(readdir...); err != nil { - return err - } - - return nil + if err := c.collectDirDir(dpath, rootm, inFilter); err != nil { + return err } - filter := func(fim hugofs.FileMetaInfo) bool { - if fim.Meta().SkipDir { - return false - } + return nil +} +func (c *pagesCollector) collectDirDir(path string, root hugofs.FileMetaInfo, inFilter func(fim hugofs.FileMetaInfo) bool) error { + filter := func(fim hugofs.FileMetaInfo) bool { if c.sp.IgnoreFile(fim.Meta().Filename) { return false } - if inFilter != nil { return inFilter(fim) } @@ -370,210 +259,120 @@ func (c *pagesCollector) collectDir(dirname string, partial bool, inFilter func( } preHook := func(dir hugofs.FileMetaInfo, path string, readdir []hugofs.FileMetaInfo) ([]hugofs.FileMetaInfo, error) { - var btype bundleDirType - filtered := readdir[:0] for _, fi := range readdir { if filter(fi) { filtered = append(filtered, fi) - - if c.tracker != nil { - // Track symlinks. - c.tracker.addSymbolicLinkMapping(fi) - } } } - walkRoot := dir.Meta().IsRootFile readdir = filtered + if len(readdir) == 0 { + return nil, nil + } - // We merge language directories, so there can be duplicates, but they - // will be ordered, most important first. - var duplicates []int - seen := make(map[string]bool) + // Pick the first regular file. + var first hugofs.FileMetaInfo + for _, fi := range readdir { + if fi.IsDir() { + continue + } + first = fi + break + } - for i, fi := range readdir { + if first == nil { + // Only dirs, keep walking. + return readdir, nil + } + // Any bundle file will always be first. 
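Editor's note, not part of the patch: as the comments above describe, the new walker inspects the first regular file in a directory (bundle headers sort first) to decide whether it is inside a leaf bundle; if so, the sibling files become page resources and the walk stops descending. A small self-contained sketch of that decision, with Hugo's PathInfo machinery reduced to a filename check:

package main

import (
	"fmt"
	"path"
	"strings"
)

type entry struct {
	name  string
	isDir bool
}

// isLeafBundleFile reports whether a filename looks like a leaf bundle header
// (index.md, index.html, ...). Branch bundles (_index.*) are not leaf bundles.
func isLeafBundleFile(name string) bool {
	base := strings.TrimSuffix(path.Base(name), path.Ext(name))
	return base == "index"
}

// classify mimics the first-regular-file decision in collectDirDir above,
// assuming the bundle header sorts before its sibling files.
func classify(readdir []entry) string {
	for _, e := range readdir {
		if e.isDir {
			continue
		}
		if isLeafBundleFile(e.name) {
			return "leaf bundle: siblings become resources, skip subdirectories"
		}
		return "no leaf bundle: enqueue files individually, keep walking"
	}
	return "directories only: keep walking"
}

func main() {
	fmt.Println(classify([]entry{{"index.md", false}, {"sunset.jpg", false}}))
	fmt.Println(classify([]entry{{"_index.md", false}, {"post1.md", false}}))
	fmt.Println(classify([]entry{{"sub", true}}))
}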
+ firstPi := first.Meta().PathInfo + if firstPi == nil { + panic(fmt.Sprintf("collectDirDir: no path info for %q", first.Meta().Filename)) + } + + if firstPi.IsLeafBundle() { + if err := c.handleBundleLeaf(dir, first, path, readdir); err != nil { + return nil, err + } + return nil, filepath.SkipDir + } + + for _, fi := range readdir { if fi.IsDir() { continue } meta := fi.Meta() - meta.IsRootFile = walkRoot - class := meta.Classifier - translationBase := meta.TranslationBaseNameWithExt - key := pth.Join(meta.Lang, translationBase) - - if seen[key] { - duplicates = append(duplicates, i) - continue - } - seen[key] = true - - var thisBtype bundleDirType - - switch class { - case files.ContentClassLeaf: - thisBtype = bundleLeaf - case files.ContentClassBranch: - thisBtype = bundleBranch + pi := meta.PathInfo + if pi == nil { + panic(fmt.Sprintf("no path info for %q", meta.Filename)) } - // Folders with both index.md and _index.md type of files have - // undefined behaviour and can never work. - // The branch variant will win because of sort order, but log - // a warning about it. - if thisBtype > bundleNot && btype > bundleNot && thisBtype != btype { - c.logger.Warnf("Content directory %q have both index.* and _index.* files, pick one.", dir.Meta().Filename) - // Reclassify it so it will be handled as a content file inside the - // section, which is in line with the <= 0.55 behaviour. - meta.Classifier = files.ContentClassContent - } else if thisBtype > bundleNot { - btype = thisBtype + if meta.Lang == "" { + panic("lang not set") } - } - - if len(duplicates) > 0 { - for i := len(duplicates) - 1; i >= 0; i-- { - idx := duplicates[i] - readdir = append(readdir[:idx], readdir[idx+1:]...) + if err := c.g.Enqueue(fi); err != nil { + return nil, err } } - err := handleDir(btype, dir, path, readdir) - if err != nil { - return nil, err - } - - if btype == bundleLeaf || partial { - return nil, filepath.SkipDir - } - // Keep walking. return readdir, nil } var postHook hugofs.WalkHook - if c.tracker != nil { - postHook = func(dir hugofs.FileMetaInfo, path string, readdir []hugofs.FileMetaInfo) ([]hugofs.FileMetaInfo, error) { - if c.tracker == nil { - // Nothing to do. - return readdir, nil - } - - return readdir, nil - } - } - - wfn := func(path string, info hugofs.FileMetaInfo, err error) error { - if err != nil { - return err - } + wfn := func(path string, fi hugofs.FileMetaInfo) error { return nil } - fim := fi.(hugofs.FileMetaInfo) - // Make sure the pages in this directory gets re-rendered, - // even in fast render mode. - fim.Meta().IsRootFile = true - - w := hugofs.NewWalkway(hugofs.WalkwayConfig{ - Fs: c.fs, - Logger: c.logger, - Root: dirname, - Info: fim, - HookPre: preHook, - HookPost: postHook, - WalkFn: wfn, - }) + w := hugofs.NewWalkway( + hugofs.WalkwayConfig{ + Logger: c.logger, + Root: path, + Info: root, + Fs: c.fs, + HookPre: preHook, + HookPost: postHook, + WalkFn: wfn, + }) return w.Walk() } -func (c *pagesCollector) handleBundleBranch(readdir []hugofs.FileMetaInfo) error { - // Maps bundles to its language. - bundles := pageBundles{} - - var contentFiles []hugofs.FileMetaInfo - - for _, fim := range readdir { - - if fim.IsDir() { - continue - } - - meta := fim.Meta() - - switch meta.Classifier { - case files.ContentClassContent: - contentFiles = append(contentFiles, fim) - default: - if err := c.addToBundle(fim, bundleBranch, bundles); err != nil { - return err - } - } - - } - - // Make sure the section is created before its pages. 
- if err := c.proc.Process(bundles); err != nil { - return err - } - - return c.handleFiles(contentFiles...) -} - -func (c *pagesCollector) handleBundleLeaf(dir hugofs.FileMetaInfo, path string, readdir []hugofs.FileMetaInfo) error { - // Maps bundles to its language. - bundles := pageBundles{} - - walk := func(path string, info hugofs.FileMetaInfo, err error) error { - if err != nil { - return err - } +func (c *pagesCollector) handleBundleLeaf(dir, bundle hugofs.FileMetaInfo, inPath string, readdir []hugofs.FileMetaInfo) error { + bundlePi := bundle.Meta().PathInfo + walk := func(path string, info hugofs.FileMetaInfo) error { if info.IsDir() { return nil } - return c.addToBundle(info, bundleLeaf, bundles) + pi := info.Meta().PathInfo + + if info != bundle { + // Everything inside a leaf bundle is a Resource, + // even the content pages. + // Note that we do allow index.md as page resources, but not in the bundle root. + if !pi.IsLeafBundle() || pi.Dir() != bundlePi.Dir() { + paths.ModifyPathBundleTypeResource(pi) + } + } + + return c.g.Enqueue(info) } // Start a new walker from the given path. - w := hugofs.NewWalkway(hugofs.WalkwayConfig{ - Root: path, - Fs: c.fs, - Logger: c.logger, - Info: dir, - DirEntries: readdir, - WalkFn: walk, - }) + w := hugofs.NewWalkway( + hugofs.WalkwayConfig{ + Root: inPath, + Fs: c.fs, + Logger: c.logger, + Info: dir, + DirEntries: readdir, + WalkFn: walk, + }) - if err := w.Walk(); err != nil { - return err - } - - return c.proc.Process(bundles) -} - -func (c *pagesCollector) handleFiles(fis ...hugofs.FileMetaInfo) error { - for _, fi := range fis { - if fi.IsDir() { - continue - } - - if err := c.proc.Process(fi); err != nil { - return err - } - } - return nil -} - -func stringSliceContains(k string, values ...string) bool { - for _, v := range values { - if k == v { - return true - } - } - return false + return w.Walk() } diff --git a/hugolib/pages_capture_test.go b/hugolib/pages_capture_test.go deleted file mode 100644 index c771d30ee..000000000 --- a/hugolib/pages_capture_test.go +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package hugolib - -import ( - "context" - "fmt" - "path/filepath" - "testing" - - qt "github.com/frankban/quicktest" - "github.com/gohugoio/hugo/common/loggers" - "github.com/gohugoio/hugo/config" - "github.com/gohugoio/hugo/config/testconfig" - "github.com/gohugoio/hugo/source" - "github.com/spf13/afero" -) - -func TestPagesCapture(t *testing.T) { - - c := qt.New(t) - - afs := afero.NewMemMapFs() - - writeFile := func(filename string) { - c.Assert(afero.WriteFile(afs, filepath.Join("content", filepath.FromSlash(filename)), []byte(fmt.Sprintf("content-%s", filename)), 0755), qt.IsNil) - } - - writeFile("_index.md") - writeFile("logo.png") - writeFile("root.md") - writeFile("blog/index.md") - writeFile("blog/hello.md") - writeFile("blog/images/sunset.png") - writeFile("pages/page1.md") - writeFile("pages/page2.md") - - cfg := config.New() - d := testconfig.GetTestDeps(afs, cfg) - sourceSpec := source.NewSourceSpec(d.PathSpec, nil, d.BaseFs.Content.Fs) - - t.Run("Collect", func(t *testing.T) { - c := qt.New(t) - proc := &testPagesCollectorProcessor{} - coll := newPagesCollector(sourceSpec, nil, loggers.NewDefault(), nil, proc) - c.Assert(coll.Collect(), qt.IsNil) - // 2 bundles, 3 pages. - c.Assert(len(proc.items), qt.Equals, 5) - }) - -} - -type testPagesCollectorProcessor struct { - items []any - waitErr error -} - -func (proc *testPagesCollectorProcessor) Process(item any) error { - proc.items = append(proc.items, item) - return nil -} - -func (proc *testPagesCollectorProcessor) Start(ctx context.Context) context.Context { - return ctx -} - -func (proc *testPagesCollectorProcessor) Wait() error { return proc.waitErr } diff --git a/hugolib/pages_language_merge_test.go b/hugolib/pages_language_merge_test.go index 55241d306..8a5d6c184 100644 --- a/hugolib/pages_language_merge_test.go +++ b/hugolib/pages_language_merge_test.go @@ -70,8 +70,8 @@ func TestMergeLanguages(t *testing.T) { c.Assert(len(firstNN.Sites()), qt.Equals, 4) c.Assert(firstNN.Sites().First().Language().Lang, qt.Equals, "en") - nnBundle := nnSite.getPage("page", "bundle") - enBundle := enSite.getPage("page", "bundle") + nnBundle := nnSite.getPageOldVersion("page", "bundle") + enBundle := enSite.getPageOldVersion("page", "bundle") c.Assert(len(enBundle.Resources()), qt.Equals, 6) c.Assert(len(nnBundle.Resources()), qt.Equals, 2) diff --git a/hugolib/pages_process.go b/hugolib/pages_process.go deleted file mode 100644 index b0c04244b..000000000 --- a/hugolib/pages_process.go +++ /dev/null @@ -1,203 +0,0 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package hugolib - -import ( - "context" - "fmt" - "path/filepath" - - "github.com/gohugoio/hugo/config" - "github.com/gohugoio/hugo/source" - - "github.com/gohugoio/hugo/hugofs/files" - "golang.org/x/sync/errgroup" - - "github.com/gohugoio/hugo/common/herrors" - "github.com/gohugoio/hugo/hugofs" -) - -func newPagesProcessor(h *HugoSites, sp *source.SourceSpec) *pagesProcessor { - procs := make(map[string]pagesCollectorProcessorProvider) - for _, s := range h.Sites { - procs[s.Lang()] = &sitePagesProcessor{ - m: s.pageMap, - errorSender: s.h, - itemChan: make(chan interface{}, config.GetNumWorkerMultiplier()*2), - } - } - return &pagesProcessor{ - procs: procs, - } -} - -type pagesCollectorProcessorProvider interface { - Process(item any) error - Start(ctx context.Context) context.Context - Wait() error -} - -type pagesProcessor struct { - // Per language/Site - procs map[string]pagesCollectorProcessorProvider -} - -func (proc *pagesProcessor) Process(item any) error { - switch v := item.(type) { - // Page bundles mapped to their language. - case pageBundles: - for _, vv := range v { - proc.getProcFromFi(vv.header).Process(vv) - } - case hugofs.FileMetaInfo: - proc.getProcFromFi(v).Process(v) - default: - panic(fmt.Sprintf("unrecognized item type in Process: %T", item)) - - } - - return nil -} - -func (proc *pagesProcessor) Start(ctx context.Context) context.Context { - for _, p := range proc.procs { - ctx = p.Start(ctx) - } - return ctx -} - -func (proc *pagesProcessor) Wait() error { - var err error - for _, p := range proc.procs { - if e := p.Wait(); e != nil { - err = e - } - } - return err -} - -func (proc *pagesProcessor) getProcFromFi(fi hugofs.FileMetaInfo) pagesCollectorProcessorProvider { - if p, found := proc.procs[fi.Meta().Lang]; found { - return p - } - return defaultPageProcessor -} - -type nopPageProcessor int - -func (nopPageProcessor) Process(item any) error { - return nil -} - -func (nopPageProcessor) Start(ctx context.Context) context.Context { - return context.Background() -} - -func (nopPageProcessor) Wait() error { - return nil -} - -var defaultPageProcessor = new(nopPageProcessor) - -type sitePagesProcessor struct { - m *pageMap - errorSender herrors.ErrorSender - - ctx context.Context - itemChan chan any - itemGroup *errgroup.Group -} - -func (p *sitePagesProcessor) Process(item any) error { - select { - case <-p.ctx.Done(): - return nil - default: - p.itemChan <- item - } - return nil -} - -func (p *sitePagesProcessor) Start(ctx context.Context) context.Context { - p.itemGroup, ctx = errgroup.WithContext(ctx) - p.ctx = ctx - p.itemGroup.Go(func() error { - for item := range p.itemChan { - if err := p.doProcess(item); err != nil { - return err - } - } - return nil - }) - return ctx -} - -func (p *sitePagesProcessor) Wait() error { - close(p.itemChan) - return p.itemGroup.Wait() -} - -func (p *sitePagesProcessor) copyFile(fim hugofs.FileMetaInfo) error { - meta := fim.Meta() - f, err := meta.Open() - if err != nil { - return fmt.Errorf("copyFile: failed to open: %w", err) - } - - s := p.m.s - - target := filepath.Join(s.PathSpec.GetTargetLanguageBasePath(), meta.Path) - - defer f.Close() - - fs := s.PublishFsStatic - - return s.publish(&s.PathSpec.ProcessingStats.Files, target, f, fs) -} - -func (p *sitePagesProcessor) doProcess(item any) error { - m := p.m - switch v := item.(type) { - case *fileinfoBundle: - if err := m.AddFilesBundle(v.header, v.resources...); err != nil { - return err - } - case hugofs.FileMetaInfo: - if p.shouldSkip(v) { - return nil - } - 
meta := v.Meta() - - classifier := meta.Classifier - switch classifier { - case files.ContentClassContent: - if err := m.AddFilesBundle(v); err != nil { - return err - } - case files.ContentClassFile: - if err := p.copyFile(v); err != nil { - return err - } - default: - panic(fmt.Sprintf("invalid classifier: %q", classifier)) - } - default: - panic(fmt.Sprintf("unrecognized item type in Process: %T", item)) - } - return nil -} - -func (p *sitePagesProcessor) shouldSkip(fim hugofs.FileMetaInfo) bool { - return p.m.s.conf.IsLangDisabled(fim.Meta().Lang) -} diff --git a/hugolib/paths/paths.go b/hugolib/paths/paths.go index 83d5921e0..397dba3f8 100644 --- a/hugolib/paths/paths.go +++ b/hugolib/paths/paths.go @@ -87,12 +87,13 @@ func (p *Paths) AllModules() modules.Modules { } // GetBasePath returns any path element in baseURL if needed. +// The path returned will have a leading, but no trailing slash. func (p *Paths) GetBasePath(isRelativeURL bool) string { if isRelativeURL && p.Cfg.CanonifyURLs() { // The baseURL will be prepended later. return "" } - return p.Cfg.BaseURL().BasePath + return p.Cfg.BaseURL().BasePathNoTrailingSlash } func (p *Paths) Lang() string { diff --git a/hugolib/rebuild_test.go b/hugolib/rebuild_test.go new file mode 100644 index 000000000..d3ac5665d --- /dev/null +++ b/hugolib/rebuild_test.go @@ -0,0 +1,1256 @@ +package hugolib + +import ( + "fmt" + "path/filepath" + "strings" + "testing" + "time" + + "github.com/fortytw2/leaktest" + qt "github.com/frankban/quicktest" + "github.com/gohugoio/hugo/common/types" + "github.com/gohugoio/hugo/htesting" + "github.com/gohugoio/hugo/resources/resource_transformers/tocss/dartsass" + "github.com/gohugoio/hugo/resources/resource_transformers/tocss/scss" +) + +const rebuildFilesSimple = ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy", "sitemap", "robotstxt", "404"] +disableLiveReload = true +[outputs] +home = ["html"] +section = ["html"] +page = ["html"] +-- content/mysection/_index.md -- +--- +title: "My Section" +--- +-- content/mysection/mysectionbundle/index.md -- +--- +title: "My Section Bundle" +--- +My Section Bundle Content. +-- content/mysection/mysectionbundle/mysectionbundletext.txt -- +My Section Bundle Text 2 Content. +-- content/mysection/mysectionbundle/mysectionbundlecontent.md -- +--- +title: "My Section Bundle Content" +--- +My Section Bundle Content. +-- content/mysection/_index.md -- +--- +title: "My Section" +--- +-- content/mysection/mysectiontext.txt -- +-- content/_index.md -- +--- +title: "Home" +--- +Home Content. +-- content/hometext.txt -- +Home Text Content. +-- layouts/_default/single.html -- +Single: {{ .Title }}|{{ .Content }}$ +Resources: {{ range $i, $e := .Resources }}{{ $i }}:{{ .RelPermalink }}|{{ .Content }}|{{ end }}$ +Len Resources: {{ len .Resources }}| +-- layouts/_default/list.html -- +List: {{ .Title }}|{{ .Content }}$ +Len Resources: {{ len .Resources }}| +Resources: {{ range $i, $e := .Resources }}{{ $i }}:{{ .RelPermalink }}|{{ .Content }}|{{ end }}$ +-- layouts/shortcodes/foo.html -- +Foo. + +` + +func TestRebuildEditTextFileInLeafBundle(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + b.AssertFileContent("public/mysection/mysectionbundle/index.html", + "Resources: 0:/mysection/mysectionbundle/mysectionbundletext.txt|My Section Bundle Text 2 Content.|1:|
<p>My Section Bundle Content.</p>
\n|$") + + b.EditFileReplaceAll("content/mysection/mysectionbundle/mysectionbundletext.txt", "Content.", "Content Edited.").Build() + b.AssertFileContent("public/mysection/mysectionbundle/index.html", + "Text 2 Content Edited") + b.AssertRenderCountPage(1) + b.AssertRenderCountContent(1) +} + +func TestRebuildEditTextFileInHomeBundle(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + b.AssertFileContent("public/index.html", "Home Content.") + b.AssertFileContent("public/index.html", "Home Text Content.") + + b.EditFileReplaceAll("content/hometext.txt", "Content.", "Content Edited.").Build() + b.AssertFileContent("public/index.html", "Home Content.") + b.AssertFileContent("public/index.html", "Home Text Content Edited.") + b.AssertRenderCountPage(1) + b.AssertRenderCountContent(1) +} + +func TestRebuildEditTextFileInBranchBundle(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + b.AssertFileContent("public/mysection/index.html", "My Section") + + b.EditFileReplaceAll("content/mysection/mysectiontext.txt", "Content.", "Content Edited.").Build() + b.AssertFileContent("public/mysection/index.html", "My Section") + b.AssertRenderCountPage(1) + b.AssertRenderCountContent(1) +} + +func TestRebuildRenameTextFileInLeafBundle(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + b.AssertFileContent("public/mysection/mysectionbundle/index.html", "My Section Bundle Text 2 Content.") + + b.RenameFile("content/mysection/mysectionbundle/mysectionbundletext.txt", "content/mysection/mysectionbundle/mysectionbundletext2.txt").Build() + b.AssertFileContent("public/mysection/mysectionbundle/index.html", "mysectionbundletext2", "My Section Bundle Text 2 Content.") + b.AssertRenderCountPage(3) + b.AssertRenderCountContent(3) +} + +func TestRebuildRenameTextFileInBranchBundle(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + b.AssertFileContent("public/mysection/index.html", "My Section") + + b.RenameFile("content/mysection/mysectiontext.txt", "content/mysection/mysectiontext2.txt").Build() + b.AssertFileContent("public/mysection/index.html", "mysectiontext2", "My Section") + b.AssertRenderCountPage(2) + b.AssertRenderCountContent(2) +} + +func TestRebuildRenameTextFileInHomeBundle(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + b.AssertFileContent("public/index.html", "Home Text Content.") + + b.RenameFile("content/hometext.txt", "content/hometext2.txt").Build() + b.AssertFileContent("public/index.html", "hometext2", "Home Text Content.") + b.AssertRenderCountPage(2) +} + +func TestRebuildRenameDirectoryWithLeafBundle(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + b.RenameDir("content/mysection/mysectionbundle", "content/mysection/mysectionbundlerenamed").Build() + b.AssertFileContent("public/mysection/mysectionbundlerenamed/index.html", "My Section Bundle") + b.AssertRenderCountPage(1) +} + +func TestRebuildRenameDirectoryWithBranchBundle(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + b.RenameDir("content/mysection", "content/mysectionrenamed").Build() + b.AssertFileContent("public/mysectionrenamed/index.html", "My Section") + b.AssertFileContent("public/mysectionrenamed/mysectionbundle/index.html", "My Section Bundle") + b.AssertFileContent("public/mysectionrenamed/mysectionbundle/mysectionbundletext.txt", "My Section Bundle Text 2 Content.") + b.AssertRenderCountPage(2) +} + +func TestRebuildRenameDirectoryWithRegularPageUsedInHome(t *testing.T) { + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableLiveReload = true 
+-- content/foo/p1.md -- +--- +title: "P1" +--- +-- layouts/index.html -- +Pages: {{ range .Site.RegularPages }}{{ .RelPermalink }}|{{ end }}$ +` + b := TestRunning(t, files) + + b.AssertFileContent("public/index.html", "Pages: /foo/p1/|$") + + b.RenameDir("content/foo", "content/bar").Build() + + b.AssertFileContent("public/index.html", "Pages: /bar/p1/|$") +} + +func TestRebuildAddRegularFileRegularPageUsedInHomeMultilingual(t *testing.T) { + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableLiveReload = true +[languages] +[languages.en] +weight = 1 +[languages.nn] +weight = 2 +[languages.fr] +weight = 3 +[languages.a] +weight = 4 +[languages.b] +weight = 5 +[languages.c] +weight = 6 +[languages.d] +weight = 7 +[languages.e] +weight = 8 +[languages.f] +weight = 9 +[languages.g] +weight = 10 +[languages.h] +weight = 11 +[languages.i] +weight = 12 +[languages.j] +weight = 13 +-- content/foo/_index.md -- +-- content/foo/data.txt -- +-- content/foo/p1.md -- +-- content/foo/p1.nn.md -- +-- content/foo/p1.fr.md -- +-- content/foo/p1.a.md -- +-- content/foo/p1.b.md -- +-- content/foo/p1.c.md -- +-- content/foo/p1.d.md -- +-- content/foo/p1.e.md -- +-- content/foo/p1.f.md -- +-- content/foo/p1.g.md -- +-- content/foo/p1.h.md -- +-- content/foo/p1.i.md -- +-- content/foo/p1.j.md -- +-- layouts/index.html -- +RegularPages: {{ range .Site.RegularPages }}{{ .RelPermalink }}|{{ end }}$ +` + b := TestRunning(t, files) + + b.AssertFileContent("public/index.html", "RegularPages: /foo/p1/|$") + b.AssertFileContent("public/nn/index.html", "RegularPages: /nn/foo/p1/|$") + b.AssertFileContent("public/i/index.html", "RegularPages: /i/foo/p1/|$") + + b.AddFiles("content/foo/p2.md", ``).Build() + + b.AssertFileContent("public/index.html", "RegularPages: /foo/p1/|/foo/p2/|$") + b.AssertFileContent("public/fr/index.html", "RegularPages: /fr/foo/p1/|$") + + b.AddFiles("content/foo/p2.fr.md", ``).Build() + b.AssertFileContent("public/fr/index.html", "RegularPages: /fr/foo/p1/|/fr/foo/p2/|$") + + b.AddFiles("content/foo/p2.i.md", ``).Build() + b.AssertFileContent("public/i/index.html", "RegularPages: /i/foo/p1/|/i/foo/p2/|$") +} + +func TestRebuildRenameDirectoryWithBranchBundleFastRender(t *testing.T) { + recentlyVisited := types.NewEvictingStringQueue(10).Add("/a/b/c/") + b := TestRunning(t, rebuildFilesSimple, func(cfg *IntegrationTestConfig) { cfg.BuildCfg = BuildCfg{RecentlyVisited: recentlyVisited} }) + b.RenameDir("content/mysection", "content/mysectionrenamed").Build() + b.AssertFileContent("public/mysectionrenamed/index.html", "My Section") + b.AssertFileContent("public/mysectionrenamed/mysectionbundle/index.html", "My Section Bundle") + b.AssertFileContent("public/mysectionrenamed/mysectionbundle/mysectionbundletext.txt", "My Section Bundle Text 2 Content.") + b.AssertRenderCountPage(2) +} + +func TestRebuilErrorRecovery(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + _, err := b.EditFileReplaceAll("content/mysection/mysectionbundle/index.md", "My Section Bundle Content.", "My Section Bundle Content\n\n\n\n{{< foo }}.").BuildE() + + b.Assert(err, qt.Not(qt.IsNil)) + b.Assert(err.Error(), qt.Contains, filepath.FromSlash(`"/content/mysection/mysectionbundle/index.md:8:9": unrecognized character`)) + + // Fix the error + b.EditFileReplaceAll("content/mysection/mysectionbundle/index.md", "{{< foo }}", "{{< foo >}}").Build() +} + +func TestRebuildScopedToOutputFormat(t *testing.T) { + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", 
"taxonomy", "sitemap", "robotstxt", "404"] +disableLiveReload = true +-- content/p1.md -- +--- +title: "P1" +outputs: ["html", "json"] +--- +P1 Content. + +{{< myshort >}} +-- layouts/_default/single.html -- +Single HTML: {{ .Title }}|{{ .Content }}| +-- layouts/_default/single.json -- +Single JSON: {{ .Title }}|{{ .Content }}| +-- layouts/shortcodes/myshort.html -- +My short. +` + b := Test(t, files, TestOptRunning()) + b.AssertRenderCountPage(3) + b.AssertRenderCountContent(1) + b.AssertFileContent("public/p1/index.html", "Single HTML: P1|
<p>P1 Content.</p>
\n") + b.AssertFileContent("public/p1/index.json", "Single JSON: P1|
<p>P1 Content.</p>
\n") + b.EditFileReplaceAll("layouts/_default/single.html", "Single HTML", "Single HTML Edited").Build() + b.AssertFileContent("public/p1/index.html", "Single HTML Edited: P1|
<p>P1 Content.</p>
\n") + b.AssertRenderCountPage(1) + + // Edit shortcode. Note that this is reused across all output formats. + b.EditFileReplaceAll("layouts/shortcodes/myshort.html", "My short", "My short edited").Build() + b.AssertFileContent("public/p1/index.html", "My short edited") + b.AssertFileContent("public/p1/index.json", "My short edited") + b.AssertRenderCountPage(3) // rss (uses .Content) + 2 single pages. +} + +func TestRebuildBaseof(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +title = "Hugo Site" +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy"] +disableLiveReload = true +-- layouts/_default/baseof.html -- +Baseof: {{ .Title }}| +{{ block "main" . }}default{{ end }} +-- layouts/index.html -- +{{ define "main" }} +Home: {{ .Title }}|{{ .Content }}| +{{ end }} +` + b := Test(t, files, TestOptRunning()) + b.AssertFileContent("public/index.html", "Baseof: Hugo Site|", "Home: Hugo Site||") + b.EditFileReplaceFunc("layouts/_default/baseof.html", func(s string) string { + return strings.Replace(s, "Baseof", "Baseof Edited", 1) + }).Build() + b.AssertFileContent("public/index.html", "Baseof Edited: Hugo Site|", "Home: Hugo Site||") +} + +func TestRebuildSingleWithBaseof(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +title = "Hugo Site" +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy"] +disableLiveReload = true +-- content/p1.md -- +--- +title: "P1" +--- +P1 Content. +-- layouts/_default/baseof.html -- +Baseof: {{ .Title }}| +{{ block "main" . }}default{{ end }} +-- layouts/index.html -- +Home. +-- layouts/_default/single.html -- +{{ define "main" }} +Single: {{ .Title }}|{{ .Content }}| +{{ end }} +` + b := Test(t, files, TestOptRunning()) + b.AssertFileContent("public/p1/index.html", "Baseof: P1|\n\nSingle: P1|
<p>P1 Content.</p>
\n|") + b.EditFileReplaceFunc("layouts/_default/single.html", func(s string) string { + return strings.Replace(s, "Single", "Single Edited", 1) + }).Build() + b.AssertFileContent("public/p1/index.html", "Baseof: P1|\n\nSingle Edited: P1|
<p>P1 Content.</p>
\n|") +} + +func TestRebuildFromString(t *testing.T) { + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy", "sitemap", "robotstxt", "404"] +disableLiveReload = true +-- content/p1.md -- +--- +title: "P1" +layout: "l1" +--- +P1 Content. +-- content/p2.md -- +--- +title: "P2" +layout: "l2" +--- +P2 Content. +-- assets/mytext.txt -- +My Text +-- layouts/_default/l1.html -- +{{ $r := partial "get-resource.html" . }} +L1: {{ .Title }}|{{ .Content }}|R: {{ $r.Content }}| +-- layouts/_default/l2.html -- +L2. +-- layouts/partials/get-resource.html -- +{{ $mytext := resources.Get "mytext.txt" }} +{{ $txt := printf "Text: %s" $mytext.Content }} +{{ $r := resources.FromString "r.txt" $txt }} +{{ return $r }} + +` + b := TestRunning(t, files) + + b.AssertFileContent("public/p1/index.html", "L1: P1|
<p>P1 Content.</p>
\n|R: Text: My Text|") + + b.EditFileReplaceAll("assets/mytext.txt", "My Text", "My Text Edited").Build() + + b.AssertFileContent("public/p1/index.html", "L1: P1|
<p>P1 Content.</p>
\n|R: Text: My Text Edited|") + + b.AssertRenderCountPage(1) +} + +func TestRebuildDeeplyNestedLink(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "https://example.com/" +disableKinds = ["term", "taxonomy", "sitemap", "robotstxt", "404"] +disableLiveReload = true +-- content/s/p1.md -- +--- +title: "P1" +--- +-- content/s/p2.md -- +--- +title: "P2" +--- +-- content/s/p3.md -- +--- +title: "P3" +--- +-- content/s/p4.md -- +--- +title: "P4" +--- +-- content/s/p5.md -- +--- +title: "P5" +--- +-- content/s/p6.md -- +--- +title: "P6" +--- +-- content/s/p7.md -- +--- +title: "P7" +--- +-- layouts/_default/list.html -- +List. +-- layouts/_default/single.html -- +Single. +-- layouts/_default/single.html -- +Next: {{ with .PrevInSection }}{{ .Title }}{{ end }}| +Prev: {{ with .NextInSection }}{{ .Title }}{{ end }}| + + +` + + b := TestRunning(t, files) + + b.AssertFileContent("public/s/p1/index.html", "Next: P2|") + b.EditFileReplaceAll("content/s/p7.md", "P7", "P7 Edited").Build() + b.AssertFileContent("public/s/p6/index.html", "Next: P7 Edited|") +} + +func TestRebuildVariations(t *testing.T) { + // t.Parallel() not supported, see https://github.com/fortytw2/leaktest/issues/4 + // This leaktest seems to be a little bit shaky on Travis. + if !htesting.IsCI() { + defer leaktest.CheckTimeout(t, 10*time.Second)() + } + + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disdableKinds = ["term", "taxonomy"] +disableLiveReload = true +defaultContentLanguage = "nn" +paginate = 20 +[security] +enableInlineShortcodes = true +[languages] +[languages.en] +weight = 1 +[languages.nn] +weight = 2 +-- content/mysect/p1/index.md -- +--- +title: "P1" +--- +P1 Content. +{{< include "mysect/p2" >}} +§§§go { page="mysect/p3" } +hello +§§§ + +{{< foo.inline >}}Foo{{< /foo.inline >}} +-- content/mysect/p2/index.md -- +--- +title: "P2" +--- +P2 Content. +-- content/mysect/p3/index.md -- +--- +title: "P3" +--- +P3 Content. +-- content/mysect/sub/_index.md -- +-- content/mysect/sub/p4/index.md -- +--- +title: "P4" +--- +P4 Content. +-- content/mysect/sub/p5/index.md -- +--- +title: "P5" +lastMod: 2019-03-02 +--- +P5 Content. +-- content/myothersect/_index.md -- +--- +cascade: +- _target: + cascadeparam: "cascadevalue" +--- +-- content/myothersect/sub/_index.md -- +-- content/myothersect/sub/p6/index.md -- +--- +title: "P6" +--- +P6 Content. +-- content/translations/p7.en.md -- +--- +title: "P7 EN" +--- +P7 EN Content. +-- content/translations/p7.nn.md -- +--- +title: "P7 NN" +--- +P7 NN Content. +-- layouts/index.html -- +Home: {{ .Title }}|{{ .Content }}| +RegularPages: {{ range .RegularPages }}{{ .RelPermalink }}|{{ end }}$ +Len RegularPagesRecursive: {{ len .RegularPagesRecursive }} +Site.Lastmod: {{ .Site.Lastmod.Format "2006-01-02" }}| +Paginate: {{ range (.Paginate .Site.RegularPages).Pages }}{{ .RelPermalink }}|{{ .Title }}|{{ end }}$ +-- layouts/_default/single.html -- +Single: {{ .Title }}|{{ .Content }}| +Single Partial Cached: {{ partialCached "pcached" . }}| +Page.Lastmod: {{ .Lastmod.Format "2006-01-02" }}| +Cascade param: {{ .Params.cascadeparam }}| +-- layouts/_default/list.html -- +List: {{ .Title }}|{{ .Content }}| +RegularPages: {{ range .RegularPages }}{{ .Title }}|{{ end }}$ +Len RegularPagesRecursive: {{ len .RegularPagesRecursive }} +RegularPagesRecursive: {{ range .RegularPagesRecursive }}{{ .RelPermalink }}|{{ end }}$ +List Partial P1: {{ partial "p1" . 
}}| +Page.Lastmod: {{ .Lastmod.Format "2006-01-02" }}| +Cascade param: {{ .Params.cascadeparam }}| +-- layouts/partials/p1.html -- +Partial P1. +-- layouts/partials/pcached.html -- +Partial Pcached. +-- layouts/shortcodes/include.html -- +{{ $p := site.GetPage (.Get 0)}} +{{ with $p }} +Shortcode Include: {{ .Title }}| +{{ end }} +Shortcode .Page.Title: {{ .Page.Title }}| +Shortcode Partial P1: {{ partial "p1" . }}| +-- layouts/_default/_markup/render-codeblock.html -- +{{ $p := site.GetPage (.Attributes.page)}} +{{ with $p }} +Codeblock Include: {{ .Title }}| +{{ end }} + + + +` + + b := NewIntegrationTestBuilder( + IntegrationTestConfig{ + T: t, + TxtarString: files, + Running: true, + BuildCfg: BuildCfg{ + testCounters: &buildCounters{}, + }, + // Verbose: true, + // LogLevel: logg.LevelTrace, + }, + ).Build() + + // When running the server, this is done on shutdown. + // Do this here to satisfy the leak detector above. + defer func() { + b.Assert(b.H.Close(), qt.IsNil) + }() + + contentRenderCount := b.counters.contentRenderCounter.Load() + pageRenderCount := b.counters.pageRenderCounter.Load() + + b.Assert(contentRenderCount > 0, qt.IsTrue) + b.Assert(pageRenderCount > 0, qt.IsTrue) + + // Test cases: + // - Edit content file direct + // - Edit content file transitive shortcode + // - Edit content file transitive render hook + // - Rename one languge version of a content file + // - Delete content file, check site.RegularPages and section.RegularPagesRecursive (length) + // - Add content file (see above). + // - Edit shortcode + // - Edit inline shortcode + // - Edit render hook + // - Edit partial used in template + // - Edit partial used in shortcode + // - Edit partial cached. + // - Edit lastMod date in content file, check site.Lastmod. + editFile := func(filename string, replacementFunc func(s string) string) { + b.EditFileReplaceFunc(filename, replacementFunc).Build() + b.Assert(b.counters.contentRenderCounter.Load() < contentRenderCount, qt.IsTrue, qt.Commentf("count %d < %d", b.counters.contentRenderCounter.Load(), contentRenderCount)) + b.Assert(b.counters.pageRenderCounter.Load() < pageRenderCount, qt.IsTrue, qt.Commentf("count %d < %d", b.counters.pageRenderCounter.Load(), pageRenderCount)) + } + + b.AssertFileContent("public/index.html", "RegularPages: $", "Len RegularPagesRecursive: 7", "Site.Lastmod: 2019-03-02") + + b.AssertFileContent("public/mysect/p1/index.html", + "Single: P1|
<p>
P1 Content.", + "Shortcode Include: P2|", + "Codeblock Include: P3|") + + editFile("content/mysect/p1/index.md", func(s string) string { + return strings.ReplaceAll(s, "P1", "P1 Edited") + }) + + b.AssertFileContent("public/mysect/p1/index.html", "Single: P1 Edited|
<p>
P1 Edited Content.") + b.AssertFileContent("public/index.html", "RegularPages: $", "Len RegularPagesRecursive: 7", "Paginate: /mysect/sub/p5/|P5|/mysect/p1/|P1 Edited") + b.AssertFileContent("public/mysect/index.html", "RegularPages: P1 Edited|P2|P3|$", "Len RegularPagesRecursive: 5") + + // p2 is included in p1 via shortcode. + editFile("content/mysect/p2/index.md", func(s string) string { + return strings.ReplaceAll(s, "P2", "P2 Edited") + }) + + b.AssertFileContent("public/mysect/p1/index.html", "Shortcode Include: P2 Edited|") + + // p3 is included in p1 via codeblock hook. + editFile("content/mysect/p3/index.md", func(s string) string { + return strings.ReplaceAll(s, "P3", "P3 Edited") + }) + + b.AssertFileContent("public/mysect/p1/index.html", "Codeblock Include: P3 Edited|") + + // Remove a content file in a nested section. + b.RemoveFiles("content/mysect/sub/p4/index.md").Build() + b.AssertFileContent("public/mysect/index.html", "RegularPages: P1 Edited|P2 Edited|P3 Edited", "Len RegularPagesRecursive: 4") + b.AssertFileContent("public/mysect/sub/index.html", "RegularPages: P5|$", "RegularPagesRecursive: 1") + + // Rename one of the translations. + b.AssertFileContent("public/translations/index.html", "RegularPagesRecursive: /translations/p7/") + b.AssertFileContent("public/en/translations/index.html", "RegularPagesRecursive: /en/translations/p7/") + b.RenameFile("content/translations/p7.nn.md", "content/translations/p7rename.nn.md").Build() + b.AssertFileContent("public/translations/index.html", "RegularPagesRecursive: /translations/p7rename/") + b.AssertFileContent("public/en/translations/index.html", "RegularPagesRecursive: /en/translations/p7/") + + // Edit shortcode + editFile("layouts/shortcodes/include.html", func(s string) string { + return s + "\nShortcode Include Edited." + }) + b.AssertFileContent("public/mysect/p1/index.html", "Shortcode Include Edited.") + + // Edit render hook + editFile("layouts/_default/_markup/render-codeblock.html", func(s string) string { + return s + "\nCodeblock Include Edited." + }) + b.AssertFileContent("public/mysect/p1/index.html", "Codeblock Include Edited.") + + // Edit partial p1 + editFile("layouts/partials/p1.html", func(s string) string { + return strings.Replace(s, "Partial P1", "Partial P1 Edited", 1) + }) + b.AssertFileContent("public/mysect/index.html", "List Partial P1: Partial P1 Edited.") + b.AssertFileContent("public/mysect/p1/index.html", "Shortcode Partial P1: Partial P1 Edited.") + + // Edit partial cached. + editFile("layouts/partials/pcached.html", func(s string) string { + return strings.Replace(s, "Partial Pcached", "Partial Pcached Edited", 1) + }) + b.AssertFileContent("public/mysect/p1/index.html", "Pcached Edited.") + + // Edit lastMod date in content file, check site.Lastmod. + editFile("content/mysect/sub/p5/index.md", func(s string) string { + return strings.Replace(s, "2019-03-02", "2020-03-10", 1) + }) + b.AssertFileContent("public/index.html", "Site.Lastmod: 2020-03-10|") + b.AssertFileContent("public/mysect/index.html", "Page.Lastmod: 2020-03-10|") + + // Adjust the date back a few days. + editFile("content/mysect/sub/p5/index.md", func(s string) string { + return strings.Replace(s, "2020-03-10", "2019-03-08", 1) + }) + b.AssertFileContent("public/mysect/index.html", "Page.Lastmod: 2019-03-08|") + b.AssertFileContent("public/index.html", "Site.Lastmod: 2019-03-08|") + + // Check cascade mods. 
+ b.AssertFileContent("public/myothersect/index.html", "Cascade param: cascadevalue|") + b.AssertFileContent("public/myothersect/sub/index.html", "Cascade param: cascadevalue|") + b.AssertFileContent("public/myothersect/sub/p6/index.html", "Cascade param: cascadevalue|") + + editFile("content/myothersect/_index.md", func(s string) string { + return strings.Replace(s, "cascadevalue", "cascadevalue edited", 1) + }) + b.AssertFileContent("public/myothersect/index.html", "Cascade param: cascadevalue edited|") + b.AssertFileContent("public/myothersect/sub/p6/index.html", "Cascade param: cascadevalue edited|") + + // Repurpose the cascadeparam to set the title. + editFile("content/myothersect/_index.md", func(s string) string { + return strings.Replace(s, "cascadeparam:", "title:", 1) + }) + b.AssertFileContent("public/myothersect/sub/index.html", "Cascade param: |", "List: cascadevalue edited|") + + // Revert it. + editFile("content/myothersect/_index.md", func(s string) string { + return strings.Replace(s, "title:", "cascadeparam:", 1) + }) + b.AssertFileContent("public/myothersect/sub/index.html", "Cascade param: cascadevalue edited|", "List: |") +} + +func TestRebuildVariationsJSNoneFingerprinted(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "https://example.com/" +disableKinds = ["term", "taxonomy", "sitemap", "robotsTXT", "404", "rss"] +disableLiveReload = true +-- content/p1/index.md -- +--- +title: "P1" +--- +P1. +-- content/p2/index.md -- +--- +title: "P2" +--- +P2. +-- content/p3/index.md -- +--- +title: "P3" +--- +P3. +-- content/p4/index.md -- +--- +title: "P4" +--- +P4. +-- assets/main.css -- +body { + background: red; +} +-- layouts/default/list.html -- +List. +-- layouts/_default/single.html -- +Single. +{{ $css := resources.Get "main.css" | minify }} +RelPermalink: {{ $css.RelPermalink }}| + +` + + b := TestRunning(t, files) + + b.AssertFileContent("public/p1/index.html", "RelPermalink: /main.min.css|") + b.AssertFileContent("public/main.min.css", "body{background:red}") + + b.EditFileReplaceAll("assets/main.css", "red", "blue") + b.RemoveFiles("content/p2/index.md") + b.RemoveFiles("content/p3/index.md") + b.Build() + + b.AssertFileContent("public/main.min.css", "body{background:blue}") +} + +func TestRebuildVariationsJSInNestedCachedPartialFingerprinted(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "https://example.com/" +disableKinds = ["term", "taxonomy", "sitemap", "robotsTXT", "404", "rss"] +disableLiveReload = true +-- content/p1/index.md -- +--- +title: "P1" +--- +P1. +-- content/p2/index.md -- +--- +title: "P2" +--- +P2. +-- content/p3/index.md -- +--- +title: "P3" +--- +P3. +-- content/p4/index.md -- +--- +title: "P4" +--- +P4. +-- assets/js/main.js -- +console.log("Hello"); +-- layouts/_default/list.html -- +List. {{ partial "head.html" . }}$ +-- layouts/_default/single.html -- +Single. {{ partial "head.html" . }}$ +-- layouts/partials/head.html -- +{{ partialCached "js.html" . 
}}$ +-- layouts/partials/js.html -- +{{ $js := resources.Get "js/main.js" | js.Build | fingerprint }} +RelPermalink: {{ $js.RelPermalink }}| +` + + b := TestRunning(t, files) + + b.AssertFileContent("public/p1/index.html", "/js/main.712a50b59d0f0dedb4e3606eaa3860b1f1a5305f6c42da30a2985e473ba314eb.js") + b.AssertFileContent("public/index.html", "/js/main.712a50b59d0f0dedb4e3606eaa3860b1f1a5305f6c42da30a2985e473ba314eb.js") + + b.EditFileReplaceAll("assets/js/main.js", "Hello", "Hello is Edited").Build() + + for i := 1; i < 5; i++ { + b.AssertFileContent(fmt.Sprintf("public/p%d/index.html", i), "/js/main.6535698cec9a21875f40ae03e96f30c4bee41a01e979224761e270b9034b2424.js") + } + + b.AssertFileContent("public/index.html", "/js/main.6535698cec9a21875f40ae03e96f30c4bee41a01e979224761e270b9034b2424.js") +} + +func TestRebuildVariationsJSInNestedPartialFingerprintedInBase(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "https://example.com/" +disableKinds = ["term", "taxonomy", "sitemap", "robotsTXT", "404", "rss"] +disableLiveReload = true +-- assets/js/main.js -- +console.log("Hello"); +-- layouts/_default/baseof.html -- +Base. {{ partial "common/head.html" . }}$ +{{ block "main" . }}default{{ end }} +-- layouts/_default/list.html -- +{{ define "main" }}main{{ end }} +-- layouts/partials/common/head.html -- +{{ partial "myfiles/js.html" . }}$ +-- layouts/partials/myfiles/js.html -- +{{ $js := resources.Get "js/main.js" | js.Build | fingerprint }} +RelPermalink: {{ $js.RelPermalink }}| +` + + b := TestRunning(t, files) + + b.AssertFileContent("public/index.html", "/js/main.712a50b59d0f0dedb4e3606eaa3860b1f1a5305f6c42da30a2985e473ba314eb.js") + + b.EditFileReplaceAll("assets/js/main.js", "Hello", "Hello is Edited").Build() + + b.AssertFileContent("public/index.html", "/js/main.6535698cec9a21875f40ae03e96f30c4bee41a01e979224761e270b9034b2424.js") +} + +func TestRebuildVariationsJSBundled(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy", "sitemap", "robotsTXT", "404", "rss"] +disableLiveReload = true +-- content/_index.md -- +--- +title: "Home" +--- +-- content/p1.md -- +--- +title: "P1" +layout: "main" +--- +-- content/p2.md -- +--- +title: "P2" +--- +{{< jsfingerprinted >}} +-- content/p3.md -- +--- +title: "P3" +layout: "plain" +--- +{{< jsfingerprinted >}} +-- content/main.js -- +console.log("Hello"); +-- content/foo.js -- +console.log("Foo"); +-- layouts/index.html -- +Home. +{{ $js := site.Home.Resources.Get "main.js" }} +{{ with $js }} + +{{ end }} +-- layouts/_default/single.html -- +Single. Deliberately no .Content in here. +-- layouts/_default/plain.html -- +Content: {{ .Content }}| +-- layouts/_default/main.html -- +{{ $js := site.Home.Resources.Get "main.js" }} +{{ with $js }} + +{{ end }} +-- layouts/shortcodes/jsfingerprinted.html -- +{{ $js := site.Home.Resources.Get "foo.js" | fingerprint }} + +` + + testCounters := &buildCounters{} + + b := NewIntegrationTestBuilder( + IntegrationTestConfig{ + T: t, + TxtarString: files, + Running: true, + // LogLevel: logg.LevelTrace, + // Verbose: true, + BuildCfg: BuildCfg{ + testCounters: testCounters, + }, + }, + ).Build() + + b.AssertFileContent("public/index.html", ``) + b.AssertFileContent("public/p1/index.html", "") + b.AssertFileContent("public/p2/index.html", "Single. 
Deliberately no .Content in here.") + b.AssertFileContent("public/p3/index.html", "foo.57b4465b908531b43d4e4680ab1063d856b475cb1ae81ad43e0064ecf607bec1.js") + b.AssertRenderCountPage(4) + + // Edit JS file. + b.EditFileReplaceFunc("content/main.js", func(s string) string { + return strings.Replace(s, "Hello", "Hello is Edited", 1) + }).Build() + + b.AssertFileContent("public/p1/index.html", "") + // The p1 (the one inlining the JS) should be rebuilt. + b.AssertRenderCountPage(2) + // But not the content file. + b.AssertRenderCountContent(0) + + // This is included with RelPermalink in a shortcode used in p3, but it's fingerprinted + // so we need to rebuild on change. + b.EditFileReplaceFunc("content/foo.js", func(s string) string { + return strings.Replace(s, "Foo", "Foo Edited", 1) + }).Build() + + // Verify that the hash has changed. + b.AssertFileContent("public/p3/index.html", "foo.3a332a088521231e5fc9bd22f15e0ccf507faa7b373fbff22959005b9a80481c.js") + + b.AssertRenderCountPage(1) + b.AssertRenderCountContent(1) +} + +func TestRebuildEditData(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +disableLiveReload = true +[security] +enableInlineShortcodes=true +-- data/mydata.yaml -- +foo: bar +-- content/_index.md -- +--- +title: "Home" +--- +{{< data "mydata.foo" >}}} +-- content/p1.md -- +--- +title: "P1" +--- + +Foo inline: {{< foo.inline >}}{{ site.Data.mydata.foo }}|{{< /foo.inline >}} +-- layouts/shortcodes/data.html -- +{{ $path := split (.Get 0) "." }} +{{ $data := index site.Data $path }} +Foo: {{ $data }}| +-- layouts/index.html -- +Content: {{ .Content }}| +-- layouts/_default/single.html -- +Single: {{ .Content }}| +` + b := TestRunning(t, files) + + b.AssertFileContent("public/index.html", "Foo: bar|") + b.AssertFileContent("public/p1/index.html", "Foo inline: bar|") + b.EditFileReplaceFunc("data/mydata.yaml", func(s string) string { + return strings.Replace(s, "bar", "bar edited", 1) + }).Build() + b.AssertFileContent("public/index.html", "Foo: bar edited|") + b.AssertFileContent("public/p1/index.html", "Foo inline: bar edited|") +} + +func TestRebuildEditHomeContent(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableLiveReload = true +-- content/_index.md -- +--- +title: "Home" +--- +Home. +-- layouts/index.html -- +Content: {{ .Content }} +` + b := TestRunning(t, files) + + b.AssertFileContent("public/index.html", "Content:
<p>Home.</p>
") + b.EditFileReplaceAll("content/_index.md", "Home.", "Home").Build() + b.AssertFileContent("public/index.html", "Content:
<p>Home</p>
") +} + +func TestRebuildVariationsAssetsJSImport(t *testing.T) { + t.Parallel() + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy"] +disableLiveReload = true +-- layouts/index.html -- +Home. {{ now }} +{{ with (resources.Get "js/main.js" | js.Build | fingerprint) }} + +{{ end }} +-- assets/js/lib/foo.js -- +export function foo() { + console.log("Foo"); +} +-- assets/js/main.js -- +import { foo } from "./lib/foo.js"; +console.log("Hello"); +foo(); +` + + b := NewIntegrationTestBuilder( + IntegrationTestConfig{ + T: t, + TxtarString: files, + Running: true, + // LogLevel: logg.LevelTrace, + NeedsOsFS: true, + }, + ).Build() + + b.AssertFileContent("public/index.html", "Home.", "Hello", "Foo") + // Edit the imported file. + b.EditFileReplaceAll("assets/js/lib/foo.js", "Foo", "Foo Edited").Build() + b.AssertFileContent("public/index.html", "Home.", "Hello", "Foo Edited") +} + +func TestRebuildVariationsAssetsPostCSSImport(t *testing.T) { + if !htesting.IsCI() { + t.Skip("skip CI only") + } + + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy", "sitemap", "rss"] +disableLiveReload = true +-- assets/css/lib/foo.css -- +body { + background: red; +} +-- assets/css/main.css -- +@import "lib/foo.css"; +-- package.json -- +{ + "devDependencies": { + "postcss-cli": "^9.0.1" + } +} +-- content/p1.md -- +--- +title: "P1" +--- +-- content/p2.md -- +--- +title: "P2" +layout: "foo" +--- +{{< fingerprinted >}} +-- content/p3.md -- +--- +title: "P3" +layout: "foo" +--- +{{< notfingerprinted >}} +-- layouts/shortcodes/fingerprinted.html -- +Fingerprinted. +{{ $opts := dict "inlineImports" true "noMap" true }} +{{ with (resources.Get "css/main.css" | postCSS $opts | fingerprint) }} + +{{ end }} +-- layouts/shortcodes/notfingerprinted.html -- +Fingerprinted. +{{ $opts := dict "inlineImports" true "noMap" true }} +{{ with (resources.Get "css/main.css" | postCSS $opts) }} + +{{ end }} +-- layouts/index.html -- +Home. +{{ $opts := dict "inlineImports" true "noMap" true }} +{{ with (resources.Get "css/main.css" | postCSS $opts) }} + +{{ end }} +-- layouts/_default/foo.html -- +Foo. +{{ .Title }}|{{ .Content }}| +-- layouts/_default/single.html -- +Single. +{{ $opts := dict "inlineImports" true "noMap" true }} +{{ with (resources.Get "css/main.css" | postCSS $opts) }} + +{{ end }} +` + + b := NewIntegrationTestBuilder( + IntegrationTestConfig{ + T: t, + TxtarString: files, + Running: true, + NeedsOsFS: true, + NeedsNpmInstall: true, + // LogLevel: logg.LevelTrace, + }, + ).Build() + + b.AssertFileContent("public/index.html", "Home.", "") + b.AssertFileContent("public/p1/index.html", "Single.", "/css/main.css") + b.AssertRenderCountPage(4) + + // Edit the imported file. + b.EditFileReplaceFunc("assets/css/lib/foo.css", func(s string) string { + return strings.Replace(s, "red", "blue", 1) + }).Build() + + b.AssertRenderCountPage(3) + + b.AssertFileContent("public/index.html", "Home.", "") +} + +func TestRebuildVariationsAssetsSassImport(t *testing.T) { + if !htesting.IsCI() { + t.Skip("skip CI only") + } + + filesTemplate := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy"] +disableLiveReload = true +-- assets/css/lib/foo.scss -- +body { + background: red; +} +-- assets/css/main.scss -- +@import "lib/foo"; +-- layouts/index.html -- +Home. 
+{{ $opts := dict "transpiler" "TRANSPILER" }} +{{ with (resources.Get "css/main.scss" | toCSS $opts) }} + +{{ end }} +` + + runTest := func(transpiler string) { + t.Run(transpiler, func(t *testing.T) { + files := strings.Replace(filesTemplate, "TRANSPILER", transpiler, 1) + b := NewIntegrationTestBuilder( + IntegrationTestConfig{ + T: t, + TxtarString: files, + Running: true, + NeedsOsFS: true, + }, + ).Build() + + b.AssertFileContent("public/index.html", "Home.", "background: red") + + // Edit the imported file. + b.EditFileReplaceFunc("assets/css/lib/foo.scss", func(s string) string { + return strings.Replace(s, "red", "blue", 1) + }).Build() + + b.AssertFileContent("public/index.html", "Home.", "background: blue") + }) + } + + if scss.Supports() { + runTest("libsass") + } + + if dartsass.Supports() { + runTest("dartsass") + } +} + +func benchmarkFilesEdit(count int) string { + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disdableKinds = ["term", "taxonomy"] +disableLiveReload = true +-- layouts/_default/single.html -- +Single: {{ .Title }}|{{ .Content }}| +-- layouts/_default/list.html -- +List: {{ .Title }}|{{ .Content }}| +-- content/mysect/_index.md -- +--- +title: "My Sect" +--- + ` + + contentTemplate := ` +--- +title: "P%d" +--- +P%d Content. +` + + for i := 0; i < count; i++ { + files += fmt.Sprintf("-- content/mysect/p%d/index.md --\n%s", i, fmt.Sprintf(contentTemplate, i, i)) + } + + return files +} + +func BenchmarkRebuildContentFileChange(b *testing.B) { + files := benchmarkFilesEdit(500) + + cfg := IntegrationTestConfig{ + T: b, + TxtarString: files, + Running: true, + // Verbose: true, + // LogLevel: logg.LevelInfo, + } + builders := make([]*IntegrationTestBuilder, b.N) + + for i := range builders { + builders[i] = NewIntegrationTestBuilder(cfg) + builders[i].Build() + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + bb := builders[i] + bb.EditFileReplaceFunc("content/mysect/p123/index.md", func(s string) string { + return s + "... Edited" + }).Build() + // fmt.Println(bb.LogString()) + } +} diff --git a/hugolib/rendershortcodes_test.go b/hugolib/rendershortcodes_test.go index c6fa711cc..d0bc0546c 100644 --- a/hugolib/rendershortcodes_test.go +++ b/hugolib/rendershortcodes_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -63,7 +63,7 @@ Fragments: {{ .Fragments.Identifiers }}| HasShortcode Level 1: {{ .HasShortcode "include" }}| HasShortcode Level 2: {{ .HasShortcode "withmarkdown" }}| HasShortcode Level 3: {{ .HasShortcode "level3" }}| -HasSHortcode not found: {{ .HasShortcode "notfound" }}| +HasShortcode not found: {{ .HasShortcode "notfound" }}| Content: {{ .Content }}| ` @@ -79,11 +79,8 @@ Content: {{ .Content }}| "HasShortcode Level 1: true|", "HasShortcode Level 2: true|", "HasShortcode Level 3: true|", - "HasSHortcode not found: false|", + "HasShortcode not found: false|", ) - - // TODO1 more assertions. - } func TestRenderShortcodesNestedMultipleOutputFormatTemplates(t *testing.T) { @@ -130,7 +127,6 @@ JSON: {{ .Content }} b.AssertFileContent("public/p1/index.html", "Myshort HTML") b.AssertFileContent("public/p1/index.json", "Myshort JSON") - } func TestRenderShortcodesEditNested(t *testing.T) { @@ -159,27 +155,12 @@ title: "p2" Myshort Original. 
-- layouts/_default/single.html -- {{ .Content }} - - - ` - - b := NewIntegrationTestBuilder( - IntegrationTestConfig{ - T: t, - TxtarString: files, - Running: true, - }, - ).Build() - + b := TestRunning(t, files) b.AssertFileContent("public/p1/index.html", "Myshort Original.") - b.EditFileReplace("layouts/shortcodes/myshort.html", func(s string) string { - return "Myshort Edited." - }) - b.Build() + b.EditFileReplaceAll("layouts/shortcodes/myshort.html", "Original", "Edited").Build() b.AssertFileContent("public/p1/index.html", "Myshort Edited.") - } func TestRenderShortcodesEditIncludedPage(t *testing.T) { @@ -223,10 +204,9 @@ Myshort Original. b.AssertFileContent("public/p1/index.html", "Original") - b.EditFileReplace("content/p2.md", func(s string) string { + b.EditFileReplaceFunc("content/p2.md", func(s string) string { return strings.Replace(s, "Original", "Edited", 1) }) b.Build() b.AssertFileContent("public/p1/index.html", "Edited") - } diff --git a/hugolib/renderstring_test.go b/hugolib/renderstring_test.go index e0a4cd036..40980bdcb 100644 --- a/hugolib/renderstring_test.go +++ b/hugolib/renderstring_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -121,7 +121,6 @@ HasShortcode: foo:{{ .HasShortcode "foo" }}:false ` t.Run("Basic", func(t *testing.T) { - b := NewIntegrationTestBuilder( IntegrationTestConfig{ T: t, @@ -139,11 +138,9 @@ HasShortcode: mark2:true:true HasShortcode: foo:false:false Page Type: *hugolib.pageForShortcode`, ) - }) t.Run("Edit shortcode", func(t *testing.T) { - b := NewIntegrationTestBuilder( IntegrationTestConfig{ T: t, @@ -157,7 +154,6 @@ Page Type: *hugolib.pageForShortcode`, b.AssertFileContent("public/p1/index.html", `Edit shortcode`, ) - }) } @@ -189,7 +185,6 @@ Page Kind: home Has myshort: true Has other: false `) - } func TestRenderStringWithShortcodeIssue10654(t *testing.T) { diff --git a/hugolib/resource_chain_test.go b/hugolib/resource_chain_test.go index 17c3b2f0c..1365db72c 100644 --- a/hugolib/resource_chain_test.go +++ b/hugolib/resource_chain_test.go @@ -36,11 +36,10 @@ func TestResourceChainBasic(t *testing.T) { failIfHandler := func(h http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path == "/fail.jpg" { - http.Error(w, "{ msg: failed }", 501) + http.Error(w, "{ msg: failed }", http.StatusNotImplemented) return } h.ServeHTTP(w, r) - }) } ts := httptest.NewServer( @@ -89,7 +88,7 @@ FAILED REMOTE ERROR DETAILS CONTENT: {{ with $failedImg.Err }}|{{ . }}|{{ with . fs := b.Fs.Source imageDir := filepath.Join("assets", "images") - b.Assert(os.MkdirAll(imageDir, 0777), qt.IsNil) + b.Assert(os.MkdirAll(imageDir, 0o777), qt.IsNil) src, err := os.Open("testdata/sunset.jpg") b.Assert(err, qt.IsNil) out, err := fs.Create(filepath.Join(imageDir, "sunset.jpg")) @@ -101,18 +100,18 @@ FAILED REMOTE ERROR DETAILS CONTENT: {{ with $failedImg.Err }}|{{ . }}|{{ with . 
b.Running() for i := 0; i < 2; i++ { - + b.Logf("Test run %d", i) b.Build(BuildCfg{}) b.AssertFileContent("public/index.html", fmt.Sprintf(` -SUNSET: images/sunset.jpg|/images/sunset.a9bf1d944e19c0f382e0d8f51de690f7d0bc8fa97390c4242a86c3e5c0737e71.jpg|900|90587 -FIT: images/sunset.jpg|/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_fit_q75_box.jpg|200 +SUNSET: /images/sunset.jpg|/images/sunset.a9bf1d944e19c0f382e0d8f51de690f7d0bc8fa97390c4242a86c3e5c0737e71.jpg|900|90587 +FIT: /images/sunset.jpg|/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_fit_q75_box.jpg|200 CSS integrity Data first: sha256-od9YaHw8nMOL8mUy97Sy8sKwMV3N4hI3aVmZXATxH+8= /styles.min.a1df58687c3c9cc38bf26532f7b4b2f2c2b0315dcde212376959995c04f11fef.css CSS integrity Data last: /styles2.min.1cfc52986836405d37f9998a63fd6dd8608e8c410e5e3db1daaa30f78bc273ba.css sha256-HPxSmGg2QF03+ZmKY/1t2GCOjEEOXj2x2qow94vCc7o= -SUNSET REMOTE: sunset_%[1]s.jpg|/sunset_%[1]s.a9bf1d944e19c0f382e0d8f51de690f7d0bc8fa97390c4242a86c3e5c0737e71.jpg|900|90587 -FIT REMOTE: sunset_%[1]s.jpg|/sunset_%[1]s_hu59e56ffff1bc1d8d122b1403d34e039f_0_200x200_fit_q75_box.jpg|200 +SUNSET REMOTE: /sunset_%[1]s.jpg|/sunset_%[1]s.a9bf1d944e19c0f382e0d8f51de690f7d0bc8fa97390c4242a86c3e5c0737e71.jpg|900|90587 +FIT REMOTE: /sunset_%[1]s.jpg|/sunset_%[1]s_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_fit_q75_box.jpg|200 REMOTE NOT FOUND: OK LOCAL NOT FOUND: OK PRINT PROTOCOL ERROR DETAILS: Err: error calling resources.GetRemote: Get "gopher://example.org": unsupported protocol scheme "gopher"|| @@ -125,9 +124,9 @@ FAILED REMOTE ERROR DETAILS CONTENT: |failed to fetch remote resource: Not Imple b.AssertFileContent("public/styles.min.a1df58687c3c9cc38bf26532f7b4b2f2c2b0315dcde212376959995c04f11fef.css", "body{background-color:#add8e6}") b.AssertFileContent("public//styles2.min.1cfc52986836405d37f9998a63fd6dd8608e8c410e5e3db1daaa30f78bc273ba.css", "body{background-color:orange}") - b.EditFiles("page1.md", ` + b.EditFiles("content/_index.md", ` --- -title: "Page 1 edit" +title: "Home edit" summary: "Edited summary" --- @@ -135,9 +134,6 @@ Edited content. `) - b.Assert(b.Fs.WorkingDirWritable.Remove("public"), qt.IsNil) - b.H.ResourceSpec.ClearCaches() - } } @@ -147,7 +143,9 @@ func TestResourceChainPostProcess(t *testing.T) { rnd := rand.New(rand.NewSource(time.Now().UnixNano())) b := newTestSitesBuilder(t) - b.WithConfigFile("toml", `[minify] + b.WithConfigFile("toml", ` +disableLiveReload = true +[minify] minifyOutput = true [minify.tdewolff] [minify.tdewolff.html] @@ -184,7 +182,7 @@ End.`) b.AssertFileContent("public/index.html", `Start. HELLO: /hello.min.a2d1cb24f24b322a7dad520414c523e9.html|Integrity: md5-otHLJPJLMip9rVIEFMUj6Q==|MediaType: text/html -HELLO2: Name: hello.html|Content:

Hello World!

|Title: hello.html|ResourceType: text +HELLO2: Name: /hello.html|Content:

Hello World!

|Title: /hello.html|ResourceType: text foo Hello End.`) @@ -317,7 +315,6 @@ func TestResourceChains(t *testing.T) { } http.Error(w, "Not found", http.StatusNotFound) - return })) t.Cleanup(func() { ts.Close() @@ -680,22 +677,6 @@ $color: #333; } } -func TestMultiSiteResource(t *testing.T) { - t.Parallel() - c := qt.New(t) - - b := newMultiSiteTestDefaultBuilder(t) - - b.CreateSites().Build(BuildCfg{}) - - // This build is multilingual, but not multihost. There should be only one pipes.txt - b.AssertFileContent("public/fr/index.html", "French Home Page", "String Resource: /blog/text/pipes.txt") - c.Assert(b.CheckExists("public/fr/text/pipes.txt"), qt.Equals, false) - c.Assert(b.CheckExists("public/en/text/pipes.txt"), qt.Equals, false) - b.AssertFileContent("public/en/index.html", "Default Home Page", "String Resource: /blog/text/pipes.txt") - b.AssertFileContent("public/text/pipes.txt", "Hugo Pipes") -} - func TestResourcesMatch(t *testing.T) { t.Parallel() diff --git a/hugolib/rss_test.go b/hugolib/rss_test.go index ba2491c66..0c3c21b90 100644 --- a/hugolib/rss_test.go +++ b/hugolib/rss_test.go @@ -45,7 +45,7 @@ func TestRSSOutput(t *testing.T) { // Section RSS th.assertFileContent(filepath.Join("public", "sect", rssURI), "}} b.Assert(len(h.Sites), qt.Equals, 1) s := h.Sites[0] - home := s.getPage(kinds.KindHome) + home := s.getPageOldVersion(kinds.KindHome) b.Assert(home, qt.Not(qt.IsNil)) b.Assert(len(home.OutputFormats()), qt.Equals, 3) @@ -829,7 +829,6 @@ title: "Hugo Rocks!"

Doc

`, ) - } // https://github.com/gohugoio/hugo/issues/6857 @@ -927,7 +926,6 @@ title: "p1" b.AssertFileContent("public/p1/index.html", ` \n
  • \n

    List 1

    \n
      \n
    1. Item Mark1 1
    2. \n
    3. Item Mark1 2
    4. \n
    5. Item Mark2 1
    6. \n
    7. Item Mark2 2\n
        \n
      1. Item Mark2 2-1
      2. \n
      \n
    8. \n
    9. Item Mark2 3
    10. \n
    \n
  • \n") - } func TestShortcodeCodeblockIndent(t *testing.T) { @@ -998,7 +995,6 @@ echo "foo"; ).Build() b.AssertFileContent("public/p1/index.html", "
    echo "foo";\n
    ") - } func TestShortcodeHighlightDeindent(t *testing.T) { @@ -1041,7 +1037,6 @@ title: "p1" `) - } // Issue 10236. @@ -1073,7 +1068,6 @@ Title: {{ .Get "title" | safeHTML }} ).Build() b.AssertFileContent("public/p1/index.html", `Title: Steve "Francia".`) - } // Issue 10391. @@ -1166,7 +1160,6 @@ C'est un test ).Build() b.AssertFileContent("public/fr/p2/index.html", `plus-dinformations`) - } // Issue 10671. @@ -1281,5 +1274,4 @@ Hello. ).Build() b.AssertFileContent("public/p1/index.html", "Hello.") - } diff --git a/hugolib/site.go b/hugolib/site.go index c682eebc9..312f6b97f 100644 --- a/hugolib/site.go +++ b/hugolib/site.go @@ -1,4 +1,4 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -19,18 +19,18 @@ import ( "io" "mime" "net/url" - "path" "path/filepath" "runtime" "sort" "strings" + "sync" "time" "github.com/bep/logg" - "github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/common/htime" "github.com/gohugoio/hugo/common/hugio" "github.com/gohugoio/hugo/common/types" + "github.com/gohugoio/hugo/hugolib/doctree" "golang.org/x/text/unicode/norm" "github.com/gohugoio/hugo/common/paths" @@ -41,11 +41,6 @@ import ( "github.com/gohugoio/hugo/markup/converter" - "github.com/gohugoio/hugo/hugofs/files" - hglob "github.com/gohugoio/hugo/hugofs/glob" - - "github.com/gohugoio/hugo/common/maps" - "github.com/gohugoio/hugo/common/text" "github.com/gohugoio/hugo/publisher" @@ -55,19 +50,14 @@ import ( "github.com/gohugoio/hugo/resources/kinds" "github.com/gohugoio/hugo/resources/page" - "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/lazy" "github.com/fsnotify/fsnotify" bp "github.com/gohugoio/hugo/bufferpool" - "github.com/gohugoio/hugo/deps" "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/navigation" "github.com/gohugoio/hugo/output" - "github.com/gohugoio/hugo/source" "github.com/gohugoio/hugo/tpl" - - "github.com/spf13/afero" ) func (s *Site) Taxonomies() page.TaxonomyList { @@ -75,25 +65,32 @@ func (s *Site) Taxonomies() page.TaxonomyList { return s.taxonomies } -type taxonomiesConfig map[string]string - -func (t taxonomiesConfig) Values() []viewName { - var vals []viewName - for k, v := range t { - vals = append(vals, viewName{singular: k, plural: v}) +type ( + taxonomiesConfig map[string]string + taxonomiesConfigValues struct { + views []viewName + viewsByTreeKey map[string]viewName } - sort.Slice(vals, func(i, j int) bool { - return vals[i].plural < vals[j].plural +) + +func (t taxonomiesConfig) Values() taxonomiesConfigValues { + var views []viewName + for k, v := range t { + views = append(views, viewName{singular: k, plural: v, pluralTreeKey: cleanTreeKey(v)}) + } + sort.Slice(views, func(i, j int) bool { + return views[i].plural < views[j].plural }) - return vals -} + viewsByTreeKey := make(map[string]viewName) + for _, v := range views { + viewsByTreeKey[v.pluralTreeKey] = v + } -type siteConfigHolder struct { - sitemap config.SitemapConfig - taxonomiesConfig taxonomiesConfig - timeout time.Duration - hasCJKLanguage bool + return taxonomiesConfigValues{ + views: views, + viewsByTreeKey: viewsByTreeKey, + } } // Lazily loaded site dependencies. 
@@ -111,15 +108,6 @@ func (init *siteInit) Reset() { init.taxonomies.Reset() } -func (s *Site) initInit(ctx context.Context, init *lazy.Init, pctx pageContext) bool { - _, err := init.Do(ctx) - - if err != nil { - s.h.FatalError(pctx.wrapError(err)) - } - return err == nil -} - func (s *Site) prepareInits() { s.init = &siteInit{} @@ -153,11 +141,6 @@ func (s *Site) prepareInits() { }) s.init.prevNextInSection = init.Branch(func(context.Context) (any, error) { - var sections page.Pages - s.home.treeRef.m.collectSectionsRecursiveIncludingSelf(pageMapQuery{Prefix: s.home.treeRef.key}, func(n *contentNode) { - sections = append(sections, n.p) - }) - setNextPrev := func(pas page.Pages) { for i, p := range pas { np, ok := p.(nextPrevInSectionProvider) @@ -183,40 +166,35 @@ func (s *Site) prepareInits() { } } - for _, sect := range sections { - treeRef := sect.(treeRefProvider).getTreeRef() + sections := s.pageMap.getPagesInSection( + pageMapQueryPagesInSection{ + pageMapQueryPagesBelowPath: pageMapQueryPagesBelowPath{ + Path: "", + KeyPart: "sectionorhome", + Include: pagePredicates.KindSection.Or(pagePredicates.KindHome), + }, + IncludeSelf: true, + Recursive: true, + }, + ) - var pas page.Pages - treeRef.m.collectPages(pageMapQuery{Prefix: treeRef.key + cmBranchSeparator}, func(c *contentNode) { - pas = append(pas, c.p) - }) - page.SortByDefault(pas) - - setNextPrev(pas) + for _, section := range sections { + setNextPrev(section.RegularPages()) } - // The root section only goes one level down. - treeRef := s.home.getTreeRef() - - var pas page.Pages - treeRef.m.collectPages(pageMapQuery{Prefix: treeRef.key + cmBranchSeparator}, func(c *contentNode) { - pas = append(pas, c.p) - }) - page.SortByDefault(pas) - - setNextPrev(pas) - return nil, nil }) s.init.menus = init.Branch(func(context.Context) (any, error) { - s.assembleMenus() - return nil, nil + err := s.assembleMenus() + return nil, err }) - s.init.taxonomies = init.Branch(func(context.Context) (any, error) { - err := s.pageMap.assembleTaxonomies() - return nil, err + s.init.taxonomies = init.Branch(func(ctx context.Context) (any, error) { + if err := s.pageMap.CreateSiteTaxonomies(ctx); err != nil { + return nil, err + } + return s.taxonomies, nil }) } @@ -232,20 +210,25 @@ func (s *Site) Menus() navigation.Menus { func (s *Site) initRenderFormats() { formatSet := make(map[string]bool) formats := output.Formats{} - rssDisabled := !s.conf.IsKindEnabled("rss") - s.pageMap.pageTrees.WalkRenderable(func(s string, n *contentNode) bool { - for _, f := range n.p.m.configuredOutputFormats { - if rssDisabled && f.Name == "rss" { - // legacy - continue + + w := &doctree.NodeShiftTreeWalker[contentNodeI]{ + Tree: s.pageMap.treePages, + Handle: func(key string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + if p, ok := n.(*pageState); ok { + for _, f := range p.m.configuredOutputFormats { + if !formatSet[f.Name] { + formats = append(formats, f) + formatSet[f.Name] = true + } + } } - if !formatSet[f.Name] { - formats = append(formats, f) - formatSet[f.Name] = true - } - } - return false - }) + return false, nil + }, + } + + if err := w.Walk(context.TODO()); err != nil { + panic(err) + } // Add the per kind configured output formats for _, kind := range kinds.AllKindsInPages { @@ -275,10 +258,6 @@ func (s *Site) Languages() langs.Languages { return s.h.Configs.Languages } -func (s *Site) isEnabled(kind string) bool { - return s.conf.IsKindEnabled(kind) -} - type siteRefLinker struct { s *Site @@ -303,7 +282,7 @@ func (s siteRefLinker) 
logNotFound(ref, what string, p page.Page, position text. } else if p == nil { s.errorLogger.Logf("[%s] REF_NOT_FOUND: Ref %q: %s", s.s.Lang(), ref, what) } else { - s.errorLogger.Logf("[%s] REF_NOT_FOUND: Ref %q from page %q: %s", s.s.Lang(), ref, p.Pathc(), what) + s.errorLogger.Logf("[%s] REF_NOT_FOUND: Ref %q from page %q: %s", s.s.Lang(), ref, p.Path(), what) } } @@ -391,8 +370,26 @@ func (s *Site) watching() bool { } type whatChanged struct { - source bool - files map[string]bool + mu sync.Mutex + + contentChanged bool + identitySet identity.Identities +} + +func (w *whatChanged) Add(ids ...identity.Identity) { + w.mu.Lock() + defer w.mu.Unlock() + + for _, id := range ids { + w.identitySet[id] = true + } +} + +func (w *whatChanged) Changes() []identity.Identity { + if w == nil || w.identitySet == nil { + return nil + } + return w.identitySet.AsSlice() } // RegisterMediaTypes will register the Site's media types in the mime @@ -405,10 +402,10 @@ func (s *Site) RegisterMediaTypes() { } } -func (s *Site) filterFileEvents(events []fsnotify.Event) []fsnotify.Event { - var filtered []fsnotify.Event +func (h *HugoSites) fileEventsFilter(events []fsnotify.Event) []fsnotify.Event { seen := make(map[fsnotify.Event]bool) + n := 0 for _, ev := range events { // Avoid processing the same event twice. if seen[ev] { @@ -416,17 +413,7 @@ func (s *Site) filterFileEvents(events []fsnotify.Event) []fsnotify.Event { } seen[ev] = true - if s.SourceSpec.IgnoreFile(ev.Name) { - continue - } - - // Throw away any directories - isRegular, err := s.SourceSpec.IsRegularSourceFile(ev.Name) - if err != nil && herrors.IsNotExist(err) && (ev.Op&fsnotify.Remove == fsnotify.Remove || ev.Op&fsnotify.Rename == fsnotify.Rename) { - // Force keep of event - isRegular = true - } - if !isRegular { + if h.SourceSpec.IgnoreFile(ev.Name) { continue } @@ -434,23 +421,22 @@ func (s *Site) filterFileEvents(events []fsnotify.Event) []fsnotify.Event { ev.Name = norm.NFC.String(ev.Name) } - filtered = append(filtered, ev) + events[n] = ev + n++ } - - return filtered + return events[:n] } -func (s *Site) translateFileEvents(events []fsnotify.Event) []fsnotify.Event { - var filtered []fsnotify.Event - +func (h *HugoSites) fileEventsTranslate(events []fsnotify.Event) []fsnotify.Event { eventMap := make(map[string][]fsnotify.Event) // We often get a Remove etc. followed by a Create, a Create followed by a Write. - // Remove the superfluous events to mage the update logic simpler. + // Remove the superfluous events to make the update logic simpler. for _, ev := range events { eventMap[ev.Name] = append(eventMap[ev.Name], ev) } + n := 0 for _, ev := range events { mapped := eventMap[ev.Name] @@ -472,236 +458,77 @@ func (s *Site) translateFileEvents(events []fsnotify.Event) []fsnotify.Event { } } - filtered = append(filtered, kept) + events[n] = kept + n++ } - return filtered + return events } -// reBuild partially rebuilds a site given the filesystem events. -// It returns whatever the content source was changed. -// TODO(bep) clean up/rewrite this method. 
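The event helpers above were moved from Site to HugoSites and rewritten to compact the slice in place instead of building a filtered copy: keep a write index n, copy every surviving event into events[n], and re-cut the slice to events[:n]. A small, self-contained sketch of that idiom, using a simplified local event type as a stand-in for fsnotify.Event:

package main

import (
	"fmt"
	"strings"
)

// event is a simplified stand-in for fsnotify.Event (Name + Op).
type event struct {
	Name string
	Op   string
}

// filterEvents drops duplicates and ignored files by compacting the slice
// in place: survivors are copied to index n and the slice is re-cut to n,
// so no second slice is allocated per watch batch.
func filterEvents(events []event, ignore func(string) bool) []event {
	seen := make(map[event]bool)
	n := 0
	for _, ev := range events {
		if seen[ev] {
			continue // exact duplicate of an earlier event
		}
		seen[ev] = true
		if ignore(ev.Name) {
			continue // e.g. editor swap files
		}
		events[n] = ev
		n++
	}
	return events[:n]
}

func main() {
	evs := []event{
		{Name: "content/p1.md", Op: "WRITE"},
		{Name: "content/p1.md", Op: "WRITE"}, // duplicate
		{Name: "content/.p1.md.swp", Op: "CREATE"},
	}
	kept := filterEvents(evs, func(name string) bool {
		return strings.HasSuffix(name, ".swp")
	})
	fmt.Println(kept) // [{content/p1.md WRITE}]
}

The dedup works because the event value is comparable and can key a map directly; the in-place compaction avoids an extra allocation for every batch of watcher events.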
-func (s *Site) processPartial(config *BuildCfg, init func(config *BuildCfg) error, events []fsnotify.Event) error { - events = s.filterFileEvents(events) - events = s.translateFileEvents(events) +func (h *HugoSites) fileEventsContentPaths(p []pathChange) []pathChange { + var bundles []pathChange + var dirs []pathChange + var regular []pathChange - changeIdentities := make(identity.Identities) - - s.Log.Debugf("Rebuild for events %q", events) - - h := s.h - - // First we need to determine what changed - - var ( - sourceChanged = []fsnotify.Event{} - sourceReallyChanged = []fsnotify.Event{} - contentFilesChanged []string - - tmplChanged bool - tmplAdded bool - dataChanged bool - i18nChanged bool - - sourceFilesChanged = make(map[string]bool) - ) - - var cacheBusters []func(string) bool - bcfg := s.conf.Build - - for _, ev := range events { - component, relFilename := s.BaseFs.MakePathRelative(ev.Name) - if relFilename != "" { - p := hglob.NormalizePath(path.Join(component, relFilename)) - g, err := bcfg.MatchCacheBuster(s.Log, p) - if err == nil && g != nil { - cacheBusters = append(cacheBusters, g) - } - } - - id, found := s.eventToIdentity(ev) - if found { - changeIdentities[id] = id - - switch id.Type { - case files.ComponentFolderContent: - s.Log.Println("Source changed", ev) - sourceChanged = append(sourceChanged, ev) - case files.ComponentFolderLayouts: - tmplChanged = true - if !s.Tmpl().HasTemplate(id.Path) { - tmplAdded = true - } - if tmplAdded { - s.Log.Println("Template added", ev) - } else { - s.Log.Println("Template changed", ev) - } - - case files.ComponentFolderData: - s.Log.Println("Data changed", ev) - dataChanged = true - case files.ComponentFolderI18n: - s.Log.Println("i18n changed", ev) - i18nChanged = true - - } + var others []pathChange + for _, p := range p { + if p.isDir { + dirs = append(dirs, p) + } else { + others = append(others, p) } } - changed := &whatChanged{ - source: len(sourceChanged) > 0, - files: sourceFilesChanged, - } - - config.whatChanged = changed - - if err := init(config); err != nil { - return err - } - - var cacheBusterOr func(string) bool - if len(cacheBusters) > 0 { - cacheBusterOr = func(s string) bool { - for _, cb := range cacheBusters { - if cb(s) { - return true + // Remve all files below dir. + if len(dirs) > 0 { + n := 0 + for _, d := range dirs { + dir := d.p.Path() + "/" + for _, o := range others { + if !strings.HasPrefix(o.p.Path(), dir) { + others[n] = o + n++ } } - return false + + } + others = others[:n] + } + + for _, p := range others { + if p.p.IsBundle() { + bundles = append(bundles, p) + } else { + regular = append(regular, p) } } - // These in memory resource caches will be rebuilt on demand. - if len(cacheBusters) > 0 { - s.h.ResourceSpec.ResourceCache.DeleteMatches(cacheBusterOr) - } + // Remove any files below leaf bundles. + // Remove any files in the same folder as branch bundles. 
+ var keepers []pathChange - if tmplChanged || i18nChanged { - s.h.init.Reset() - var prototype *deps.Deps - for i, s := range s.h.Sites { - if err := s.Deps.Compile(prototype); err != nil { - return err - } - if i == 0 { - prototype = s.Deps - } - } - } - - if dataChanged { - s.h.init.data.Reset() - } - - for _, ev := range sourceChanged { - removed := false - - if ev.Op&fsnotify.Remove == fsnotify.Remove { - removed = true - } - - // Some editors (Vim) sometimes issue only a Rename operation when writing an existing file - // Sometimes a rename operation means that file has been renamed other times it means - // it's been updated - if ev.Op&fsnotify.Rename == fsnotify.Rename { - // If the file is still on disk, it's only been updated, if it's not, it's been moved - if ex, err := afero.Exists(s.Fs.Source, ev.Name); !ex || err != nil { - removed = true + for _, o := range regular { + keep := true + for _, b := range bundles { + prefix := b.p.Base() + "/" + if b.p.IsLeafBundle() && strings.HasPrefix(o.p.Path(), prefix) { + keep = false + break + } else if b.p.IsBranchBundle() && o.p.Dir() == b.p.Dir() { + keep = false + break } } - if removed && files.IsContentFile(ev.Name) { - h.removePageByFilename(ev.Name) - } - - sourceReallyChanged = append(sourceReallyChanged, ev) - sourceFilesChanged[ev.Name] = true - } - - if config.ErrRecovery || tmplAdded || dataChanged { - h.resetPageState() - } else { - h.resetPageStateFromEvents(changeIdentities) - } - - if len(sourceReallyChanged) > 0 || len(contentFilesChanged) > 0 { - var filenamesChanged []string - for _, e := range sourceReallyChanged { - filenamesChanged = append(filenamesChanged, e.Name) - } - if len(contentFilesChanged) > 0 { - filenamesChanged = append(filenamesChanged, contentFilesChanged...) - } - - filenamesChanged = helpers.UniqueStringsReuse(filenamesChanged) - - if err := s.readAndProcessContent(*config, filenamesChanged...); err != nil { - return err - } - - } - - return nil -} - -func (s *Site) process(config BuildCfg) (err error) { - if err = s.readAndProcessContent(config); err != nil { - err = fmt.Errorf("readAndProcessContent: %w", err) - return - } - return err -} - -func (s *Site) render(ctx *siteRenderContext) (err error) { - if err := page.Clear(); err != nil { - return err - } - - if ctx.outIdx == 0 { - // Note that even if disableAliases is set, the aliases themselves are - // preserved on page. The motivation with this is to be able to generate - // 301 redirects in a .htacess file and similar using a custom output format. - if !s.conf.DisableAliases { - // Aliases must be rendered before pages. - // Some sites, Hugo docs included, have faulty alias definitions that point - // to itself or another real page. These will be overwritten in the next - // step. - if err = s.renderAliases(); err != nil { - return - } + if keep { + keepers = append(keepers, o) } } - if err = s.renderPages(ctx); err != nil { - return - } + keepers = append(dirs, keepers...) + keepers = append(bundles, keepers...) - if ctx.outIdx == 0 { - if err = s.renderSitemap(); err != nil { - return - } - - if ctx.multihost { - if err = s.renderRobotsTXT(); err != nil { - return - } - } - - if err = s.render404(); err != nil { - return - } - } - - if !ctx.renderSingletonPages() { - return - } - - if err = s.renderMainLanguageRedirect(); err != nil { - return - } - - return + return keepers } // HomeAbsURL is a convenience method giving the absolute URL to the home page. 
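fileEventsContentPaths above winnows the changed paths before the rebuild: directory changes shadow files beneath them, leaf bundles absorb changes to their own resources, and branch bundles absorb sibling files, so only the owning node needs to be rebuilt. A rough, self-contained sketch of the bundle part of that winnowing follows; the change type is a simplified stand-in for pathChange, and the directory case is ignored for brevity.

package main

import (
	"fmt"
	"path"
	"strings"
)

// change is a simplified stand-in for pathChange: a slash-separated path plus
// flags Hugo derives from the file name (index.md => leaf, _index.md => branch).
type change struct {
	p      string
	leaf   bool
	branch bool
}

// winnow keeps the bundle changes and drops regular files that a changed
// bundle already covers.
func winnow(changes []change) []change {
	var bundles, regular []change
	for _, c := range changes {
		if c.leaf || c.branch {
			bundles = append(bundles, c)
		} else {
			regular = append(regular, c)
		}
	}

	keepers := append([]change(nil), bundles...)
	for _, r := range regular {
		covered := false
		for _, b := range bundles {
			dir := path.Dir(b.p)
			if b.leaf && strings.HasPrefix(r.p, dir+"/") {
				covered = true // resource inside a leaf bundle
				break
			}
			if b.branch && path.Dir(r.p) == dir {
				covered = true // file in the same folder as a branch bundle
				break
			}
		}
		if !covered {
			keepers = append(keepers, r)
		}
	}
	return keepers
}

func main() {
	out := winnow([]change{
		{p: "/posts/post1/index.md", leaf: true},
		{p: "/posts/post1/cover.jpg"}, // dropped: below the leaf bundle
		{p: "/docs/_index.md", branch: true},
		{p: "/docs/note.md"},   // dropped: sibling of the branch bundle
		{p: "/posts/post2.md"}, // kept
	})
	for _, c := range out {
		fmt.Println(c.p)
	}
}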
@@ -723,47 +550,20 @@ func (s *Site) SitemapAbsURL() string { return p } -func (s *Site) eventToIdentity(e fsnotify.Event) (identity.PathIdentity, bool) { - for _, fs := range s.BaseFs.SourceFilesystems.FileSystems() { - if p := fs.Path(e.Name); p != "" { - return identity.NewPathIdentity(fs.Name, filepath.ToSlash(p)), true - } - } - return identity.PathIdentity{}, false -} - -func (s *Site) readAndProcessContent(buildConfig BuildCfg, filenames ...string) error { - if s.Deps == nil { - panic("nil deps on site") - } - - sourceSpec := source.NewSourceSpec(s.PathSpec, buildConfig.ContentInclusionFilter, s.BaseFs.Content.Fs) - - proc := newPagesProcessor(s.h, sourceSpec) - - c := newPagesCollector(sourceSpec, s.h.getContentMaps(), s.Log, s.h.ContentChanges, proc, filenames...) - - if err := c.Collect(); err != nil { - return err - } - - return nil -} - func (s *Site) createNodeMenuEntryURL(in string) string { if !strings.HasPrefix(in, "/") { return in } // make it match the nodes menuEntryURL := in - menuEntryURL = helpers.SanitizeURLKeepTrailingSlash(s.s.PathSpec.URLize(menuEntryURL)) + menuEntryURL = s.s.PathSpec.URLize(menuEntryURL) if !s.conf.CanonifyURLs { menuEntryURL = paths.AddContextRoot(s.s.PathSpec.Cfg.BaseURL().String(), menuEntryURL) } return menuEntryURL } -func (s *Site) assembleMenus() { +func (s *Site) assembleMenus() error { s.menus = make(navigation.Menus) type twoD struct { @@ -775,14 +575,9 @@ func (s *Site) assembleMenus() { // add menu entries from config to flat hash for name, menu := range s.conf.Menus.Config { for _, me := range menu { - if types.IsNil(me.Page) { - if me.PageRef != "" { - // Try to resolve the page. - p, _ := s.getPageNew(nil, me.PageRef) - if !types.IsNil(p) { - navigation.SetPageValues(me, p) - } - } + if types.IsNil(me.Page) && me.PageRef != "" { + // Try to resolve the page. + me.Page, _ = s.getPage(nil, me.PageRef) } // If page is still nill, we must make sure that we have a URL that considers baseURL etc. @@ -797,37 +592,32 @@ func (s *Site) assembleMenus() { sectionPagesMenu := s.conf.SectionPagesMenu if sectionPagesMenu != "" { - s.pageMap.sections.Walk(func(s string, v any) bool { - p := v.(*contentNode).p - if p.IsHome() { - return false + if err := s.pageMap.forEachPage(pagePredicates.ShouldListGlobal, func(p *pageState) (bool, error) { + if p.IsHome() || !p.m.shouldBeCheckedForMenuDefinitions() { + return false, nil } - // From Hugo 0.22 we have nested sections, but until we get a - // feel of how that would work in this setting, let us keep - // this menu for the top level only. + // The section pages menus are attached to the top level section. 
id := p.Section() if _, ok := flat[twoD{sectionPagesMenu, id}]; ok { - return false + return false, nil } - me := navigation.MenuEntry{ MenuConfig: navigation.MenuConfig{ Identifier: id, Name: p.LinkTitle(), Weight: p.Weight(), }, + Page: p, } navigation.SetPageValues(&me, p) flat[twoD{sectionPagesMenu, me.KeyName()}] = &me - - return false - }) + return false, nil + }); err != nil { + return err + } } - // Add menu entries provided by pages - s.pageMap.pageTrees.WalkRenderable(func(ss string, n *contentNode) bool { - p := n.p - + if err := s.pageMap.forEachPage(pagePredicates.ShouldListGlobal, func(p *pageState) (bool, error) { for name, me := range p.pageMenus.menus() { if _, ok := flat[twoD{name, me.KeyName()}]; ok { err := p.wrapError(fmt.Errorf("duplicate menu entry with identifier %q in menu %q", me.KeyName(), name)) @@ -836,9 +626,10 @@ func (s *Site) assembleMenus() { } flat[twoD{name, me.KeyName()}] = me } - - return false - }) + return false, nil + }); err != nil { + return err + } // Create Children Menus First for _, e := range flat { @@ -871,6 +662,8 @@ func (s *Site) assembleMenus() { s.menus[menu.MenuName] = s.menus[menu.MenuName].Add(e) } } + + return nil } // get any language code to prefix the target file path with. @@ -893,39 +686,12 @@ func (s *Site) getLanguagePermalinkLang(alwaysInSubDir bool) string { } return s.GetLanguagePrefix() - -} - -func (s *Site) getTaxonomyKey(key string) string { - if s.conf.DisablePathToLower { - return s.PathSpec.MakePath(key) - } - return strings.ToLower(s.PathSpec.MakePath(key)) } // Prepare site for a new full build. func (s *Site) resetBuildState(sourceChanged bool) { s.relatedDocsHandler = s.relatedDocsHandler.Clone() s.init.Reset() - - if sourceChanged { - s.pageMap.contentMap.pageReverseIndex.Reset() - s.PageCollections = newPageCollections(s.pageMap) - s.pageMap.withEveryBundlePage(func(p *pageState) bool { - p.pagePages = &pagePages{} - if p.bucket != nil { - p.bucket.pagesMapBucketPages = &pagesMapBucketPages{} - } - p.parent = nil - p.Scratcher = maps.NewScratcher() - return false - }) - } else { - s.pageMap.withEveryBundlePage(func(p *pageState) bool { - p.Scratcher = maps.NewScratcher() - return false - }) - } } func (s *Site) errorCollator(results <-chan error, errs chan<- error) { @@ -947,7 +713,7 @@ func (s *Site) errorCollator(results <-chan error, errs chan<- error) { // as possible for existing sites. Most sites will use {{ .Site.GetPage "section" "my/section" }}, // i.e. 2 arguments, so we test for that. func (s *Site) GetPage(ref ...string) (page.Page, error) { - p, err := s.s.getPageOldVersion(ref...) + p, err := s.s.getPageForRefs(ref...) if p == nil { // The nil struct has meaning in some situations, mostly to avoid breaking @@ -959,22 +725,6 @@ func (s *Site) GetPage(ref ...string) (page.Page, error) { return p, err } -func (s *Site) GetPageWithTemplateInfo(info tpl.Info, ref ...string) (page.Page, error) { - p, err := s.GetPage(ref...) - if p != nil { - // Track pages referenced by templates/shortcodes - // when in server mode. 
- if im, ok := info.(identity.Manager); ok { - im.Add(p) - } - } - return p, err -} - -func (s *Site) permalink(link string) string { - return s.PathSpec.PermalinkForBaseURL(link, s.PathSpec.Cfg.BaseURL().String()) -} - func (s *Site) absURLPath(targetPath string) string { var path string if s.conf.RelativeURLs { @@ -990,46 +740,23 @@ func (s *Site) absURLPath(targetPath string) string { return path } -func (s *Site) lookupLayouts(layouts ...string) tpl.Template { - for _, l := range layouts { - if templ, found := s.Tmpl().Lookup(l); found { - return templ - } - } +const ( + pageDependencyScopeDefault int = iota + pageDependencyScopeGlobal +) - return nil -} - -func (s *Site) renderAndWriteXML(ctx context.Context, statCounter *uint64, name string, targetPath string, d any, templ tpl.Template) error { - renderBuffer := bp.GetBuffer() - defer bp.PutBuffer(renderBuffer) - - if err := s.renderForTemplate(ctx, name, "", d, renderBuffer, templ); err != nil { - return err - } - - pd := publisher.Descriptor{ - Src: renderBuffer, - TargetPath: targetPath, - StatCounter: statCounter, - // For the minification part of XML, - // we currently only use the MIME type. - OutputFormat: output.RSSFormat, - AbsURLPath: s.absURLPath(targetPath), - } - - return s.publisher.Publish(pd) -} - -func (s *Site) renderAndWritePage(statCounter *uint64, name string, targetPath string, p *pageState, templ tpl.Template) error { - s.h.IncrPageRender() +func (s *Site) renderAndWritePage(statCounter *uint64, name string, targetPath string, p *pageState, d any, templ tpl.Template) error { + s.h.buildCounters.pageRenderCounter.Add(1) renderBuffer := bp.GetBuffer() defer bp.PutBuffer(renderBuffer) of := p.outputFormat() - ctx := tpl.SetPageInContext(context.Background(), p) + p.incrRenderState() - if err := s.renderForTemplate(ctx, p.Kind(), of.Name, p, renderBuffer, templ); err != nil { + ctx := tpl.Context.Page.Set(context.Background(), p) + ctx = tpl.Context.DependencyManagerScopedProvider.Set(ctx, p) + + if err := s.renderForTemplate(ctx, p.Kind(), of.Name, d, renderBuffer, templ); err != nil { return err } @@ -1078,7 +805,6 @@ var infoOnMissingLayout = map[string]bool{ // where ITEM is the thing being hooked. 
type hookRendererTemplate struct { templateHandler tpl.TemplateHandler - identity.SearchProvider templ tpl.Template resolvePosition func(ctx any) text.Position } @@ -1119,92 +845,17 @@ func (s *Site) renderForTemplate(ctx context.Context, name, outputFormat string, return } -func (s *Site) lookupTemplate(layouts ...string) (tpl.Template, bool) { - for _, l := range layouts { - if templ, found := s.Tmpl().Lookup(l); found { - return templ, true - } - } - - return nil, false -} - -func (s *Site) publish(statCounter *uint64, path string, r io.Reader, fs afero.Fs) (err error) { - s.PathSpec.ProcessingStats.Incr(statCounter) - - return helpers.WriteToDisk(filepath.Clean(path), r, fs) -} - -func (s *Site) kindFromFileInfoOrSections(fi *fileInfo, sections []string) string { - if fi.TranslationBaseName() == "_index" { - if fi.Dir() == "" { - return kinds.KindHome - } - - return s.kindFromSections(sections) - - } - - return kinds.KindPage -} - -func (s *Site) kindFromSections(sections []string) string { - if len(sections) == 0 { - return kinds.KindHome - } - - return s.kindFromSectionPath(path.Join(sections...)) -} - -func (s *Site) kindFromSectionPath(sectionPath string) string { - var taxonomiesConfig taxonomiesConfig = s.conf.Taxonomies - for _, plural := range taxonomiesConfig { - if plural == sectionPath { - return kinds.KindTaxonomy - } - - if strings.HasPrefix(sectionPath, plural) { - return kinds.KindTerm - } - - } - - return kinds.KindSection -} - -func (s *Site) newPage( - n *contentNode, - parentbBucket *pagesMapBucket, - kind, title string, - sections ...string) *pageState { - m := map[string]any{} - if title != "" { - m["title"] = title - } - - p, err := newPageFromMeta( - n, - parentbBucket, - m, - &pageMeta{ - s: s, - kind: kind, - sections: sections, - }) - if err != nil { - panic(err) - } - - return p -} - func (s *Site) shouldBuild(p page.Page) bool { + if !s.conf.IsKindEnabled(p.Kind()) { + return false + } return shouldBuild(s.Conf.BuildFuture(), s.Conf.BuildExpired(), s.Conf.BuildDrafts(), p.Draft(), p.PublishDate(), p.ExpiryDate()) } func shouldBuild(buildFuture bool, buildExpired bool, buildDrafts bool, Draft bool, - publishDate time.Time, expiryDate time.Time) bool { + publishDate time.Time, expiryDate time.Time, +) bool { if !(buildDrafts || !Draft) { return false } @@ -1217,3 +868,38 @@ func shouldBuild(buildFuture bool, buildExpired bool, buildDrafts bool, Draft bo } return true } + +func (s *Site) render(ctx *siteRenderContext) (err error) { + if err := page.Clear(); err != nil { + return err + } + + if ctx.outIdx == 0 { + // Note that even if disableAliases is set, the aliases themselves are + // preserved on page. The motivation with this is to be able to generate + // 301 redirects in a .htacess file and similar using a custom output format. + if !s.conf.DisableAliases { + // Aliases must be rendered before pages. + // Some sites, Hugo docs included, have faulty alias definitions that point + // to itself or another real page. These will be overwritten in the next + // step. 
+ if err = s.renderAliases(); err != nil { + return + } + } + } + + if err = s.renderPages(ctx); err != nil { + return + } + + if !ctx.shouldRenderStandalonePage("") { + return + } + + if err = s.renderMainLanguageRedirect(); err != nil { + return + } + + return +} diff --git a/hugolib/site_benchmark_new_test.go b/hugolib/site_benchmark_new_test.go index 5f4d3f117..023d8e4d5 100644 --- a/hugolib/site_benchmark_new_test.go +++ b/hugolib/site_benchmark_new_test.go @@ -420,11 +420,11 @@ baseURL = "https://example.com" createContent := func(dir, name string) { var content string if strings.Contains(name, "_index") { - content = pageContent(1) + // Empty } else { content = pageContentWithCategory(1, fmt.Sprintf("category%d", r.Intn(5)+1)) - sb.WithContent(filepath.Join("content", dir, name), content) } + sb.WithContent(filepath.Join("content", dir, name), content) } for level := 1; level <= r.Intn(5)+1; level++ { @@ -454,6 +454,9 @@ baseURL = "https://example.com" func TestBenchmarkSite(b *testing.T) { benchmarks := getBenchmarkSiteTestCases() for _, bm := range benchmarks { + if bm.name != "Deep content tree" { + continue + } b.Run(bm.name, func(b *testing.T) { s := bm.create(b) @@ -478,13 +481,13 @@ title: %s Edited!!`, p.Title())) - counters := &testCounters{} + counters := &buildCounters{} b.Build(BuildCfg{testCounters: counters}) // We currently rebuild all the language versions of the same content file. // We could probably optimize that case, but it's not trivial. - b.Assert(int(counters.contentRenderCounter), qt.Equals, 4) + b.Assert(int(counters.contentRenderCounter.Load()), qt.Equals, 4) b.AssertFileContent("public"+p.RelPermalink()+"index.html", "Edited!!") } @@ -534,7 +537,7 @@ func BenchmarkSiteNew(b *testing.B) { panic("infinite loop") } p = pages[rnd.Intn(len(pages))] - if !p.File().IsZero() { + if p.File() != nil { break } } diff --git a/hugolib/site_new.go b/hugolib/site_new.go index da9d19f21..ddf45c286 100644 --- a/hugolib/site_new.go +++ b/hugolib/site_new.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -22,8 +22,8 @@ import ( "sort" "time" - radix "github.com/armon/go-radix" "github.com/bep/logg" + "github.com/gohugoio/hugo/cache/dynacache" "github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/common/maps" @@ -31,6 +31,7 @@ import ( "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/config/allconfig" "github.com/gohugoio/hugo/deps" + "github.com/gohugoio/hugo/hugolib/doctree" "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/langs" "github.com/gohugoio/hugo/langs/i18n" @@ -39,9 +40,9 @@ import ( "github.com/gohugoio/hugo/navigation" "github.com/gohugoio/hugo/output" "github.com/gohugoio/hugo/publisher" - "github.com/gohugoio/hugo/resources/kinds" "github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/resources/page/pagemeta" + "github.com/gohugoio/hugo/resources/page/siteidentities" "github.com/gohugoio/hugo/resources/resource" "github.com/gohugoio/hugo/tpl" "github.com/gohugoio/hugo/tpl/tplimpl" @@ -50,8 +51,10 @@ import ( var _ page.Site = (*Site)(nil) type Site struct { - conf *allconfig.Config - language *langs.Language + conf *allconfig.Config + language *langs.Language + languagei int + pageMap *pageMap // The owning container. 
h *HugoSites @@ -59,12 +62,10 @@ type Site struct { *deps.Deps // Page navigation. - *PageCollections + *pageFinder taxonomies page.TaxonomyList menus navigation.Menus - siteBucket *pagesMapBucket - // Shortcut to the home page. Note that this may be nil if // home page, for some odd reason, is disabled. home *pageState @@ -93,7 +94,7 @@ type Site struct { func (s *Site) Debug() { fmt.Println("Debugging site", s.Lang(), "=>") - fmt.Println(s.pageMap.testDump()) + // fmt.Println(s.pageMap.testDump()) } // NewHugoSites creates HugoSites from the given config. @@ -127,10 +128,13 @@ func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) { logger = loggers.New(logOpts) } + memCache := dynacache.New(dynacache.Options{Running: conf.Running(), Log: logger}) + firstSiteDeps := &deps.Deps{ Fs: cfg.Fs, Log: logger, Conf: conf, + MemCache: memCache, TemplateProvider: tplimpl.DefaultTemplateProvider, TranslationProvider: i18n.NewTranslationProvider(), } @@ -142,14 +146,40 @@ func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) { confm := cfg.Configs var sites []*Site + ns := &contentNodeShifter{ + numLanguages: len(confm.Languages), + } + + treeConfig := doctree.Config[contentNodeI]{ + Shifter: ns, + } + + pageTrees := &pageTrees{ + treePages: doctree.New( + treeConfig, + ), + treeResources: doctree.New( + treeConfig, + ), + treeTaxonomyEntries: doctree.NewTreeShiftTree[*weightedContentNode](doctree.DimensionLanguage.Index(), len(confm.Languages)), + } + + pageTrees.treePagesResources = doctree.WalkableTrees[contentNodeI]{ + pageTrees.treePages, + pageTrees.treeResources, + } + + pageTrees.resourceTrees = doctree.MutableTrees{ + pageTrees.treeResources, + } + for i, confp := range confm.ConfigLangs() { language := confp.Language() - if confp.IsLangDisabled(language.Lang) { + if language.Disabled { continue } k := language.Lang conf := confm.LanguageConfigMap[k] - frontmatterHandler, err := pagemeta.NewFrontmatterHandler(firstSiteDeps.Log, conf.Frontmatter) if err != nil { return nil, err @@ -158,11 +188,9 @@ func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) { langs.SetParams(language, conf.Params) s := &Site{ - conf: conf, - language: language, - siteBucket: &pagesMapBucket{ - cascade: conf.Cascade.Config, - }, + conf: conf, + language: language, + languagei: i, frontmatterHandler: frontmatterHandler, } @@ -177,20 +205,9 @@ func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) { s.Deps = d } - // Site deps start. - var taxonomiesConfig taxonomiesConfig = conf.Taxonomies - pm := &pageMap{ - contentMap: newContentMap(contentMapConfig{ - lang: k, - taxonomyConfig: taxonomiesConfig.Values(), - taxonomyDisabled: !conf.IsKindEnabled(kinds.KindTerm), - taxonomyTermDisabled: !conf.IsKindEnabled(kinds.KindTaxonomy), - pageDisabled: !conf.IsKindEnabled(kinds.KindPage), - }), - s: s, - } + s.pageMap = newPageMap(i, s, memCache, pageTrees) - s.PageCollections = newPageCollections(pm) + s.pageFinder = newPageFinder(s.pageMap) s.siteRefLinker, err = newSiteRefLinker(s) if err != nil { return nil, err @@ -217,17 +234,26 @@ func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) { return nil, errors.New("no sites to build") } - // Sort the sites by language weight (if set) or lang. + // Pull the default content language to the top, then sort the sites by language weight (if set) or lang. 
+ defaultContentLanguage := confm.Base.DefaultContentLanguage sort.Slice(sites, func(i, j int) bool { li := sites[i].language lj := sites[j].language + if li.Lang == defaultContentLanguage { + return true + } + + if lj.Lang == defaultContentLanguage { + return false + } + if li.Weight != lj.Weight { return li.Weight < lj.Weight } return li.Lang < lj.Lang }) - h, err := newHugoSitesNew(cfg, firstSiteDeps, sites) + h, err := newHugoSites(cfg, firstSiteDeps, pageTrees, sites) if err == nil && h == nil { panic("hugo: newHugoSitesNew returned nil error and nil HugoSites") } @@ -235,29 +261,33 @@ func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) { return h, err } -func newHugoSitesNew(cfg deps.DepsCfg, d *deps.Deps, sites []*Site) (*HugoSites, error) { +func newHugoSites(cfg deps.DepsCfg, d *deps.Deps, pageTrees *pageTrees, sites []*Site) (*HugoSites, error) { numWorkers := config.GetNumWorkerMultiplier() - if numWorkers > len(sites) { - numWorkers = len(sites) - } - var workers *para.Workers - if numWorkers > 1 { - workers = para.New(numWorkers) + numWorkersSite := numWorkers + if numWorkersSite > len(sites) { + numWorkersSite = len(sites) } + workersSite := para.New(numWorkersSite) h := &HugoSites{ - Sites: sites, - Deps: sites[0].Deps, - Configs: cfg.Configs, - workers: workers, - numWorkers: numWorkers, + Sites: sites, + Deps: sites[0].Deps, + Configs: cfg.Configs, + workersSite: workersSite, + numWorkersSites: numWorkers, + numWorkers: numWorkers, + pageTrees: pageTrees, + cachePages: dynacache.GetOrCreatePartition[string, + page.Pages](d.MemCache, "/pags/all", + dynacache.OptionsPartition{Weight: 10, ClearWhen: dynacache.ClearOnRebuild}, + ), + translationKeyPages: maps.NewSliceCache[page.Page](), currentSite: sites[0], skipRebuildForFilenames: make(map[string]bool), init: &hugoSitesInit{ - data: lazy.New(), - layouts: lazy.New(), - gitInfo: lazy.New(), - translations: lazy.New(), + data: lazy.New(), + layouts: lazy.New(), + gitInfo: lazy.New(), }, } @@ -304,18 +334,8 @@ func newHugoSitesNew(cfg deps.DepsCfg, d *deps.Deps, sites []*Site) (*HugoSites, donec: make(chan bool), } - // Only needed in server mode. - if cfg.Configs.Base.Internal.Watch { - h.ContentChanges = &contentChangeMap{ - pathSpec: h.PathSpec, - symContent: make(map[string]map[string]bool), - leafBundles: radix.New(), - branchBundles: make(map[string]bool), - } - } - h.init.data.Add(func(context.Context) (any, error) { - err := h.loadData(h.PathSpec.BaseFs.Data.Dirs) + err := h.loadData() if err != nil { return nil, fmt.Errorf("failed to load data: %w", err) } @@ -331,15 +351,6 @@ func newHugoSitesNew(cfg deps.DepsCfg, d *deps.Deps, sites []*Site) (*HugoSites, return nil, nil }) - h.init.translations.Add(func(context.Context) (any, error) { - if len(h.Sites) > 1 { - allTranslations := pagesToTranslationsMap(h.Sites) - assignTranslationsToPages(allTranslations, h.Sites) - } - - return nil, nil - }) - h.init.gitInfo.Add(func(context.Context) (any, error) { err := h.loadGitInfo() if err != nil { @@ -422,10 +433,16 @@ func (s *Site) BaseURL() string { } // Returns the last modification date of the content. +// Deprecated: Use .Lastmod instead. func (s *Site) LastChange() time.Time { return s.lastmod } +// Returns the last modification date of the content. +func (s *Site) Lastmod() time.Time { + return s.lastmod +} + // Returns the Params configured for this site. 
func (s *Site) Params() maps.Params { return s.conf.Params @@ -480,12 +497,55 @@ func (s *Site) LanguagePrefix() string { return "/" + prefix } -// Returns the identity of this site. -// This is for internal use only. -func (s *Site) GetIdentity() identity.Identity { - return identity.KeyValueIdentity{Key: "site", Value: s.Lang()} -} - func (s *Site) Site() page.Site { return page.WrapSite(s) } + +func (s *Site) ForEeachIdentityByName(name string, f func(identity.Identity) bool) { + if id, found := siteidentities.FromString(name); found { + if f(id) { + return + } + } +} + +// Pages returns all pages. +// This is for the current language only. +func (s *Site) Pages() page.Pages { + return s.pageMap.getPagesInSection( + pageMapQueryPagesInSection{ + pageMapQueryPagesBelowPath: pageMapQueryPagesBelowPath{ + Path: "", + KeyPart: "global", + Include: pagePredicates.ShouldListGlobal, + }, + Recursive: true, + IncludeSelf: true, + }, + ) +} + +// RegularPages returns all the regular pages. +// This is for the current language only. +func (s *Site) RegularPages() page.Pages { + return s.pageMap.getPagesInSection( + pageMapQueryPagesInSection{ + pageMapQueryPagesBelowPath: pageMapQueryPagesBelowPath{ + Path: "", + KeyPart: "global", + Include: pagePredicates.ShouldListGlobal.And(pagePredicates.KindPage), + }, + Recursive: true, + }, + ) +} + +// AllPages returns all pages for all sites. +func (s *Site) AllPages() page.Pages { + return s.h.Pages() +} + +// AllRegularPages returns all regular pages for all sites. +func (s *Site) AllRegularPages() page.Pages { + return s.h.RegularPages() +} diff --git a/hugolib/site_output.go b/hugolib/site_output.go index d6f55cbdd..2744c0133 100644 --- a/hugolib/site_output.go +++ b/hugolib/site_output.go @@ -42,7 +42,7 @@ func createDefaultOutputFormats(allFormats output.Formats) map[string]output.For // Below are for consistency. They are currently not used during rendering. 
kinds.KindSitemap: {sitemapOut}, kinds.KindRobotsTXT: {robotsOut}, - kinds.Kind404: {htmlOut}, + kinds.KindStatus404: {htmlOut}, } // May be disabled diff --git a/hugolib/site_output_test.go b/hugolib/site_output_test.go index c2a14c3eb..9bcb13ea4 100644 --- a/hugolib/site_output_test.go +++ b/hugolib/site_output_test.go @@ -142,7 +142,7 @@ Len Pages: {{ .Kind }} {{ len .Site.RegularPages }} Page Number: {{ .Paginator.P s := b.H.Sites[0] b.Assert(s.language.Lang, qt.Equals, "en") - home := s.getPage(kinds.KindHome) + home := s.getPageOldVersion(kinds.KindHome) b.Assert(home, qt.Not(qt.IsNil)) @@ -314,7 +314,7 @@ baseName = "customdelimbase" th.assertFileContent("public/nosuffixbase", "no suffix") th.assertFileContent("public/customdelimbase_del", "custom delim") - home := s.getPage(kinds.KindHome) + home := s.getPageOldVersion(kinds.KindHome) c.Assert(home, qt.Not(qt.IsNil)) outputs := home.OutputFormats() @@ -383,7 +383,7 @@ func TestCreateSiteOutputFormats(t *testing.T) { c.Assert(outputs[kinds.KindRSS], deepEqualsOutputFormats, output.Formats{output.RSSFormat}) c.Assert(outputs[kinds.KindSitemap], deepEqualsOutputFormats, output.Formats{output.SitemapFormat}) c.Assert(outputs[kinds.KindRobotsTXT], deepEqualsOutputFormats, output.Formats{output.RobotsTxtFormat}) - c.Assert(outputs[kinds.Kind404], deepEqualsOutputFormats, output.Formats{output.HTMLFormat}) + c.Assert(outputs[kinds.KindStatus404], deepEqualsOutputFormats, output.Formats{output.HTMLFormat}) }) // Issue #4528 diff --git a/hugolib/site_render.go b/hugolib/site_render.go index 43371b44d..379dd6e86 100644 --- a/hugolib/site_render.go +++ b/hugolib/site_render.go @@ -20,23 +20,21 @@ import ( "strings" "sync" - "github.com/gohugoio/hugo/output/layouts" + "github.com/gohugoio/hugo/hugolib/doctree" "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/tpl" - "errors" - - "github.com/gohugoio/hugo/output" - "github.com/gohugoio/hugo/resources/kinds" "github.com/gohugoio/hugo/resources/page" - "github.com/gohugoio/hugo/resources/page/pagemeta" ) type siteRenderContext struct { cfg *BuildCfg + // languageIdx is the zero based index of the site. + languageIdx int + // Zero based index for all output formats combined. sitesOutIdx int @@ -47,20 +45,24 @@ type siteRenderContext struct { multihost bool } -// Whether to render 404.html, robotsTXT.txt which usually is rendered -// once only in the site root. -func (s siteRenderContext) renderSingletonPages() bool { - if s.multihost { +// Whether to render 404.html, robotsTXT.txt and similar. +// These are useually rendered once in the root of public. +func (s siteRenderContext) shouldRenderStandalonePage(kind string) bool { + if s.multihost || kind == kinds.KindSitemap { // 1 per site return s.outIdx == 0 } - // 1 for all sites - return s.sitesOutIdx == 0 + if kind == kinds.KindStatus404 { + // 1 for all output formats + return s.outIdx == 0 + } + + // 1 for all sites and output formats. + return s.languageIdx == 0 && s.outIdx == 0 } -// renderPages renders pages each corresponding to a markdown file. -// TODO(bep np doc +// renderPages renders pages concurrently. 
func (s *Site) renderPages(ctx *siteRenderContext) error { numWorkers := config.GetNumWorkerMultiplier() @@ -79,18 +81,26 @@ func (s *Site) renderPages(ctx *siteRenderContext) error { cfg := ctx.cfg - s.pageMap.pageTrees.Walk(func(ss string, n *contentNode) bool { - - if cfg.shouldRender(n.p) { - select { - case <-s.h.Done(): - return true - default: - pages <- n.p + w := &doctree.NodeShiftTreeWalker[contentNodeI]{ + Tree: s.pageMap.treePages, + Handle: func(key string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + if p, ok := n.(*pageState); ok { + if cfg.shouldRender(p) { + select { + case <-s.h.Done(): + return true, nil + default: + pages <- p + } + } } - } - return false - }) + return false, nil + }, + } + + if err := w.Walk(context.Background()); err != nil { + return err + } close(pages) @@ -110,10 +120,15 @@ func pageRenderer( s *Site, pages <-chan *pageState, results chan<- error, - wg *sync.WaitGroup) { + wg *sync.WaitGroup, +) { defer wg.Done() for p := range pages { + if p.m.isStandalone() && !ctx.shouldRenderStandalonePage(p.Kind()) { + continue + } + if p.m.buildConfig.PublishResources { if err := p.renderResources(); err != nil { s.SendError(p.errorf(err, "failed to render page resources")) @@ -133,13 +148,33 @@ func pageRenderer( } if !found { - s.logMissingLayout("", p.Layout(), p.Kind(), p.f.Name) + s.Log.Trace( + func() string { + return fmt.Sprintf("no layout for kind %q found", p.Kind()) + }, + ) + // Don't emit warning for missing 404 etc. pages. + if !p.m.isStandalone() { + s.logMissingLayout("", p.Layout(), p.Kind(), p.f.Name) + } continue } targetPath := p.targetPaths().TargetFilename - if err := s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "page "+p.Title(), targetPath, p, templ); err != nil { + s.Log.Trace( + func() string { + return fmt.Sprintf("rendering outputFormat %q kind %q using layout %q to %q", p.pageOutput.f.Name, p.Kind(), templ.Name(), targetPath) + }, + ) + + var d any = p + switch p.Kind() { + case kinds.KindSitemapIndex: + d = s.h.Sites + } + + if err := s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "page "+p.Title(), targetPath, p, d, templ); err != nil { results <- err } @@ -205,6 +240,7 @@ func (s *Site) renderPaginator(p *pageState, templ tpl.Template) error { if err := s.writeDestAlias(targetPaths.TargetFilename, p.Permalink(), f, p); err != nil { return err } + } // Render pages for the rest @@ -217,7 +253,7 @@ func (s *Site) renderPaginator(p *pageState, templ tpl.Template) error { if err := s.renderAndWritePage( &s.PathSpec.ProcessingStats.PaginatorPages, p.Title(), - targetPaths.TargetFilename, p, templ); err != nil { + targetPaths.TargetFilename, p, p, templ); err != nil { return err } @@ -226,160 +262,72 @@ func (s *Site) renderPaginator(p *pageState, templ tpl.Template) error { return nil } -func (s *Site) render404() error { - p, err := newPageStandalone(&pageMeta{ - s: s, - kind: kinds.Kind404, - urlPaths: pagemeta.URLPath{ - URL: "404.html", - }, - }, - output.HTMLFormat, - ) - if err != nil { - return err - } - - if !p.render { - return nil - } - - var d layouts.LayoutDescriptor - d.Kind = kinds.Kind404 - - templ, found, err := s.Tmpl().LookupLayout(d, output.HTMLFormat) - if err != nil { - return err - } - if !found { - return nil - } - - targetPath := p.targetPaths().TargetFilename - - if targetPath == "" { - return errors.New("failed to create targetPath for 404 page") - } - - return s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "404 page", targetPath, p, templ) -} - -func (s *Site) 
renderSitemap() error { - p, err := newPageStandalone(&pageMeta{ - s: s, - kind: kinds.KindSitemap, - urlPaths: pagemeta.URLPath{ - URL: s.conf.Sitemap.Filename, - }, - }, - output.HTMLFormat, - ) - if err != nil { - return err - } - - if !p.render { - return nil - } - - targetPath := p.targetPaths().TargetFilename - ctx := tpl.SetPageInContext(context.Background(), p) - - if targetPath == "" { - return errors.New("failed to create targetPath for sitemap") - } - - templ := s.lookupLayouts("sitemap.xml", "_default/sitemap.xml", "_internal/_default/sitemap.xml") - - return s.renderAndWriteXML(ctx, &s.PathSpec.ProcessingStats.Sitemaps, "sitemap", targetPath, p, templ) -} - -func (s *Site) renderRobotsTXT() error { - if !s.conf.EnableRobotsTXT && s.isEnabled(kinds.KindRobotsTXT) { - return nil - } - - p, err := newPageStandalone(&pageMeta{ - s: s, - kind: kinds.KindRobotsTXT, - urlPaths: pagemeta.URLPath{ - URL: "robots.txt", - }, - }, - output.RobotsTxtFormat) - if err != nil { - return err - } - - if !p.render { - return nil - } - - templ := s.lookupLayouts("robots.txt", "_default/robots.txt", "_internal/_default/robots.txt") - - return s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "Robots Txt", p.targetPaths().TargetFilename, p, templ) -} - // renderAliases renders shell pages that simply have a redirect in the header. func (s *Site) renderAliases() error { - var err error - s.pageMap.pageTrees.WalkLinkable(func(ss string, n *contentNode) bool { - p := n.p - if len(p.Aliases()) == 0 { - return false - } + w := &doctree.NodeShiftTreeWalker[contentNodeI]{ + Tree: s.pageMap.treePages, + Handle: func(key string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + p := n.(*pageState) - pathSeen := make(map[string]bool) - - for _, of := range p.OutputFormats() { - if !of.Format.IsHTML { - continue + // We cannot alias a page that's not rendered. + if p.m.noLink() { + return false, nil } - f := of.Format - - if pathSeen[f.Path] { - continue + if len(p.Aliases()) == 0 { + return false, nil } - pathSeen[f.Path] = true - plink := of.Permalink() - - for _, a := range p.Aliases() { - isRelative := !strings.HasPrefix(a, "/") - - if isRelative { - // Make alias relative, where "." will be on the - // same directory level as the current page. - basePath := path.Join(p.targetPaths().SubResourceBaseLink, "..") - a = path.Join(basePath, a) - - } else { - // Make sure AMP and similar doesn't clash with regular aliases. - a = path.Join(f.Path, a) + pathSeen := make(map[string]bool) + for _, of := range p.OutputFormats() { + if !of.Format.IsHTML { + continue } - if s.conf.C.IsUglyURLSection(p.Section()) && !strings.HasSuffix(a, ".html") { - a += ".html" + f := of.Format + + if pathSeen[f.Path] { + continue } + pathSeen[f.Path] = true - lang := p.Language().Lang + plink := of.Permalink() - if s.h.Configs.IsMultihost && !strings.HasPrefix(a, "/"+lang) { - // These need to be in its language root. - a = path.Join(lang, a) - } + for _, a := range p.Aliases() { + isRelative := !strings.HasPrefix(a, "/") - err = s.writeDestAlias(a, plink, f, p) - if err != nil { - return true + if isRelative { + // Make alias relative, where "." will be on the + // same directory level as the current page. + basePath := path.Join(p.targetPaths().SubResourceBaseLink, "..") + a = path.Join(basePath, a) + + } else { + // Make sure AMP and similar doesn't clash with regular aliases. 
+ a = path.Join(f.Path, a) + } + + if s.conf.C.IsUglyURLSection(p.Section()) && !strings.HasSuffix(a, ".html") { + a += ".html" + } + + lang := p.Language().Lang + + if s.h.Configs.IsMultihost && !strings.HasPrefix(a, "/"+lang) { + // These need to be in its language root. + a = path.Join(lang, a) + } + + err := s.writeDestAlias(a, plink, f, p) + if err != nil { + return true, err + } } } - } - return false - }) - - return err + return false, nil + }, + } + return w.Walk(context.TODO()) } // renderMainLanguageRedirect creates a redirect to the main language home, diff --git a/hugolib/site_sections_test.go b/hugolib/site_sections_test.go index f5cb41d28..4d4ff965b 100644 --- a/hugolib/site_sections_test.go +++ b/hugolib/site_sections_test.go @@ -21,6 +21,7 @@ import ( qt "github.com/frankban/quicktest" "github.com/gohugoio/hugo/deps" + "github.com/gohugoio/hugo/htesting" "github.com/gohugoio/hugo/resources/kinds" "github.com/gohugoio/hugo/resources/page" ) @@ -31,8 +32,10 @@ func TestNestedSections(t *testing.T) { cfg, fs = newTestCfg() ) + tt := htesting.NewPinnedRunner(c, "") + cfg.Set("permalinks", map[string]string{ - "perm a": ":sections/:title", + "perm-a": ":sections/:title", }) pageTemplate := `--- @@ -127,7 +130,7 @@ PAG|{{ .Title }}|{{ $sect.InSection . }} {"elsewhere", func(c *qt.C, p page.Page) { c.Assert(len(p.Pages()), qt.Equals, 1) for _, p := range p.Pages() { - c.Assert(p.SectionsPath(), qt.Equals, "elsewhere") + c.Assert(p.SectionsPath(), qt.Equals, "/elsewhere") } }}, {"post", func(c *qt.C, p page.Page) { @@ -179,8 +182,7 @@ PAG|{{ .Title }}|{{ $sect.InSection . }} c.Assert(home.IsHome(), qt.Equals, true) c.Assert(len(p.Sections()), qt.Equals, 0) c.Assert(home.CurrentSection(), qt.Equals, home) - active, err := home.InSection(home) - c.Assert(err, qt.IsNil) + active := home.InSection(home) c.Assert(active, qt.Equals, true) c.Assert(p.FirstSection(), qt.Equals, p) c.Assert(len(p.Ancestors()), qt.Equals, 1) @@ -208,29 +210,22 @@ PAG|{{ .Title }}|{{ $sect.InSection . }} } c.Assert(child.CurrentSection(), qt.Equals, p) - active, err := child.InSection(p) - c.Assert(err, qt.IsNil) + active := child.InSection(p) c.Assert(active, qt.Equals, true) - active, err = p.InSection(child) - c.Assert(err, qt.IsNil) + active = p.InSection(child) c.Assert(active, qt.Equals, true) - active, err = p.InSection(getPage(p, "/")) - c.Assert(err, qt.IsNil) + active = p.InSection(getPage(p, "/")) c.Assert(active, qt.Equals, false) - isAncestor, err := p.IsAncestor(child) - c.Assert(err, qt.IsNil) + isAncestor := p.IsAncestor(child) c.Assert(isAncestor, qt.Equals, true) - isAncestor, err = child.IsAncestor(p) - c.Assert(err, qt.IsNil) + isAncestor = child.IsAncestor(p) c.Assert(isAncestor, qt.Equals, false) - isDescendant, err := p.IsDescendant(child) - c.Assert(err, qt.IsNil) + isDescendant := p.IsDescendant(child) c.Assert(isDescendant, qt.Equals, false) - isDescendant, err = child.IsDescendant(p) - c.Assert(err, qt.IsNil) + isDescendant = child.IsDescendant(p) c.Assert(isDescendant, qt.Equals, true) } @@ -254,32 +249,26 @@ PAG|{{ .Title }}|{{ $sect.InSection . 
}} c.Assert(len(p.Ancestors()), qt.Equals, 3) l1 := getPage(p, "/l1") - isDescendant, err := l1.IsDescendant(p) - c.Assert(err, qt.IsNil) + isDescendant := l1.IsDescendant(p) c.Assert(isDescendant, qt.Equals, false) - isDescendant, err = l1.IsDescendant(nil) - c.Assert(err, qt.IsNil) + isDescendant = l1.IsDescendant(nil) c.Assert(isDescendant, qt.Equals, false) - isDescendant, err = nilp.IsDescendant(p) - c.Assert(err, qt.IsNil) + isDescendant = nilp.IsDescendant(p) c.Assert(isDescendant, qt.Equals, false) - isDescendant, err = p.IsDescendant(l1) - c.Assert(err, qt.IsNil) + isDescendant = p.IsDescendant(l1) c.Assert(isDescendant, qt.Equals, true) - isAncestor, err := l1.IsAncestor(p) - c.Assert(err, qt.IsNil) + isAncestor := l1.IsAncestor(p) c.Assert(isAncestor, qt.Equals, true) - isAncestor, err = p.IsAncestor(l1) - c.Assert(err, qt.IsNil) + isAncestor = p.IsAncestor(l1) c.Assert(isAncestor, qt.Equals, false) c.Assert(p.FirstSection(), qt.Equals, l1) - isAncestor, err = p.IsAncestor(nil) - c.Assert(err, qt.IsNil) + isAncestor = p.IsAncestor(nil) c.Assert(isAncestor, qt.Equals, false) - isAncestor, err = nilp.IsAncestor(l1) - c.Assert(err, qt.IsNil) c.Assert(isAncestor, qt.Equals, false) + + l3 := getPage(p, "/l1/l2/l3") + c.Assert(l3.FirstSection(), qt.Equals, l1) }}, {"perm a,link", func(c *qt.C, p page.Page) { c.Assert(p.Title(), qt.Equals, "T9_-1") @@ -294,15 +283,14 @@ PAG|{{ .Title }}|{{ $sect.InSection . }} }}, } - home := s.getPage(kinds.KindHome) + home := s.getPageOldVersion(kinds.KindHome) for _, test := range tests { test := test - t.Run(fmt.Sprintf("sections %s", test.sections), func(t *testing.T) { - t.Parallel() - c := qt.New(t) + tt.Run(fmt.Sprintf("sections %s", test.sections), func(c *qt.C) { + c.Parallel() sections := strings.Split(test.sections, ",") - p := s.getPage(kinds.KindSection, sections...) + p := s.getPageOldVersion(kinds.KindSection, sections...) c.Assert(p, qt.Not(qt.IsNil), qt.Commentf(fmt.Sprint(sections))) if p.Pages() != nil { @@ -319,19 +307,14 @@ PAG|{{ .Title }}|{{ $sect.InSection . }} c.Assert(len(home.Sections()), qt.Equals, 9) c.Assert(s.Sections(), deepEqualsPages, home.Sections()) - rootPage := s.getPage(kinds.KindPage, "mypage.md") + rootPage := s.getPageOldVersion(kinds.KindPage, "mypage.md") c.Assert(rootPage, qt.Not(qt.IsNil)) c.Assert(rootPage.Parent().IsHome(), qt.Equals, true) // https://github.com/gohugoio/hugo/issues/6365 c.Assert(rootPage.Sections(), qt.HasLen, 0) - // Add a odd test for this as this looks a little bit off, but I'm not in the mood - // to think too hard a out this right now. It works, but people will have to spell - // out the directory name as is. - // If we later decide to do something about this, we will have to do some normalization in - // getPage. 
- // TODO(bep) - sectionWithSpace := s.getPage(kinds.KindSection, "Spaces in Section") + sectionWithSpace := s.getPageOldVersion(kinds.KindSection, "Spaces in Section") + // s.h.pageTrees.debugPrint() c.Assert(sectionWithSpace, qt.Not(qt.IsNil)) c.Assert(sectionWithSpace.RelPermalink(), qt.Equals, "/spaces-in-section/") @@ -381,3 +364,37 @@ Next: {{ with .NextInSection }}{{ .RelPermalink }}{{ end }}| b.AssertFileContent("public/blog/cool/cool2/index.html", "Prev: |", "Next: /blog/cool/cool1/|") } + +func TestSectionEntries(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "https://example.com/" +-- content/myfirstsection/p1.md -- +--- +title: "P1" +--- +P1 +-- content/a/b/c/_index.md -- +--- +title: "C" +--- +C +-- content/a/b/c/mybundle/index.md -- +--- +title: "My Bundle" +--- +-- layouts/_default/list.html -- +Kind: {{ .Kind }}|RelPermalink: {{ .RelPermalink }}|SectionsPath: {{ .SectionsPath }}|SectionsEntries: {{ .SectionsEntries }}|Len: {{ len .SectionsEntries }}| +-- layouts/_default/single.html -- +Kind: {{ .Kind }}|RelPermalink: {{ .RelPermalink }}|SectionsPath: {{ .SectionsPath }}|SectionsEntries: {{ .SectionsEntries }}|Len: {{ len .SectionsEntries }}| +` + + b := Test(t, files) + + b.AssertFileContent("public/myfirstsection/p1/index.html", "RelPermalink: /myfirstsection/p1/|SectionsPath: /myfirstsection|SectionsEntries: [myfirstsection]|Len: 1") + b.AssertFileContent("public/a/b/c/index.html", "RelPermalink: /a/b/c/|SectionsPath: /a/b/c|SectionsEntries: [a b c]|Len: 3") + b.AssertFileContent("public/a/b/c/mybundle/index.html", "Kind: page|RelPermalink: /a/b/c/mybundle/|SectionsPath: /a/b/c|SectionsEntries: [a b c]|Len: 3") + b.AssertFileContent("public/index.html", "Kind: home|RelPermalink: /|SectionsPath: /|SectionsEntries: []|Len: 0") +} diff --git a/hugolib/site_stats_test.go b/hugolib/site_stats_test.go index 4ed6411d2..167194ef5 100644 --- a/hugolib/site_stats_test.go +++ b/hugolib/site_stats_test.go @@ -94,5 +94,38 @@ aliases: [/Ali%d] helpers.ProcessingStatsTable(&buff, stats...) 
- c.Assert(buff.String(), qt.Contains, "Pages | 19 | 6") + c.Assert(buff.String(), qt.Contains, "Pages | 21 | 7") +} + +func TestSiteLastmod(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "https://example.com/" +-- content/_index.md -- +--- +date: 2023-01-01 +--- +-- content/posts/_index.md -- +--- +date: 2023-02-01 +--- +-- content/posts/post-1.md -- +--- +date: 2023-03-01 +--- +-- content/posts/post-2.md -- +--- +date: 2023-04-01 +--- +-- layouts/index.html -- +site.Lastmod: {{ .Site.Lastmod.Format "2006-01-02" }} +site.LastChange: {{ .Site.LastChange.Format "2006-01-02" }} +home.Lastmod: {{ site.Home.Lastmod.Format "2006-01-02" }} + +` + b := Test(t, files) + + b.AssertFileContent("public/index.html", "site.Lastmod: 2023-04-01\nsite.LastChange: 2023-04-01\nhome.Lastmod: 2023-01-01") } diff --git a/hugolib/site_test.go b/hugolib/site_test.go index 57c6bbabe..cf0d4a032 100644 --- a/hugolib/site_test.go +++ b/hugolib/site_test.go @@ -419,7 +419,6 @@ Main section page: {{ .RelPermalink }} } func TestMainSectionsMoveToSite(t *testing.T) { - t.Run("defined in params", func(t *testing.T) { t.Parallel() @@ -510,7 +509,6 @@ MainSections Params: [mysect]| MainSections Site method: [mysect]| `) }) - } // Issue #1176 @@ -718,7 +716,7 @@ func TestOrderedPages(t *testing.T) { s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Configs: configs}, BuildCfg{SkipRender: true}) - if s.getPage(kinds.KindSection, "sect").Pages()[1].Title() != "Three" || s.getPage(kinds.KindSection, "sect").Pages()[2].Title() != "Four" { + if s.getPageOldVersion(kinds.KindSection, "sect").Pages()[1].Title() != "Three" || s.getPageOldVersion(kinds.KindSection, "sect").Pages()[2].Title() != "Four" { t.Error("Pages in unexpected order.") } @@ -1011,7 +1009,7 @@ func TestRefLinking(t *testing.T) { t.Parallel() site := setupLinkingMockSite(t) - currentPage := site.getPage(kinds.KindPage, "level2/level3/start.md") + currentPage := site.getPageOldVersion(kinds.KindPage, "level2/level3/start.md") if currentPage == nil { t.Fatalf("failed to find current page in site") } @@ -1071,7 +1069,7 @@ func TestRefLinking(t *testing.T) { func checkLinkCase(site *Site, link string, currentPage page.Page, relative bool, outputFormat string, expected string, t *testing.T, i int) { t.Helper() if out, err := site.refLink(link, currentPage, relative, outputFormat); err != nil || out != expected { - t.Fatalf("[%d] Expected %q from %q to resolve to %q, got %q - error: %s", i, link, currentPage.Pathc(), expected, out, err) + t.Fatalf("[%d] Expected %q from %q to resolve to %q, got %q - error: %s", i, link, currentPage.Path(), expected, out, err) } } @@ -1199,7 +1197,7 @@ writeStats = true writeStats = false `) - b.AssertDestinationExists("hugo_stats.json", false) + b.AssertFileExists("public/hugo_stats.json", false) b = r(` [build.buildStats] @@ -1245,8 +1243,7 @@ disableclasses = true [build.buildStats] enable = false `) - b.AssertDestinationExists("hugo_stats.json", false) - + b.AssertFileExists("public/hugo_stats.json", false) } func TestClassCollectorStress(t *testing.T) { diff --git a/hugolib/site_url_test.go b/hugolib/site_url_test.go index fd15eb5d3..2cc532854 100644 --- a/hugolib/site_url_test.go +++ b/hugolib/site_url_test.go @@ -23,20 +23,6 @@ import ( "github.com/gohugoio/hugo/resources/kinds" ) -const slugDoc1 = "---\ntitle: slug doc 1\nslug: slug-doc-1\naliases:\n - /sd1/foo/\n - /sd2\n - /sd3/\n - /sd4.html\n---\nslug doc 1 content\n" - -const slugDoc2 = `--- -title: slug doc 2 -slug: slug-doc-2 ---- -slug doc 2 content 
-` - -var urlFakeSource = [][2]string{ - {filepath.FromSlash("content/blue/doc1.md"), slugDoc1}, - {filepath.FromSlash("content/blue/doc2.md"), slugDoc2}, -} - func TestUglyURLsPerSection(t *testing.T) { t.Parallel() @@ -67,12 +53,12 @@ Do not go gentle into that good night. c.Assert(len(s.RegularPages()), qt.Equals, 2) - notUgly := s.getPage(kinds.KindPage, "sect1/p1.md") + notUgly := s.getPageOldVersion(kinds.KindPage, "sect1/p1.md") c.Assert(notUgly, qt.Not(qt.IsNil)) c.Assert(notUgly.Section(), qt.Equals, "sect1") c.Assert(notUgly.RelPermalink(), qt.Equals, "/sect1/p1/") - ugly := s.getPage(kinds.KindPage, "sect2/p2.md") + ugly := s.getPageOldVersion(kinds.KindPage, "sect2/p2.md") c.Assert(ugly, qt.Not(qt.IsNil)) c.Assert(ugly.Section(), qt.Equals, "sect2") c.Assert(ugly.RelPermalink(), qt.Equals, "/sect2/p2.html") @@ -124,7 +110,7 @@ Do not go gentle into that good night. c.Assert(len(s.RegularPages()), qt.Equals, 10) - sect1 := s.getPage(kinds.KindSection, "sect1") + sect1 := s.getPageOldVersion(kinds.KindSection, "sect1") c.Assert(sect1, qt.Not(qt.IsNil)) c.Assert(sect1.RelPermalink(), qt.Equals, "/ss1/") th.assertFileContent(filepath.Join("public", "ss1", "index.html"), "P1|URL: /ss1/|Next: /ss1/page/2/") diff --git a/hugolib/sitemap_test.go b/hugolib/sitemap_test.go index aae874d50..be13ba1f4 100644 --- a/hugolib/sitemap_test.go +++ b/hugolib/sitemap_test.go @@ -17,67 +17,107 @@ import ( "reflect" "testing" - qt "github.com/frankban/quicktest" "github.com/gohugoio/hugo/config" - "github.com/gohugoio/hugo/deps" ) -const sitemapTemplate = ` - {{ range .Data.Pages }} - {{- if .Permalink -}} - - {{ .Permalink }}{{ if not .Lastmod.IsZero }} - {{ safeHTML ( .Lastmod.Format "2006-01-02T15:04:05-07:00" ) }}{{ end }}{{ with .Sitemap.ChangeFreq }} - {{ . }}{{ end }}{{ if ge .Sitemap.Priority 0.0 }} - {{ .Sitemap.Priority }}{{ end }} - - {{- end -}} - {{ end }} -` - -func TestSitemapOutput(t *testing.T) { +func TestSitemapBasic(t *testing.T) { t.Parallel() - for _, internal := range []bool{false, true} { - doTestSitemapOutput(t, internal) - } + + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy"] +-- content/sect/doc1.md -- +--- +title: doc1 +--- +Doc1 +-- content/sect/doc2.md -- +--- +title: doc2 +--- +Doc2 +` + + b := NewIntegrationTestBuilder( + IntegrationTestConfig{ + T: t, + TxtarString: files, + }, + ).Build() + + b.AssertFileContent("public/sitemap.xml", " https://example.com/sect/doc1/", "doc2") } -func doTestSitemapOutput(t *testing.T, internal bool) { - c := qt.New(t) - cfg, fs := newTestCfg() - cfg.Set("baseURL", "http://auth/bub/") - cfg.Set("defaultContentLanguageInSubdir", false) - configs, err := loadTestConfigFromProvider(cfg) - c.Assert(err, qt.IsNil) - writeSource(t, fs, "layouts/sitemap.xml", sitemapTemplate) - // We want to check that the 404 page is not included in the sitemap - // output. This template should have no effect either way, but include - // it for the clarity. 
- writeSource(t, fs, "layouts/404.html", "Not found") +func TestSitemapMultilingual(t *testing.T) { + t.Parallel() - depsCfg := deps.DepsCfg{Fs: fs, Configs: configs} + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy"] +defaultContentLanguage = "en" +[languages] +[languages.en] +weight = 1 +languageName = "English" +[languages.nn] +weight = 2 +languageName = "Nynorsk" +-- content/sect/doc1.md -- +--- +title: doc1 +--- +Doc1 +-- content/sect/doc2.md -- +--- +title: doc2 +--- +Doc2 +-- content/sect/doc2.nn.md -- +--- +title: doc2 +--- +Doc2 +` - writeSourcesToSource(t, "content", fs, weightedSources...) - s := buildSingleSite(t, depsCfg, BuildCfg{}) - th := newTestHelper(s.conf, s.Fs, t) - outputSitemap := "public/sitemap.xml" + b := NewIntegrationTestBuilder( + IntegrationTestConfig{ + T: t, + TxtarString: files, + }, + ).Build() - th.assertFileContent(outputSitemap, - // Regular page - " http://auth/bub/sect/doc1/", - // Home page - "http://auth/bub/", - // Section - "http://auth/bub/sect/", - // Tax terms - "http://auth/bub/categories/", - // Tax list - "http://auth/bub/categories/hugo/", - ) + b.AssertFileContent("public/sitemap.xml", "https://example.com/en/sitemap.xml", "https://example.com/nn/sitemap.xml") + b.AssertFileContent("public/en/sitemap.xml", " https://example.com/sect/doc1/", "doc2") + b.AssertFileContent("public/nn/sitemap.xml", " https://example.com/nn/sect/doc2/") +} - content := readWorkingDir(th, th.Fs, outputSitemap) - c.Assert(content, qt.Not(qt.Contains), "404") - c.Assert(content, qt.Not(qt.Contains), "") +// https://github.com/gohugoio/hugo/issues/5910 +func TestSitemapOutputFormats(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy"] +-- content/blog/html-amp.md -- +--- +Title: AMP and HTML +outputs: [ "html", "amp" ] +--- + +` + + b := NewIntegrationTestBuilder( + IntegrationTestConfig{ + T: t, + TxtarString: files, + }, + ).Build() + + // Should link to the HTML version. + b.AssertFileContent("public/sitemap.xml", " https://example.com/blog/html-amp/") } func TestParseSitemap(t *testing.T) { @@ -98,21 +138,3 @@ func TestParseSitemap(t *testing.T) { t.Errorf("Got \n%v expected \n%v", result, expected) } } - -// https://github.com/gohugoio/hugo/issues/5910 -func TestSitemapOutputFormats(t *testing.T) { - b := newTestSitesBuilder(t).WithSimpleConfigFile() - - b.WithContent("blog/html-amp.md", ` ---- -Title: AMP and HTML -outputs: [ "html", "amp" ] ---- - -`) - - b.Build(BuildCfg{}) - - // Should link to the HTML version. - b.AssertFileContent("public/sitemap.xml", " http://example.com/blog/html-amp/") -} diff --git a/hugolib/taxonomy_test.go b/hugolib/taxonomy_test.go index d4ded2058..3132cc485 100644 --- a/hugolib/taxonomy_test.go +++ b/hugolib/taxonomy_test.go @@ -38,6 +38,7 @@ func TestTaxonomiesCountOrder(t *testing.T) { cfg, fs := newTestCfg() + cfg.Set("titleCaseStyle", "none") cfg.Set("taxonomies", taxonomies) configs, err := loadTestConfigFromProvider(cfg) c.Assert(err, qt.IsNil) @@ -77,6 +78,7 @@ func TestTaxonomiesWithAndWithoutContentFile(t *testing.T) { func doTestTaxonomiesWithAndWithoutContentFile(t *testing.T, uglyURLs bool) { siteConfig := ` baseURL = "http://example.com/blog" +titleCaseStyle = "firstupper" uglyURLs = %t paginate = 1 defaultContentLanguage = "en" @@ -135,7 +137,7 @@ permalinkeds: } // 1. 
- b.AssertFileContent(pathFunc("public/categories/cat1/index.html"), "List", "cAt1") + b.AssertFileContent(pathFunc("public/categories/cat1/index.html"), "List", "CAt1") b.AssertFileContent(pathFunc("public/categories/index.html"), "Taxonomy Term Page", "Category Terms") // 2. @@ -168,7 +170,7 @@ permalinkeds: for taxonomy, count := range taxonomyTermPageCounts { msg := qt.Commentf(taxonomy) - term := s.getPage(kinds.KindTaxonomy, taxonomy) + term := s.getPageOldVersion(kinds.KindTaxonomy, taxonomy) b.Assert(term, qt.Not(qt.IsNil), msg) b.Assert(len(term.Pages()), qt.Equals, count, msg) @@ -177,7 +179,7 @@ permalinkeds: } } - cat1 := s.getPage(kinds.KindTerm, "categories", "cat1") + cat1 := s.getPageOldVersion(kinds.KindTerm, "categories", "cat1") b.Assert(cat1, qt.Not(qt.IsNil)) if uglyURLs { b.Assert(cat1.RelPermalink(), qt.Equals, "/blog/categories/cat1.html") @@ -185,8 +187,8 @@ permalinkeds: b.Assert(cat1.RelPermalink(), qt.Equals, "/blog/categories/cat1/") } - pl1 := s.getPage(kinds.KindTerm, "permalinkeds", "pl1") - permalinkeds := s.getPage(kinds.KindTaxonomy, "permalinkeds") + pl1 := s.getPageOldVersion(kinds.KindTerm, "permalinkeds", "pl1") + permalinkeds := s.getPageOldVersion(kinds.KindTaxonomy, "permalinkeds") b.Assert(pl1, qt.Not(qt.IsNil)) b.Assert(permalinkeds, qt.Not(qt.IsNil)) if uglyURLs { @@ -197,7 +199,7 @@ permalinkeds: b.Assert(permalinkeds.RelPermalink(), qt.Equals, "/blog/permalinkeds/") } - helloWorld := s.getPage(kinds.KindTerm, "others", "hello-hugo-world") + helloWorld := s.getPageOldVersion(kinds.KindTerm, "others", "hello-hugo-world") b.Assert(helloWorld, qt.Not(qt.IsNil)) b.Assert(helloWorld.Title(), qt.Equals, "Hello Hugo world") @@ -212,6 +214,7 @@ func TestTaxonomiesPathSeparation(t *testing.T) { config := ` baseURL = "https://example.com" +titleCaseStyle = "none" [taxonomies] "news/tag" = "news/tags" "news/category" = "news/categories" @@ -282,8 +285,8 @@ title: "This is S3s" b.AssertFileContent("public/t1/t2/t3s/t4/t5/index.html", "Taxonomy List Page 1|This is T5|Hello|https://example.com/t1/t2/t3s/t4/t5/|") b.AssertFileContent("public/t1/t2/t3s/t4/t5/t6/index.html", "Taxonomy List Page 1|t4/t5/t6|Hello|https://example.com/t1/t2/t3s/t4/t5/t6/|") - b.AssertFileContent("public/news/categories/index.html", "Taxonomy Term Page 1|News/Categories|Hello|https://example.com/news/categories/|") - b.AssertFileContent("public/t1/t2/t3s/index.html", "Taxonomy Term Page 1|T1/T2/T3s|Hello|https://example.com/t1/t2/t3s/|") + b.AssertFileContent("public/news/categories/index.html", "Taxonomy Term Page 1|categories|Hello|https://example.com/news/categories/|") + b.AssertFileContent("public/t1/t2/t3s/index.html", "Taxonomy Term Page 1|t3s|Hello|https://example.com/t1/t2/t3s/|") b.AssertFileContent("public/s1/s2/s3s/index.html", "Taxonomy Term Page 1|This is S3s|Hello|https://example.com/s1/s2/s3s/|") } @@ -326,8 +329,8 @@ Content. b.CreateSites().Build(BuildCfg{}) b.AssertFileContent("public/index.html", `
Hugo Rocks! 10
`) - b.AssertFileContent("public/categories/index.html", `
This is Cool 10
`) - b.AssertFileContent("public/tags/index.html", `
Rocks I say! 10
`) + b.AssertFileContent("public/categories/index.html", `
This Is Cool 10
`) + b.AssertFileContent("public/tags/index.html", `
Rocks I Say! 10
  • `) } // Issue 6213 @@ -355,15 +358,14 @@ categories: ["regular"] b.Assert(b.CheckExists("public/categories/regular/index.html"), qt.Equals, true) b.Assert(b.CheckExists("public/categories/drafts/index.html"), qt.Equals, false) - reg, _ := s.getPageNew(nil, "categories/regular") - dra, _ := s.getPageNew(nil, "categories/draft") + reg, _ := s.getPage(nil, "categories/regular") + dra, _ := s.getPage(nil, "categories/draft") b.Assert(reg, qt.Not(qt.IsNil)) b.Assert(dra, qt.IsNil) } func TestTaxonomiesIndexDraft(t *testing.T) { t.Parallel() - b := newTestSitesBuilder(t) b.WithContent( "categories/_index.md", `--- @@ -393,7 +395,7 @@ Content. b.Build(BuildCfg{}) b.AssertFileContentFn("public/index.html", func(s string) bool { - return !strings.Contains(s, "categories") + return !strings.Contains(s, "/categories/|") }) } @@ -468,36 +470,37 @@ categories: ["funny"] } func TestTaxonomiesRemoveOne(t *testing.T) { - b := newTestSitesBuilder(t).Running() - b.WithTemplates("index.html", ` - {{ $cats := .Site.Taxonomies.categories.cats }} - {{ if $cats }} - Len cats: {{ len $cats }} - {{ range $cats }} - Cats:|{{ .Page.RelPermalink }}| - {{ end }} - {{ end }} - {{ $funny := .Site.Taxonomies.categories.funny }} - {{ if $funny }} - Len funny: {{ len $funny }} - {{ range $funny }} - Funny:|{{ .Page.RelPermalink }}| - {{ end }} - {{ end }} - `) - - b.WithContent("p1.md", `--- + files := ` +-- hugo.toml -- +disableLiveReload = true +-- layouts/index.html -- +{{ $cats := .Site.Taxonomies.categories.cats }} +{{ if $cats }} +Len cats: {{ len $cats }} +{{ range $cats }} + Cats:|{{ .Page.RelPermalink }}| +{{ end }} +{{ end }} +{{ $funny := .Site.Taxonomies.categories.funny }} +{{ if $funny }} +Len funny: {{ len $funny }} +{{ range $funny }} + Funny:|{{ .Page.RelPermalink }}| +{{ end }} +{{ end }} +-- content/p1.md -- +--- title: Page categories: ["funny", "cats"] --- - `, "p2.md", `--- +-- content/p2.md -- +--- title: Page2 categories: ["funny", "cats"] --- - `, - ) - b.Build(BuildCfg{}) +` + b := TestRunning(t, files) b.AssertFileContent("public/index.html", ` Len cats: 2 @@ -514,7 +517,7 @@ categories: ["funny"] --- `) - b.Build(BuildCfg{}) + b.Build() b.AssertFileContent("public/index.html", ` Len cats: 1 @@ -660,6 +663,7 @@ func TestTaxonomiesDirectoryOverlaps(t *testing.T) { b.WithConfigFile("toml", ` baseURL = "https://example.org" +titleCaseStyle = "none" [taxonomies] abcdef = "abcdefs" @@ -688,18 +692,17 @@ abcdefgs: {{ template "print-page" $abcdefgs }}|IsAncestor: {{ $abcdefgs.IsAnces Page: /abcdefgh/|abcdefgh|section|Parent: /|CurrentSection: /abcdefgh/| Page: /abcdefgh/p1/|abcdefgh-p|page|Parent: /abcdefgh/|CurrentSection: /abcdefgh/| Page: /abcdefghijk/|abcdefghijk|page|Parent: /|CurrentSection: /| - Page: /abcdefghis/|Abcdefghis|taxonomy|Parent: /|CurrentSection: /| - Page: /abcdefgs/|Abcdefgs|taxonomy|Parent: /|CurrentSection: /| - Page: /abcdefs/|Abcdefs|taxonomy|Parent: /|CurrentSection: /| - abc: /abcdefgs/abc/|abc|term|Parent: /abcdefgs/|CurrentSection: /abcdefgs/| - abcdefgs: /abcdefgs/|Abcdefgs|taxonomy|Parent: /|CurrentSection: /| - abc: /abcdefgs/abc/|abc|term|Parent: /abcdefgs/|CurrentSection: /abcdefgs/|FirstSection: /|IsAncestor: false|IsDescendant: true - abcdefgs: /abcdefgs/|Abcdefgs|taxonomy|Parent: /|CurrentSection: /|FirstSection: /|IsAncestor: true|IsDescendant: false + Page: /abcdefghis/|abcdefghis|taxonomy|Parent: /|CurrentSection: /abcdefghis/| + Page: /abcdefgs/|abcdefgs|taxonomy|Parent: /|CurrentSection: /abcdefgs/| + Page: /abcdefs/|abcdefs|taxonomy|Parent: 
/|CurrentSection: /abcdefs/| + abc: /abcdefgs/abc/|abc|term|Parent: /abcdefgs/|CurrentSection: /abcdefgs/abc/| + abcdefgs: /abcdefgs/|abcdefgs|taxonomy|Parent: /|CurrentSection: /abcdefgs/| + abc: /abcdefgs/abc/|abc|term|Parent: /abcdefgs/|CurrentSection: /abcdefgs/abc/|FirstSection: /abcdefgs/|IsAncestor: false|IsDescendant: true + abcdefgs: /abcdefgs/|abcdefgs|taxonomy|Parent: /|CurrentSection: /abcdefgs/|FirstSection: /abcdefgs/|IsAncestor: true|IsDescendant: false `) } func TestTaxonomiesWeightSort(t *testing.T) { - files := ` -- layouts/index.html -- {{ $a := site.GetPage "tags/a"}} @@ -736,3 +739,22 @@ tags_weight: 40 b.AssertFileContent("public/index.html", `:/p1/|/p3/|/p2/|:`) } + +func TestTaxonomiesEmptyTagsString(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +[taxonomies] +tag = 'tags' +-- content/p1.md -- ++++ +title = "P1" +tags = '' ++++ +-- layouts/_default/single.html -- +Single. + +` + Test(t, files) +} diff --git a/hugolib/template_test.go b/hugolib/template_test.go index d18b4c6f0..1c60a88b3 100644 --- a/hugolib/template_test.go +++ b/hugolib/template_test.go @@ -20,12 +20,10 @@ import ( "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/config/allconfig" - "github.com/gohugoio/hugo/identity" qt "github.com/frankban/quicktest" "github.com/gohugoio/hugo/deps" "github.com/gohugoio/hugo/hugofs" - "github.com/gohugoio/hugo/tpl" ) func TestTemplateLookupOrder(t *testing.T) { @@ -536,56 +534,6 @@ with: Zero OK `) } -func TestTemplateDependencies(t *testing.T) { - b := newTestSitesBuilder(t).Running() - - b.WithTemplates("index.html", ` -{{ $p := site.GetPage "p1" }} -{{ partial "p1.html" $p }} -{{ partialCached "p2.html" "foo" }} -{{ partials.Include "p3.html" "data" }} -{{ partials.IncludeCached "p4.html" "foo" }} -{{ $p := partial "p5" }} -{{ partial "sub/p6.html" }} -{{ partial "P7.html" }} -{{ template "_default/foo.html" }} -Partial nested: {{ partial "p10" }} - -`, - "partials/p1.html", `ps: {{ .Render "li" }}`, - "partials/p2.html", `p2`, - "partials/p3.html", `p3`, - "partials/p4.html", `p4`, - "partials/p5.html", `p5`, - "partials/sub/p6.html", `p6`, - "partials/P7.html", `p7`, - "partials/p8.html", `p8 {{ partial "p9.html" }}`, - "partials/p9.html", `p9`, - "partials/p10.html", `p10 {{ partial "p11.html" }}`, - "partials/p11.html", `p11`, - "_default/foo.html", `foo`, - "_default/li.html", `li {{ partial "p8.html" }}`, - ) - - b.WithContent("p1.md", `--- -title: P1 ---- - - -`) - - b.Build(BuildCfg{}) - - s := b.H.Sites[0] - - templ, found := s.lookupTemplate("index.html") - b.Assert(found, qt.Equals, true) - - idset := make(map[identity.Identity]bool) - collectIdentities(idset, templ.(tpl.Info)) - b.Assert(idset, qt.HasLen, 11) -} - func TestTemplateGoIssues(t *testing.T) { b := newTestSitesBuilder(t) @@ -625,16 +573,6 @@ Population in Norway is 5 MILLIONS `) } -func collectIdentities(set map[identity.Identity]bool, provider identity.Provider) { - if ids, ok := provider.(identity.IdentitiesProvider); ok { - for _, id := range ids.GetIdentities() { - collectIdentities(set, id) - } - } else { - set[provider.GetIdentity()] = true - } -} - func TestPartialInline(t *testing.T) { b := newTestSitesBuilder(t) diff --git a/hugolib/testhelpers_test.go b/hugolib/testhelpers_test.go index cf054897d..e2bd57f3c 100644 --- a/hugolib/testhelpers_test.go +++ b/hugolib/testhelpers_test.go @@ -11,14 +11,10 @@ import ( "os" "path/filepath" "regexp" - "runtime" - "sort" - "strconv" "strings" "testing" "text/template" "time" - "unicode/utf8" 
"github.com/gohugoio/hugo/config/allconfig" "github.com/gohugoio/hugo/config/security" @@ -117,6 +113,7 @@ type filenameContent struct { func newTestSitesBuilder(t testing.TB) *sitesBuilder { v := config.New() v.Set("publishDir", "public") + v.Set("disableLiveReload", true) fs := hugofs.NewFromOld(afero.NewMemMapFs(), v) litterOptions := litter.Options{ @@ -718,6 +715,9 @@ func (s *sitesBuilder) DumpTxtar() string { skipRe := regexp.MustCompile(`^(public|resources|package-lock.json|go.sum)`) afero.Walk(s.Fs.Source, s.workingDir, func(path string, info fs.FileInfo, err error) error { + if err != nil { + return err + } rel := strings.TrimPrefix(path, s.workingDir+"/") if skipRe.MatchString(rel) { if info.IsDir() { @@ -754,7 +754,7 @@ func (s *sitesBuilder) AssertFileContent(filename string, matches ...string) { continue } if !strings.Contains(content, match) { - s.Fatalf("No match for \n%q in content\n%q\nin file %s\n", match, content, filename) + s.Assert(content, qt.Contains, match, qt.Commentf(match+" not in: \n"+content)) } } } @@ -819,13 +819,13 @@ func (s *sitesBuilder) CheckExists(filename string) bool { } func (s *sitesBuilder) GetPage(ref string) page.Page { - p, err := s.H.Sites[0].getPageNew(nil, ref) + p, err := s.H.Sites[0].getPage(nil, ref) s.Assert(err, qt.IsNil) return p } func (s *sitesBuilder) GetPageRel(p page.Page, ref string) page.Page { - p, err := s.H.Sites[0].getPageNew(p, ref) + p, err := s.H.Sites[0].getPage(p, ref) s.Assert(err, qt.IsNil) return p } @@ -902,17 +902,6 @@ func loadTestConfigFromProvider(cfg config.Provider) (*allconfig.Configs, error) return res, err } -func newTestCfgBasic() (config.Provider, *hugofs.Fs) { - mm := afero.NewMemMapFs() - v := config.New() - v.Set("publishDir", "public") - v.Set("defaultContentLanguageInSubdir", true) - - fs := hugofs.NewFromOld(hugofs.NewBaseFileDecorator(mm), v) - - return v, fs -} - func newTestCfg(withConfig ...func(cfg config.Provider) error) (config.Provider, *hugofs.Fs) { mm := afero.NewMemMapFs() cfg := config.New() @@ -1011,106 +1000,3 @@ func content(c resource.ContentProvider) string { } return ccs } - -func pagesToString(pages ...page.Page) string { - var paths []string - for _, p := range pages { - paths = append(paths, p.Pathc()) - } - sort.Strings(paths) - return strings.Join(paths, "|") -} - -func dumpPagesLinks(pages ...page.Page) { - var links []string - for _, p := range pages { - links = append(links, p.RelPermalink()) - } - sort.Strings(links) - - for _, link := range links { - fmt.Println(link) - } -} - -func dumpPages(pages ...page.Page) { - fmt.Println("---------") - for _, p := range pages { - fmt.Printf("Kind: %s Title: %-10s RelPermalink: %-10s Path: %-10s sections: %s Lang: %s\n", - p.Kind(), p.Title(), p.RelPermalink(), p.Pathc(), p.SectionsPath(), p.Lang()) - } -} - -func dumpSPages(pages ...*pageState) { - for i, p := range pages { - fmt.Printf("%d: Kind: %s Title: %-10s RelPermalink: %-10s Path: %-10s sections: %s\n", - i+1, - p.Kind(), p.Title(), p.RelPermalink(), p.Pathc(), p.SectionsPath()) - } -} - -func printStringIndexes(s string) { - lines := strings.Split(s, "\n") - i := 0 - - for _, line := range lines { - - for _, r := range line { - fmt.Printf("%-3s", strconv.Itoa(i)) - i += utf8.RuneLen(r) - } - i++ - fmt.Println() - for _, r := range line { - fmt.Printf("%-3s", string(r)) - } - fmt.Println() - - } -} - -// See https://github.com/golang/go/issues/19280 -// Not in use. 
-var parallelEnabled = true - -func parallel(t *testing.T) { - if parallelEnabled { - t.Parallel() - } -} - -func skipSymlink(t *testing.T) { - if runtime.GOOS == "windows" && os.Getenv("CI") == "" { - t.Skip("skip symlink test on local Windows (needs admin)") - } -} - -func captureStderr(f func() error) (string, error) { - old := os.Stderr - r, w, _ := os.Pipe() - os.Stderr = w - - err := f() - - w.Close() - os.Stderr = old - - var buf bytes.Buffer - io.Copy(&buf, r) - return buf.String(), err -} - -func captureStdout(f func() error) (string, error) { - old := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - err := f() - - w.Close() - os.Stdout = old - - var buf bytes.Buffer - io.Copy(&buf, r) - return buf.String(), err -} diff --git a/hugolib/translations.go b/hugolib/translations.go deleted file mode 100644 index 5fcbc9218..000000000 --- a/hugolib/translations.go +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package hugolib - -import ( - "github.com/gohugoio/hugo/resources/page" -) - -func pagesToTranslationsMap(sites []*Site) map[string]page.Pages { - out := make(map[string]page.Pages) - - for _, s := range sites { - s.pageMap.pageTrees.WalkLinkable(func(ss string, n *contentNode) bool { - p := n.p - // TranslationKey is implemented for all page types. - base := p.TranslationKey() - - pageTranslations, found := out[base] - if !found { - pageTranslations = make(page.Pages, 0) - } - - pageTranslations = append(pageTranslations, p) - out[base] = pageTranslations - - return false - }) - } - - return out -} - -func assignTranslationsToPages(allTranslations map[string]page.Pages, sites []*Site) { - for _, s := range sites { - s.pageMap.pageTrees.Walk(func(ss string, n *contentNode) bool { - p := n.p - base := p.TranslationKey() - translations, found := allTranslations[base] - if !found { - return false - } - p.setTranslations(translations) - return false - }) - } -} diff --git a/identity/finder.go b/identity/finder.go new file mode 100644 index 000000000..bd23d698e --- /dev/null +++ b/identity/finder.go @@ -0,0 +1,336 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package identity + +import ( + "fmt" + "sync" + + "github.com/gohugoio/hugo/compare" +) + +// NewFinder creates a new Finder. +// This is a thread safe implementation with a cache. 
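+// A minimal usage sketch, assuming a Manager populated with an illustrative
+// string identity (Hugo wires these up internally; see the tests later in
+// this patch):
+//
+//	m := NewManager("example")
+//	m.AddIdentity(StringIdentity("/layouts/_default/single.html"))
+//	f := NewFinder(FinderConfig{})
+//	if f.Contains(StringIdentity("/layouts/_default/single.html"), m, -1) > FinderNotFound {
+//		// m (or one of its nested managers) holds the given identity.
+//	}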
+func NewFinder(cfg FinderConfig) *Finder { + return &Finder{cfg: cfg, answers: make(map[ManagerIdentity]FinderResult), seenFindOnce: make(map[Identity]bool)} +} + +var searchIDPool = sync.Pool{ + New: func() interface{} { + return &searchID{seen: make(map[Manager]bool)} + }, +} + +func getSearchID() *searchID { + return searchIDPool.Get().(*searchID) +} + +func putSearchID(sid *searchID) { + sid.id = nil + sid.isDp = false + sid.isPeq = false + sid.hasEqer = false + sid.maxDepth = 0 + sid.dp = nil + sid.peq = nil + sid.eqer = nil + for k := range sid.seen { + delete(sid.seen, k) + } + searchIDPool.Put(sid) +} + +// GetSearchID returns a searchID from the pool. + +// Finder finds identities inside another. +type Finder struct { + cfg FinderConfig + + answers map[ManagerIdentity]FinderResult + muAnswers sync.RWMutex + + seenFindOnce map[Identity]bool + muSeenFindOnce sync.RWMutex +} + +type FinderResult int + +const ( + FinderNotFound FinderResult = iota + FinderFoundOneOfManyRepetition + FinderFoundOneOfMany + FinderFound +) + +// Contains returns whether in contains id. +func (f *Finder) Contains(id, in Identity, maxDepth int) FinderResult { + if id == Anonymous || in == Anonymous { + return FinderNotFound + } + + if id == GenghisKhan && in == GenghisKhan { + return FinderNotFound + } + + if id == GenghisKhan { + return FinderFound + } + + if id == in { + return FinderFound + } + + if id == nil || in == nil { + return FinderNotFound + } + + var ( + isDp bool + isPeq bool + + dp IsProbablyDependentProvider + peq compare.ProbablyEqer + ) + + if !f.cfg.Exact { + dp, isDp = id.(IsProbablyDependentProvider) + peq, isPeq = id.(compare.ProbablyEqer) + } + + eqer, hasEqer := id.(compare.Eqer) + + sid := getSearchID() + sid.id = id + sid.isDp = isDp + sid.isPeq = isPeq + sid.hasEqer = hasEqer + sid.dp = dp + sid.peq = peq + sid.eqer = eqer + sid.maxDepth = maxDepth + + defer putSearchID(sid) + + if r := f.checkOne(sid, in, 0); r > 0 { + return r + } + + m := GetDependencyManager(in) + if m != nil { + if r := f.checkManager(sid, m, 0); r > 0 { + return r + } + } + return FinderNotFound +} + +func (f *Finder) checkMaxDepth(sid *searchID, level int) FinderResult { + if sid.maxDepth >= 0 && level > sid.maxDepth { + return FinderNotFound + } + if level > 100 { + // This should never happen, but some false positives are probably better than a panic. + if !f.cfg.Exact { + return FinderFound + } + panic("too many levels") + } + return -1 +} + +func (f *Finder) checkManager(sid *searchID, m Manager, level int) FinderResult { + if r := f.checkMaxDepth(sid, level); r >= 0 { + return r + } + + if m == nil { + return FinderNotFound + } + if sid.seen[m] { + return FinderNotFound + } + sid.seen[m] = true + + f.muAnswers.RLock() + r, ok := f.answers[ManagerIdentity{Manager: m, Identity: sid.id}] + f.muAnswers.RUnlock() + if ok { + return r + } + + ids := m.getIdentities() + if len(ids) == 0 { + r = FinderNotFound + } else { + r = f.search(sid, ids, level) + } + + if r == FinderFoundOneOfMany { + // Don't cache this one. 
+ return r + } + + f.muAnswers.Lock() + f.answers[ManagerIdentity{Manager: m, Identity: sid.id}] = r + f.muAnswers.Unlock() + + return r +} + +func (f *Finder) checkOne(sid *searchID, v Identity, depth int) (r FinderResult) { + if ff, ok := v.(FindFirstManagerIdentityProvider); ok { + f.muSeenFindOnce.RLock() + mi := ff.FindFirstManagerIdentity() + seen := f.seenFindOnce[mi.Identity] + f.muSeenFindOnce.RUnlock() + if seen { + return FinderFoundOneOfManyRepetition + } + + r = f.doCheckOne(sid, mi.Identity, depth) + if r == 0 { + r = f.checkManager(sid, mi.Manager, depth) + } + + if r > FinderFoundOneOfManyRepetition { + f.muSeenFindOnce.Lock() + // Double check. + if f.seenFindOnce[mi.Identity] { + f.muSeenFindOnce.Unlock() + return FinderFoundOneOfManyRepetition + } + f.seenFindOnce[mi.Identity] = true + f.muSeenFindOnce.Unlock() + r = FinderFoundOneOfMany + } + return r + } else { + return f.doCheckOne(sid, v, depth) + } +} + +func (f *Finder) doCheckOne(sid *searchID, v Identity, depth int) FinderResult { + id2 := Unwrap(v) + if id2 == Anonymous { + return FinderNotFound + } + id := sid.id + if sid.hasEqer { + if sid.eqer.Eq(id2) { + return FinderFound + } + } else if id == id2 { + return FinderFound + } + + if f.cfg.Exact { + return FinderNotFound + } + + if id2 == nil { + return FinderNotFound + } + + if id2 == GenghisKhan { + return FinderFound + } + + if id.IdentifierBase() == id2.IdentifierBase() { + return FinderFound + } + + if sid.isDp && sid.dp.IsProbablyDependent(id2) { + return FinderFound + } + + if sid.isPeq && sid.peq.ProbablyEq(id2) { + return FinderFound + } + + if pdep, ok := id2.(IsProbablyDependencyProvider); ok && pdep.IsProbablyDependency(id) { + return FinderFound + } + + if peq, ok := id2.(compare.ProbablyEqer); ok && peq.ProbablyEq(id) { + return FinderFound + } + + return FinderNotFound +} + +// search searches for id in ids. +func (f *Finder) search(sid *searchID, ids Identities, depth int) FinderResult { + if len(ids) == 0 { + return FinderNotFound + } + + id := sid.id + + if id == Anonymous { + return FinderNotFound + } + + if !f.cfg.Exact && id == GenghisKhan { + return FinderNotFound + } + + for v := range ids { + r := f.checkOne(sid, v, depth) + if r > 0 { + return r + } + + m := GetDependencyManager(v) + if r := f.checkManager(sid, m, depth+1); r > 0 { + return r + } + } + + return FinderNotFound +} + +// FinderConfig provides configuration for the Finder. +// Note that we by default will use a strategy where probable matches are +// good enough. The primary use case for this is to identity the change set +// for a given changed identity (e.g. a template), and we don't want to +// have any false negatives there, but some false positives are OK. Also, speed is important. +type FinderConfig struct { + // Match exact matches only. + Exact bool +} + +// ManagerIdentity wraps a pair of Identity and Manager. +type ManagerIdentity struct { + Identity + Manager +} + +func (p ManagerIdentity) String() string { + return fmt.Sprintf("%s:%s", p.Identity.IdentifierBase(), p.Manager.IdentifierBase()) +} + +type searchID struct { + id Identity + isDp bool + isPeq bool + hasEqer bool + + maxDepth int + + seen map[Manager]bool + + dp IsProbablyDependentProvider + peq compare.ProbablyEqer + eqer compare.Eqer +} diff --git a/identity/finder_test.go b/identity/finder_test.go new file mode 100644 index 000000000..abfab9d75 --- /dev/null +++ b/identity/finder_test.go @@ -0,0 +1,58 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package provides ways to identify values in Hugo. Used for dependency tracking etc. +package identity_test + +import ( + "testing" + + "github.com/gohugoio/hugo/identity" +) + +func BenchmarkFinder(b *testing.B) { + m1 := identity.NewManager("") + m2 := identity.NewManager("") + m3 := identity.NewManager("") + m1.AddIdentity( + testIdentity{"base", "id1", "", "pe1"}, + testIdentity{"base2", "id2", "eq1", ""}, + m2, + m3, + ) + + b4 := testIdentity{"base4", "id4", "", ""} + b5 := testIdentity{"base5", "id5", "", ""} + + m2.AddIdentity(b4) + + f := identity.NewFinder(identity.FinderConfig{}) + + b.Run("Find one", func(b *testing.B) { + for i := 0; i < b.N; i++ { + r := f.Contains(b4, m1, -1) + if r == 0 { + b.Fatal("not found") + } + } + }) + + b.Run("Find none", func(b *testing.B) { + for i := 0; i < b.N; i++ { + r := f.Contains(b5, m1, -1) + if r > 0 { + b.Fatal("found") + } + } + }) +} diff --git a/identity/identity.go b/identity/identity.go index e73951caf..ccb2f6e79 100644 --- a/identity/identity.go +++ b/identity/identity.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,160 +11,243 @@ // See the License for the specific language governing permissions and // limitations under the License. +// Package provides ways to identify values in Hugo. Used for dependency tracking etc. package identity import ( + "fmt" + "path" "path/filepath" + "sort" "strings" "sync" "sync/atomic" + + "github.com/gohugoio/hugo/common/types" + "github.com/gohugoio/hugo/compare" ) -// NewManager creates a new Manager starting at id. -func NewManager(id Provider) Manager { - return &identityManager{ - Provider: id, - ids: Identities{id.GetIdentity(): id}, +const ( + // Anonymous is an Identity that can be used when identity doesn't matter. + Anonymous = StringIdentity("__anonymous") + + // GenghisKhan is an Identity everyone relates to. + GenghisKhan = StringIdentity("__genghiskhan") +) + +var NopManager = new(nopManager) + +// NewIdentityManager creates a new Manager. +func NewManager(name string, opts ...ManagerOption) Manager { + idm := &identityManager{ + Identity: Anonymous, + name: name, + ids: Identities{}, } + + for _, o := range opts { + o(idm) + } + + return idm } -// NewPathIdentity creates a new Identity with the two identifiers -// type and path. -func NewPathIdentity(typ, pat string) PathIdentity { - pat = strings.ToLower(strings.TrimPrefix(filepath.ToSlash(pat), "/")) - return PathIdentity{Type: typ, Path: pat} +// CleanString cleans s to be suitable as an identifier. +func CleanString(s string) string { + s = strings.ToLower(s) + s = strings.TrimPrefix(filepath.ToSlash(s), "/") + return path.Clean(s) } -// Identities stores identity providers. 
-type Identities map[Identity]Provider +// CleanStringIdentity cleans s to be suitable as an identifier and wraps it in a StringIdentity. +func CleanStringIdentity(s string) StringIdentity { + return StringIdentity(CleanString(s)) +} -func (ids Identities) search(depth int, id Identity) Provider { - if v, found := ids[id.GetIdentity()]; found { - return v - } - - depth++ - - // There may be infinite recursion in templates. - if depth > 100 { - // Bail out. - return nil - } - - for _, v := range ids { - switch t := v.(type) { - case IdentitiesProvider: - if nested := t.GetIdentities().search(depth, id); nested != nil { - return nested - } - } +// GetDependencyManager returns the DependencyManager from v or nil if none found. +func GetDependencyManager(v any) Manager { + switch vv := v.(type) { + case Manager: + return vv + case types.Unwrapper: + return GetDependencyManager(vv.Unwrapv()) + case DependencyManagerProvider: + return vv.GetDependencyManager() } return nil } -// IdentitiesProvider provides all Identities. -type IdentitiesProvider interface { - GetIdentities() Identities -} +// GetDependencyManagerForScope returns the DependencyManager for the given scope from v or nil if none found. +// Note that it will fall back to an unscoped manager if none found for the given scope. +func GetDependencyManagerForScope(v any, scope int) Manager { + switch vv := v.(type) { + case DependencyManagerScopedProvider: + return vv.GetDependencyManagerForScope(scope) + case types.Unwrapper: + return GetDependencyManagerForScope(vv.Unwrapv(), scope) + case Manager: + return vv + case DependencyManagerProvider: + return vv.GetDependencyManager() -// Identity represents an thing that can provide an identify. This can be -// any Go type, but the Identity returned by GetIdentify must be hashable. -type Identity interface { - Provider - Name() string -} - -// Manager manages identities, and is itself a Provider of Identity. -type Manager interface { - SearchProvider - Add(ids ...Provider) - Reset() -} - -// SearchProvider provides access to the chained set of identities. -type SearchProvider interface { - Provider - IdentitiesProvider - Search(id Identity) Provider -} - -// A PathIdentity is a common identity identified by a type and a path, e.g. "layouts" and "_default/single.html". -type PathIdentity struct { - Type string - Path string -} - -// GetIdentity returns itself. -func (id PathIdentity) GetIdentity() Identity { - return id -} - -// Name returns the Path. -func (id PathIdentity) Name() string { - return id.Path -} - -// A KeyValueIdentity a general purpose identity. -type KeyValueIdentity struct { - Key string - Value string -} - -// GetIdentity returns itself. -func (id KeyValueIdentity) GetIdentity() Identity { - return id -} - -// Name returns the Key. -func (id KeyValueIdentity) Name() string { - return id.Key -} - -// Provider provides the comparable Identity. -type Provider interface { - // GetIdentity is for internal use. 
- GetIdentity() Identity -} - -type identityManager struct { - sync.Mutex - Provider - ids Identities -} - -func (im *identityManager) Add(ids ...Provider) { - im.Lock() - for _, id := range ids { - im.ids[id.GetIdentity()] = id } - im.Unlock() + return nil } -func (im *identityManager) Reset() { - im.Lock() - id := im.GetIdentity() - im.ids = Identities{id.GetIdentity(): id} - im.Unlock() +// FirstIdentity returns the first Identity in v, Anonymous if none found +func FirstIdentity(v any) Identity { + var result Identity = Anonymous + WalkIdentitiesShallow(v, func(level int, id Identity) bool { + result = id + return true + }) + + return result } -// TODO(bep) these identities are currently only read on server reloads -// so there should be no concurrency issues, but that may change. -func (im *identityManager) GetIdentities() Identities { - im.Lock() - defer im.Unlock() - return im.ids +// PrintIdentityInfo is used for debugging/tests only. +func PrintIdentityInfo(v any) { + WalkIdentitiesDeep(v, func(level int, id Identity) bool { + var s string + if idm, ok := id.(*identityManager); ok { + s = " " + idm.name + } + fmt.Printf("%s%s (%T)%s\n", strings.Repeat(" ", level), id.IdentifierBase(), id, s) + return false + }) } -func (im *identityManager) Search(id Identity) Provider { - im.Lock() - defer im.Unlock() - return im.ids.search(0, id.GetIdentity()) +func Unwrap(id Identity) Identity { + switch t := id.(type) { + case IdentityProvider: + return t.GetIdentity() + default: + return id + } } -// Incrementer increments and returns the value. -// Typically used for IDs. -type Incrementer interface { - Incr() int +// WalkIdentitiesDeep walks identities in v and applies cb to every identity found. +// Return true from cb to terminate. +// If deep is true, it will also walk nested Identities in any Manager found. +func WalkIdentitiesDeep(v any, cb func(level int, id Identity) bool) { + seen := make(map[Identity]bool) + walkIdentities(v, 0, true, seen, cb) +} + +// WalkIdentitiesShallow will not walk into a Manager's Identities. +// See WalkIdentitiesDeep. +// cb is called for every Identity found and returns whether to terminate the walk. +func WalkIdentitiesShallow(v any, cb func(level int, id Identity) bool) { + walkIdentitiesShallow(v, 0, cb) +} + +// WithOnAddIdentity sets a callback that will be invoked when an identity is added to the manager. +func WithOnAddIdentity(f func(id Identity)) ManagerOption { + return func(m *identityManager) { + m.onAddIdentity = f + } +} + +// DependencyManagerProvider provides a manager for dependencies. +type DependencyManagerProvider interface { + GetDependencyManager() Manager +} + +// DependencyManagerProviderFunc is a function that implements the DependencyManagerProvider interface. +type DependencyManagerProviderFunc func() Manager + +func (d DependencyManagerProviderFunc) GetDependencyManager() Manager { + return d() +} + +// DependencyManagerScopedProvider provides a manager for dependencies with a given scope. +type DependencyManagerScopedProvider interface { + GetDependencyManagerForScope(scope int) Manager +} + +// ForEeachIdentityProvider provides a way iterate over identities. +type ForEeachIdentityProvider interface { + // ForEeachIdentityProvider calls cb for each Identity. + // If cb returns true, the iteration is terminated. + ForEeachIdentity(cb func(id Identity) bool) +} + +// ForEeachIdentityByNameProvider provides a way to look up identities by name. 
+type ForEeachIdentityByNameProvider interface { + // ForEeachIdentityByName calls cb for each Identity that relates to name. + // If cb returns true, the iteration is terminated. + ForEeachIdentityByName(name string, cb func(id Identity) bool) +} + +type FindFirstManagerIdentityProvider interface { + Identity + FindFirstManagerIdentity() ManagerIdentity +} + +func NewFindFirstManagerIdentityProvider(m Manager, id Identity) FindFirstManagerIdentityProvider { + return findFirstManagerIdentity{ + Identity: Anonymous, + ManagerIdentity: ManagerIdentity{ + Manager: m, Identity: id, + }, + } +} + +type findFirstManagerIdentity struct { + Identity + ManagerIdentity +} + +func (f findFirstManagerIdentity) FindFirstManagerIdentity() ManagerIdentity { + return f.ManagerIdentity +} + +// Identities stores identity providers. +type Identities map[Identity]bool + +func (ids Identities) AsSlice() []Identity { + s := make([]Identity, len(ids)) + i := 0 + for v := range ids { + s[i] = v + i++ + } + sort.Slice(s, func(i, j int) bool { + return s[i].IdentifierBase() < s[j].IdentifierBase() + }) + + return s +} + +func (ids Identities) String() string { + var sb strings.Builder + i := 0 + for id := range ids { + sb.WriteString(fmt.Sprintf("[%s]", id.IdentifierBase())) + if i < len(ids)-1 { + sb.WriteString(", ") + } + i++ + } + return sb.String() +} + +// Identity represents a thing in Hugo (a Page, a template etc.) +// Any implementation must be comparable/hashable. +type Identity interface { + IdentifierBase() string +} + +// IdentityGroupProvider can be implemented by tightly connected types. +// Current use case is Resource transformation via Hugo Pipes. +type IdentityGroupProvider interface { + GetIdentityGroup() Identity +} + +// IdentityProvider can be implemented by types that isn't itself and Identity, +// usually because they're not comparable/hashable. +type IdentityProvider interface { + GetIdentity() Identity } // IncrementByOne implements Incrementer adding 1 every time Incr is called. @@ -175,3 +258,234 @@ type IncrementByOne struct { func (c *IncrementByOne) Incr() int { return int(atomic.AddUint64(&c.counter, uint64(1))) } + +// Incrementer increments and returns the value. +// Typically used for IDs. +type Incrementer interface { + Incr() int +} + +// IsProbablyDependentProvider is an optional interface for Identity. +type IsProbablyDependentProvider interface { + IsProbablyDependent(other Identity) bool +} + +// IsProbablyDependencyProvider is an optional interface for Identity. +type IsProbablyDependencyProvider interface { + IsProbablyDependency(other Identity) bool +} + +// Manager is an Identity that also manages identities, typically dependencies. +type Manager interface { + Identity + AddIdentity(ids ...Identity) + GetIdentity() Identity + Reset() + getIdentities() Identities +} + +type ManagerOption func(m *identityManager) + +// StringIdentity is an Identity that wraps a string. +type StringIdentity string + +func (s StringIdentity) IdentifierBase() string { + return string(s) +} + +type identityManager struct { + Identity + + // Only used for debugging. + name string + + // mu protects _changes_ to this manager, + // reads currently assumes no concurrent writes. + mu sync.RWMutex + ids Identities + + // Hooks used in debugging. 
+ onAddIdentity func(id Identity) +} + +func (im *identityManager) AddIdentity(ids ...Identity) { + im.mu.Lock() + + for _, id := range ids { + if id == Anonymous { + continue + } + if _, found := im.ids[id]; !found { + if im.onAddIdentity != nil { + im.onAddIdentity(id) + } + im.ids[id] = true + } + } + im.mu.Unlock() +} + +func (im *identityManager) ContainsIdentity(id Identity) FinderResult { + if im.Identity != Anonymous && id == im.Identity { + return FinderFound + } + + f := NewFinder(FinderConfig{Exact: true}) + r := f.Contains(id, im, -1) + + return r +} + +// Managers are always anonymous. +func (im *identityManager) GetIdentity() Identity { + return im.Identity +} + +func (im *identityManager) Reset() { + im.mu.Lock() + im.ids = Identities{} + im.mu.Unlock() +} + +func (im *identityManager) GetDependencyManagerForScope(int) Manager { + return im +} + +func (im *identityManager) String() string { + return fmt.Sprintf("IdentityManager(%s)", im.name) +} + +// TODO(bep) these identities are currently only read on server reloads +// so there should be no concurrency issues, but that may change. +func (im *identityManager) getIdentities() Identities { + return im.ids +} + +type nopManager int + +func (m *nopManager) AddIdentity(ids ...Identity) { +} + +func (m *nopManager) IdentifierBase() string { + return "" +} + +func (m *nopManager) GetIdentity() Identity { + return Anonymous +} + +func (m *nopManager) Reset() { +} + +func (m *nopManager) getIdentities() Identities { + return nil +} + +// returns whether further walking should be terminated. +func walkIdentities(v any, level int, deep bool, seen map[Identity]bool, cb func(level int, id Identity) bool) { + if level > 20 { + panic("too deep") + } + var cbRecursive func(level int, id Identity) bool + cbRecursive = func(level int, id Identity) bool { + if id == nil { + return false + } + if deep && seen[id] { + return false + } + seen[id] = true + if cb(level, id) { + return true + } + + if deep { + if m := GetDependencyManager(id); m != nil { + for id2 := range m.getIdentities() { + if walkIdentitiesShallow(id2, level+1, cbRecursive) { + return true + } + } + } + } + return false + } + walkIdentitiesShallow(v, level, cbRecursive) +} + +// returns whether further walking should be terminated. +// Anonymous identities are skipped. 
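+// A minimal sketch of the exported WalkIdentitiesShallow defined earlier in
+// this file, assuming v is any value that may carry identities:
+//
+//	WalkIdentitiesShallow(v, func(level int, id Identity) bool {
+//		fmt.Println(level, id.IdentifierBase())
+//		return false // return true to stop the walk
+//	})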
+func walkIdentitiesShallow(v any, level int, cb func(level int, id Identity) bool) bool { + cb2 := func(level int, id Identity) bool { + if id == Anonymous { + return false + } + return cb(level, id) + } + + if id, ok := v.(Identity); ok { + if cb2(level, id) { + return true + } + } + + if ipd, ok := v.(IdentityProvider); ok { + if cb2(level, ipd.GetIdentity()) { + return true + } + } + + if ipdgp, ok := v.(IdentityGroupProvider); ok { + if cb2(level, ipdgp.GetIdentityGroup()) { + return true + } + } + + return false +} + +var ( + _ Identity = (*orIdentity)(nil) + _ compare.ProbablyEqer = (*orIdentity)(nil) +) + +func Or(a, b Identity) Identity { + return orIdentity{a: a, b: b} +} + +type orIdentity struct { + a, b Identity +} + +func (o orIdentity) IdentifierBase() string { + return o.a.IdentifierBase() +} + +func (o orIdentity) ProbablyEq(other any) bool { + otherID, ok := other.(Identity) + if !ok { + return false + } + + return probablyEq(o.a, otherID) || probablyEq(o.b, otherID) +} + +func probablyEq(a, b Identity) bool { + if a == b { + return true + } + + if a == Anonymous || b == Anonymous { + return false + } + + if a.IdentifierBase() == b.IdentifierBase() { + return true + } + + if a2, ok := a.(IsProbablyDependentProvider); ok { + return a2.IsProbablyDependent(b) + } + + return false +} diff --git a/identity/identity_test.go b/identity/identity_test.go index baf2628bb..d003caaf0 100644 --- a/identity/identity_test.go +++ b/identity/identity_test.go @@ -1,4 +1,4 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,79 +11,201 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package identity +package identity_test import ( "fmt" - "math/rand" - "strconv" "testing" qt "github.com/frankban/quicktest" + "github.com/gohugoio/hugo/identity" + "github.com/gohugoio/hugo/identity/identitytesting" ) -func TestIdentityManager(t *testing.T) { - c := qt.New(t) - - id1 := testIdentity{name: "id1"} - im := NewManager(id1) - - c.Assert(im.Search(id1).GetIdentity(), qt.Equals, id1) - c.Assert(im.Search(testIdentity{name: "notfound"}), qt.Equals, nil) -} - func BenchmarkIdentityManager(b *testing.B) { - createIds := func(num int) []Identity { - ids := make([]Identity, num) + createIds := func(num int) []identity.Identity { + ids := make([]identity.Identity, num) for i := 0; i < num; i++ { - ids[i] = testIdentity{name: fmt.Sprintf("id%d", i)} + name := fmt.Sprintf("id%d", i) + ids[i] = &testIdentity{base: name, name: name} } return ids } - b.Run("Add", func(b *testing.B) { - c := qt.New(b) - b.StopTimer() - ids := createIds(b.N) - im := NewManager(testIdentity{"first"}) - b.StartTimer() - + b.Run("identity.NewManager", func(b *testing.B) { for i := 0; i < b.N; i++ { - im.Add(ids[i]) + m := identity.NewManager("") + if m == nil { + b.Fatal("manager is nil") + } } - - b.StopTimer() - c.Assert(im.GetIdentities(), qt.HasLen, b.N+1) }) - b.Run("Search", func(b *testing.B) { - c := qt.New(b) - b.StopTimer() + b.Run("Add unique", func(b *testing.B) { ids := createIds(b.N) - im := NewManager(testIdentity{"first"}) + im := identity.NewManager("") + b.ResetTimer() for i := 0; i < b.N; i++ { - im.Add(ids[i]) + im.AddIdentity(ids[i]) } - b.StartTimer() + b.StopTimer() + }) + b.Run("Add duplicates", func(b *testing.B) { + id := &testIdentity{base: "a", name: "b"} + im := identity.NewManager("") + + b.ResetTimer() for i := 0; i < b.N; i++ { - name := "id" + strconv.Itoa(rand.Intn(b.N)) - id := im.Search(testIdentity{name: name}) - c.Assert(id.GetIdentity().Name(), qt.Equals, name) + im.AddIdentity(id) } + + b.StopTimer() + }) + + b.Run("Nop StringIdentity const", func(b *testing.B) { + const id = identity.StringIdentity("test") + for i := 0; i < b.N; i++ { + identity.NopManager.AddIdentity(id) + } + }) + + b.Run("Nop StringIdentity const other package", func(b *testing.B) { + for i := 0; i < b.N; i++ { + identity.NopManager.AddIdentity(identitytesting.TestIdentity) + } + }) + + b.Run("Nop StringIdentity var", func(b *testing.B) { + id := identity.StringIdentity("test") + for i := 0; i < b.N; i++ { + identity.NopManager.AddIdentity(id) + } + }) + + b.Run("Nop pointer identity", func(b *testing.B) { + id := &testIdentity{base: "a", name: "b"} + for i := 0; i < b.N; i++ { + identity.NopManager.AddIdentity(id) + } + }) + + b.Run("Nop Anonymous", func(b *testing.B) { + for i := 0; i < b.N; i++ { + identity.NopManager.AddIdentity(identity.Anonymous) + } + }) +} + +func BenchmarkIsNotDependent(b *testing.B) { + runBench := func(b *testing.B, id1, id2 identity.Identity) { + for i := 0; i < b.N; i++ { + isNotDependent(id1, id2) + } + } + + newNestedManager := func(depth, count int) identity.Manager { + m1 := identity.NewManager("") + for i := 0; i < depth; i++ { + m2 := identity.NewManager("") + m1.AddIdentity(m2) + for j := 0; j < count; j++ { + id := fmt.Sprintf("id%d", j) + m2.AddIdentity(&testIdentity{id, id, "", ""}) + } + m1 = m2 + } + return m1 + } + + type depthCount struct { + depth int + count int + } + + for _, dc := range []depthCount{{10, 5}} { + b.Run(fmt.Sprintf("Nested not found %d %d", dc.depth, dc.count), func(b *testing.B) { + im := newNestedManager(dc.depth, dc.count) + id1 := 
identity.StringIdentity("idnotfound") + b.ResetTimer() + runBench(b, im, id1) + }) + } +} + +func TestIdentityManager(t *testing.T) { + c := qt.New(t) + + newNestedManager := func() identity.Manager { + m1 := identity.NewManager("") + m2 := identity.NewManager("") + m3 := identity.NewManager("") + m1.AddIdentity( + testIdentity{"base", "id1", "", "pe1"}, + testIdentity{"base2", "id2", "eq1", ""}, + m2, + m3, + ) + + m2.AddIdentity(testIdentity{"base4", "id4", "", ""}) + + return m1 + } + + c.Run("Anonymous", func(c *qt.C) { + im := newNestedManager() + c.Assert(im.GetIdentity(), qt.Equals, identity.Anonymous) + im.AddIdentity(identity.Anonymous) + c.Assert(isNotDependent(identity.Anonymous, identity.Anonymous), qt.IsTrue) + }) + + c.Run("GenghisKhan", func(c *qt.C) { + c.Assert(isNotDependent(identity.GenghisKhan, identity.GenghisKhan), qt.IsTrue) }) } type testIdentity struct { + base string name string + + idEq string + idProbablyEq string } -func (id testIdentity) GetIdentity() Identity { - return id +func (id testIdentity) Eq(other any) bool { + ot, ok := other.(testIdentity) + if !ok { + return false + } + if ot.idEq == "" || id.idEq == "" { + return false + } + return ot.idEq == id.idEq +} + +func (id testIdentity) IdentifierBase() string { + return id.base } func (id testIdentity) Name() string { return id.name } + +func (id testIdentity) ProbablyEq(other any) bool { + ot, ok := other.(testIdentity) + if !ok { + return false + } + if ot.idProbablyEq == "" || id.idProbablyEq == "" { + return false + } + return ot.idProbablyEq == id.idProbablyEq +} + +func isNotDependent(a, b identity.Identity) bool { + f := identity.NewFinder(identity.FinderConfig{}) + r := f.Contains(b, a, -1) + return r == 0 +} diff --git a/identity/identityhash.go b/identity/identityhash.go index ef7b5afa7..8760ff64d 100644 --- a/identity/identityhash.go +++ b/identity/identityhash.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -59,10 +59,10 @@ type keyer interface { // so rewrite the input slice for known identity types. func toHashable(v any) any { switch t := v.(type) { - case Provider: - return t.GetIdentity() case keyer: return t.Key() + case IdentityProvider: + return t.GetIdentity() default: return v } diff --git a/identity/identityhash_test.go b/identity/identityhash_test.go index 378c0160d..1ecaf7612 100644 --- a/identity/identityhash_test.go +++ b/identity/identityhash_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -29,7 +29,6 @@ func TestHashString(t *testing.T) { c.Assert(HashString(vals...), qt.Equals, "12599484872364427450") c.Assert(vals[2], qt.Equals, tstKeyer{"c"}) - } type tstKeyer struct { diff --git a/identity/identitytesting/identitytesting.go b/identity/identitytesting/identitytesting.go new file mode 100644 index 000000000..74f3ec540 --- /dev/null +++ b/identity/identitytesting/identitytesting.go @@ -0,0 +1,5 @@ +package identitytesting + +import "github.com/gohugoio/hugo/identity" + +const TestIdentity = identity.StringIdentity("__testIdentity") diff --git a/identity/predicate_identity.go b/identity/predicate_identity.go new file mode 100644 index 000000000..bad247867 --- /dev/null +++ b/identity/predicate_identity.go @@ -0,0 +1,78 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package provides ways to identify values in Hugo. Used for dependency tracking etc. +package identity + +import ( + "fmt" + "sync/atomic" + + hglob "github.com/gohugoio/hugo/hugofs/glob" +) + +// NewGlobIdentity creates a new Identity that +// is probably dependent on any other Identity +// that matches the given pattern. +func NewGlobIdentity(pattern string) Identity { + glob, err := hglob.GetGlob(pattern) + if err != nil { + panic(err) + } + + predicate := func(other Identity) bool { + return glob.Match(other.IdentifierBase()) + } + + return NewPredicateIdentity(predicate, nil) +} + +var predicateIdentityCounter = &atomic.Uint32{} + +type predicateIdentity struct { + id string + probablyDependent func(Identity) bool + probablyDependency func(Identity) bool +} + +var ( + _ IsProbablyDependencyProvider = &predicateIdentity{} + _ IsProbablyDependentProvider = &predicateIdentity{} +) + +// NewPredicateIdentity creates a new Identity that implements both IsProbablyDependencyProvider and IsProbablyDependentProvider +// using the provided functions, both of which are optional. +func NewPredicateIdentity( + probablyDependent func(Identity) bool, + probablyDependency func(Identity) bool, +) *predicateIdentity { + if probablyDependent == nil { + probablyDependent = func(Identity) bool { return false } + } + if probablyDependency == nil { + probablyDependency = func(Identity) bool { return false } + } + return &predicateIdentity{probablyDependent: probablyDependent, probablyDependency: probablyDependency, id: fmt.Sprintf("predicate%d", predicateIdentityCounter.Add(1))} +} + +func (id *predicateIdentity) IdentifierBase() string { + return id.id +} + +func (id *predicateIdentity) IsProbablyDependent(other Identity) bool { + return id.probablyDependent(other) +} + +func (id *predicateIdentity) IsProbablyDependency(other Identity) bool { + return id.probablyDependency(other) +} diff --git a/identity/predicate_identity_test.go b/identity/predicate_identity_test.go new file mode 100644 index 000000000..3a54dee75 --- /dev/null +++ b/identity/predicate_identity_test.go @@ -0,0 +1,58 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package provides ways to identify values in Hugo. Used for dependency tracking etc. +package identity + +import ( + "testing" + + qt "github.com/frankban/quicktest" +) + +func TestGlobIdentity(t *testing.T) { + c := qt.New(t) + + gid := NewGlobIdentity("/a/b/*") + + c.Assert(isNotDependent(gid, StringIdentity("/a/b/c")), qt.IsFalse) + c.Assert(isNotDependent(gid, StringIdentity("/a/c/d")), qt.IsTrue) + c.Assert(isNotDependent(StringIdentity("/a/b/c"), gid), qt.IsTrue) + c.Assert(isNotDependent(StringIdentity("/a/c/d"), gid), qt.IsTrue) +} + +func isNotDependent(a, b Identity) bool { + f := NewFinder(FinderConfig{}) + r := f.Contains(a, b, -1) + return r == 0 +} + +func TestPredicateIdentity(t *testing.T) { + c := qt.New(t) + + isDependent := func(id Identity) bool { + return id.IdentifierBase() == "foo" + } + isDependency := func(id Identity) bool { + return id.IdentifierBase() == "baz" + } + + id := NewPredicateIdentity(isDependent, isDependency) + + c.Assert(id.IsProbablyDependent(StringIdentity("foo")), qt.IsTrue) + c.Assert(id.IsProbablyDependent(StringIdentity("bar")), qt.IsFalse) + c.Assert(id.IsProbablyDependent(id), qt.IsFalse) + c.Assert(id.IsProbablyDependent(NewPredicateIdentity(isDependent, nil)), qt.IsFalse) + c.Assert(id.IsProbablyDependency(StringIdentity("baz")), qt.IsTrue) + c.Assert(id.IsProbablyDependency(StringIdentity("foo")), qt.IsFalse) +} diff --git a/identity/question.go b/identity/question.go new file mode 100644 index 000000000..78fcb8234 --- /dev/null +++ b/identity/question.go @@ -0,0 +1,57 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package identity + +import "sync" + +// NewQuestion creates a new question with the given identity. +func NewQuestion[T any](id Identity) *Question[T] { + return &Question[T]{ + Identity: id, + } +} + +// Answer takes a func that knows the answer. +// Note that this is a one-time operation, +// fn will not be invoked again if the question is already answered. +// Use Result to check if the question is answered. +func (q *Question[T]) Answer(fn func() T) { + q.mu.Lock() + defer q.mu.Unlock() + + if q.answered { + return + } + + q.fasit = fn() + q.answered = true +} + +// Result returns the fasit of the question (if answered), +// and a bool indicating if the question has been answered.
+func (q *Question[T]) Result() (any, bool) { + q.mu.RLock() + defer q.mu.RUnlock() + + return q.fasit, q.answered +} + +// A Question is defined by its Identity and can be answered once. +type Question[T any] struct { + Identity + fasit T + + mu sync.RWMutex + answered bool +} diff --git a/identity/question_test.go b/identity/question_test.go new file mode 100644 index 000000000..bf1e1d06d --- /dev/null +++ b/identity/question_test.go @@ -0,0 +1,38 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package identity + +import ( + "testing" + + qt "github.com/frankban/quicktest" +) + +func TestQuestion(t *testing.T) { + c := qt.New(t) + + q := NewQuestion[int](StringIdentity("2+2?")) + + v, ok := q.Result() + c.Assert(ok, qt.Equals, false) + c.Assert(v, qt.Equals, 0) + + q.Answer(func() int { + return 4 + }) + + v, ok = q.Result() + c.Assert(ok, qt.Equals, true) + c.Assert(v, qt.Equals, 4) +} diff --git a/langs/i18n/i18n_test.go b/langs/i18n/i18n_test.go index 8629c35fc..8d34e069d 100644 --- a/langs/i18n/i18n_test.go +++ b/langs/i18n/i18n_test.go @@ -20,7 +20,6 @@ import ( "testing" "github.com/bep/logg" - "github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/common/types" "github.com/gohugoio/hugo/config/testconfig" @@ -35,8 +34,6 @@ import ( "github.com/gohugoio/hugo/config" ) -var logger = loggers.NewDefault() - type i18nTest struct { name string data map[string][]byte @@ -390,14 +387,13 @@ other = "{{ . }} miesiąca" }, }, } { - c.Run(test.name, func(c *qt.C) { cfg := config.New() cfg.Set("enableMissingTranslationPlaceholders", true) cfg.Set("publishDir", "public") afs := afero.NewMemMapFs() - err := afero.WriteFile(afs, filepath.Join("i18n", test.lang+".toml"), []byte(test.templ), 0755) + err := afero.WriteFile(afs, filepath.Join("i18n", test.lang+".toml"), []byte(test.templ), 0o755) c.Assert(err, qt.IsNil) d, tp := prepareDeps(afs, cfg) @@ -409,9 +405,7 @@ other = "{{ . }} miesiąca" c.Assert(f(ctx, test.id, variant.Key), qt.Equals, variant.Value, qt.Commentf("input: %v", variant.Key)) c.Assert(d.Log.LoggCount(logg.LevelWarn), qt.Equals, 0) } - }) - } } @@ -429,8 +423,7 @@ type noCountField struct { Counts int } -type countMethod struct { -} +type countMethod struct{} func (c countMethod) Count() any { return 32.5 @@ -468,7 +461,7 @@ func prepareTranslationProvider(t testing.TB, test i18nTest, cfg config.Provider afs := afero.NewMemMapFs() for file, content := range test.data { - err := afero.WriteFile(afs, filepath.Join("i18n", file), []byte(content), 0755) + err := afero.WriteFile(afs, filepath.Join("i18n", file), []byte(content), 0o755) c.Assert(err, qt.IsNil) } diff --git a/langs/i18n/integration_test.go b/langs/i18n/integration_test.go index c010ac111..7deae6451 100644 --- a/langs/i18n/integration_test.go +++ b/langs/i18n/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/langs/i18n/translationProvider.go b/langs/i18n/translationProvider.go index 2c3c15710..ab5247413 100644 --- a/langs/i18n/translationProvider.go +++ b/langs/i18n/translationProvider.go @@ -46,9 +46,7 @@ func NewTranslationProvider() *TranslationProvider { // Update updates the i18n func in the provided Deps. func (tp *TranslationProvider) NewResource(dst *deps.Deps) error { - spec := source.NewSourceSpec(dst.PathSpec, nil, nil) - - var defaultLangTag, err = language.Parse(dst.Conf.DefaultContentLanguage()) + defaultLangTag, err := language.Parse(dst.Conf.DefaultContentLanguage()) if err != nil { defaultLangTag = language.English } @@ -59,21 +57,19 @@ func (tp *TranslationProvider) NewResource(dst *deps.Deps) error { bundle.RegisterUnmarshalFunc("yml", yaml.Unmarshal) bundle.RegisterUnmarshalFunc("json", json.Unmarshal) - // The source dirs are ordered so the most important comes first. Since this is a - // last key win situation, we have to reverse the iteration order. - dirs := dst.BaseFs.I18n.Dirs - for i := len(dirs) - 1; i >= 0; i-- { - dir := dirs[i] - src := spec.NewFilesystemFromFileMetaInfo(dir) - files, err := src.Files() - if err != nil { - return err - } - for _, file := range files { - if err := addTranslationFile(bundle, file); err != nil { - return err - } - } + w := hugofs.NewWalkway( + hugofs.WalkwayConfig{ + Fs: dst.BaseFs.I18n.Fs, + WalkFn: func(path string, info hugofs.FileMetaInfo) error { + if info.IsDir() { + return nil + } + return addTranslationFile(bundle, source.NewFileInfo(info)) + }, + }) + + if err := w.Walk(); err != nil { + return err } tp.t = NewTranslator(bundle, dst.Conf, dst.Log) @@ -81,12 +77,11 @@ func (tp *TranslationProvider) NewResource(dst *deps.Deps) error { dst.Translate = tp.t.Func(dst.Conf.Language().Lang) return nil - } const artificialLangTagPrefix = "art-x-" -func addTranslationFile(bundle *i18n.Bundle, r source.File) error { +func addTranslationFile(bundle *i18n.Bundle, r *source.File) error { f, err := r.FileInfo().Meta().Open() if err != nil { return fmt.Errorf("failed to open translations file %q:: %w", r.LogicalName(), err) @@ -129,13 +124,8 @@ func (tp *TranslationProvider) CloneResource(dst, src *deps.Deps) error { return nil } -func errWithFileContext(inerr error, r source.File) error { - fim, ok := r.FileInfo().(hugofs.FileMetaInfo) - if !ok { - return inerr - } - - meta := fim.Meta() +func errWithFileContext(inerr error, r *source.File) error { + meta := r.FileInfo().Meta() realFilename := meta.Filename f, err := meta.Open() if err != nil { @@ -144,5 +134,4 @@ func errWithFileContext(inerr error, r source.File) error { defer f.Close() return herrors.NewFileErrorFromName(inerr, realFilename).UpdateContent(f, nil) - } diff --git a/langs/language.go b/langs/language.go index 2cd608675..d34ea1cc7 100644 --- a/langs/language.go +++ b/langs/language.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -95,22 +95,13 @@ func NewLanguage(lang, defaultContentLanguage, timeZone string, languageConfig L // This is injected from hugolib to avoid circular dependencies. 
var DeprecationFunc = func(item, alternative string, err bool) {} -const paramsDeprecationWarning = `.Language.Params is deprecated and will be removed in a future release. Use site.Params instead. - -- For all but custom parameters, you need to use the built in Hugo variables, e.g. site.Title, site.LanguageCode; site.Language.Params.Title will not work. -- All custom parameters needs to be placed below params, e.g. [languages.en.params] in TOML. - -See https://gohugo.io/content-management/multilingual/#changes-in-hugo-01120 - -` - // Params returns the language params. // Note that this is the same as the Site.Params, but we keep it here for legacy reasons. // Deprecated: Use the site.Params instead. func (l *Language) Params() maps.Params { // TODO(bep) Remove this for now as it created a little too much noise. Need to think about this. // See https://github.com/gohugoio/hugo/issues/11025 - //DeprecationFunc(".Language.Params", paramsDeprecationWarning, false) + // DeprecationFunc(".Language.Params", paramsDeprecationWarning, false) return l.params } @@ -147,7 +138,8 @@ func (l Languages) AsSet() map[string]bool { return m } -func (l Languages) AsOrdinalSet() map[string]int { +// AsIndexSet returns a map with the language code as key and index in l as value. +func (l Languages) AsIndexSet() map[string]int { m := make(map[string]int) for i, lang := range l { m[lang.Lang] = i diff --git a/lazy/init.go b/lazy/init.go index 9a25e1e05..7b88a5351 100644 --- a/lazy/init.go +++ b/lazy/init.go @@ -15,11 +15,10 @@ package lazy import ( "context" + "errors" "sync" "sync/atomic" "time" - - "errors" ) // New creates a new empty Init. @@ -197,6 +196,7 @@ func (ini *Init) withTimeout(ctx context.Context, timeout time.Duration, f func( select { case <-waitCtx.Done(): + //lint:ignore ST1005 end user message. return nil, errors.New("timed out initializing value. You may have a circular loop in a shortcode, or your site may have resources that take longer to build than the `timeout` limit in your Hugo config file.") case ve := <-c: return ve.v, ve.err diff --git a/main.go b/main.go index 311cb3645..eb1fc0bea 100644 --- a/main.go +++ b/main.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/main_test.go b/main_test.go index 4b6ad4caf..75f5ed949 100644 --- a/main_test.go +++ b/main_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -50,15 +50,12 @@ func TestUnfinished(t *testing.T) { p := commonTestScriptsParam p.Dir = "testscripts/unfinished" - //p.UpdateScripts = true + // p.UpdateScripts = true testscript.Run(t, p) } func TestMain(m *testing.M) { - type testInfo struct { - BaseURLs []string - } os.Exit( testscript.RunMain(m, map[string]func() int{ // The main program. 
@@ -91,7 +88,7 @@ var commonTestScriptsParam = testscript.Params{ ts.Fatalf("%v", err) } b = bytes.Replace(b, []byte("\r\n"), []byte{'\n'}, -1) - if err := os.WriteFile(filename, b, 0666); err != nil { + if err := os.WriteFile(filename, b, 0o666); err != nil { ts.Fatalf("%v", err) } }, @@ -115,15 +112,10 @@ var commonTestScriptsParam = testscript.Params{ } } time.Sleep(time.Duration(i) * time.Second) - }, // ls lists a directory to stdout. "ls": func(ts *testscript.TestScript, neg bool, args []string) { - var dirname string - if len(args) > 0 { - dirname = args[0] - } - dirname = ts.MkAbs(args[0]) + dirname := ts.MkAbs(args[0]) dir, err := os.Open(dirname) if err != nil { @@ -223,7 +215,6 @@ var commonTestScriptsParam = testscript.Params{ } } return nil - } // The timing on server rebuilds can be a little tricky to get right, @@ -350,7 +341,6 @@ var commonTestScriptsParam = testscript.Params{ return } - }, "stopServer": func(ts *testscript.TestScript, neg bool, args []string) { baseURL := ts.Getenv("HUGOTEST_BASEURL_0") @@ -367,7 +357,6 @@ var commonTestScriptsParam = testscript.Params{ resp.Body.Close() // Allow some time for the server to shut down. time.Sleep(2 * time.Second) - }, }, } @@ -384,13 +373,13 @@ func testSetupFunc() func(env *testscript.Env) error { keyVals = append(keyVals, "HOME", home) if runtime.GOOS == "darwin" { - if err := os.MkdirAll(filepath.Join(home, "Library", "Caches"), 0777); err != nil { + if err := os.MkdirAll(filepath.Join(home, "Library", "Caches"), 0o777); err != nil { return err } } if runtime.GOOS == "linux" { - if err := os.MkdirAll(xdghome, 0777); err != nil { + if err := os.MkdirAll(xdghome, 0o777); err != nil { return err } } diff --git a/markup/asciidocext/convert_test.go b/markup/asciidocext/convert_test.go index 459686139..9ccc807f1 100644 --- a/markup/asciidocext/convert_test.go +++ b/markup/asciidocext/convert_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/markup/blackfriday/anchors.go b/markup/blackfriday/anchors.go index 90f65a64c..987f46fc6 100644 --- a/markup/blackfriday/anchors.go +++ b/markup/blackfriday/anchors.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/markup/converter/converter.go b/markup/converter/converter.go index 7c4898592..b66cb8730 100644 --- a/markup/converter/converter.go +++ b/markup/converter/converter.go @@ -89,7 +89,6 @@ func (nopConverter) Supports(feature identity.Identity) bool { // another format, e.g. Markdown to HTML. type Converter interface { Convert(ctx RenderContext) (ResultRender, error) - Supports(feature identity.Identity) bool } // ParseRenderer is an optional interface. @@ -156,5 +155,3 @@ type RenderContext struct { // GerRenderer provides hook renderers on demand. 
GetRenderer hooks.GetRendererFunc } - -var FeatureRenderHooks = identity.NewPathIdentity("markup", "renderingHooks") diff --git a/markup/converter/hooks/hooks.go b/markup/converter/hooks/hooks.go index c5be4d1f0..bdc38f119 100644 --- a/markup/converter/hooks/hooks.go +++ b/markup/converter/hooks/hooks.go @@ -20,7 +20,6 @@ import ( "github.com/gohugoio/hugo/common/hugio" "github.com/gohugoio/hugo/common/text" "github.com/gohugoio/hugo/common/types/hstring" - "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/markup/internal/attributes" ) @@ -89,12 +88,10 @@ type AttributesOptionsSliceProvider interface { type LinkRenderer interface { RenderLink(cctx context.Context, w io.Writer, ctx LinkContext) error - identity.Provider } type CodeBlockRenderer interface { RenderCodeblock(cctx context.Context, w hugio.FlexiWriter, ctx CodeblockContext) error - identity.Provider } type IsDefaultCodeBlockRendererProvider interface { @@ -123,7 +120,6 @@ type HeadingContext interface { type HeadingRenderer interface { // RenderHeading writes the rendered content to w using the data in w. RenderHeading(cctx context.Context, w io.Writer, ctx HeadingContext) error - identity.Provider } // ElementPositionResolver provides a way to resolve the start Position diff --git a/markup/goldmark/codeblocks/integration_test.go b/markup/goldmark/codeblocks/integration_test.go index 7f0201878..5597fc507 100644 --- a/markup/goldmark/codeblocks/integration_test.go +++ b/markup/goldmark/codeblocks/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -339,7 +339,6 @@ Attributes: {{ .Attributes }}|Options: {{ .Options }}| } func TestPanics(t *testing.T) { - files := ` -- config.toml -- [markup] @@ -384,7 +383,6 @@ Common b.AssertFileContent("public/p1/index.html", "Common") }) } - } // Issue 10835 @@ -421,5 +419,4 @@ Attributes: {{ .Attributes }}|Type: {{ .Type }}| b.Assert(err, qt.Not(qt.IsNil)) b.Assert(err.Error(), qt.Contains, "p1.md:7:9\": failed to parse Markdown attributes; you may need to quote the values") - } diff --git a/markup/goldmark/codeblocks/render.go b/markup/goldmark/codeblocks/render.go index 5f053d278..5f479bf23 100644 --- a/markup/goldmark/codeblocks/render.go +++ b/markup/goldmark/codeblocks/render.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
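Aside (editorial note, not part of the patch): with identity.Provider removed from the hook interfaces above, a custom code block renderer only has to implement the render method itself; dependency tracking no longer flows through GetIdentity. A minimal sketch under that assumption — the Type and Inner accessors on CodeblockContext are assumed here, since the hunk above only names the interface:

package sketch

import (
	"context"
	"fmt"
	"html"

	"github.com/gohugoio/hugo/common/hugio"
	"github.com/gohugoio/hugo/markup/converter/hooks"
)

// plainCodeBlockRenderer is meant to satisfy hooks.CodeBlockRenderer after this
// change: only RenderCodeblock is required, no GetIdentity.
type plainCodeBlockRenderer struct{}

func (r plainCodeBlockRenderer) RenderCodeblock(cctx context.Context, w hugio.FlexiWriter, ctx hooks.CodeblockContext) error {
	// Type and Inner are assumed accessor names on CodeblockContext.
	_, err := fmt.Fprintf(w, "<pre><code class=\"language-%s\">%s</code></pre>", ctx.Type(), html.EscapeString(ctx.Inner()))
	return err
}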
@@ -133,8 +133,6 @@ func (r *htmlRenderer) renderCodeBlock(w util.BufWriter, src []byte, node ast.No cbctx, ) - ctx.AddIdentity(cr) - if err != nil { return ast.WalkContinue, herrors.NewFileErrorFromPos(err, cbctx.createPos()) } diff --git a/markup/goldmark/convert.go b/markup/goldmark/convert.go index 56cc56fcd..de06bedff 100644 --- a/markup/goldmark/convert.go +++ b/markup/goldmark/convert.go @@ -17,8 +17,6 @@ package goldmark import ( "bytes" - "github.com/gohugoio/hugo/identity" - "github.com/gohugoio/hugo-goldmark-extensions/passthrough" "github.com/gohugoio/hugo/markup/goldmark/codeblocks" "github.com/gohugoio/hugo/markup/goldmark/goldmark_config" @@ -213,8 +211,6 @@ func newMarkdown(pcfg converter.ProviderConfig) goldmark.Markdown { return md } -var _ identity.IdentitiesProvider = (*converterResult)(nil) - type parserResult struct { doc any toc *tableofcontents.Fragments @@ -230,25 +226,17 @@ func (p parserResult) TableOfContents() *tableofcontents.Fragments { type renderResult struct { converter.ResultRender - ids identity.Identities -} - -func (r renderResult) GetIdentities() identity.Identities { - return r.ids } type converterResult struct { converter.ResultRender tableOfContentsProvider - identity.IdentitiesProvider } type tableOfContentsProvider interface { TableOfContents() *tableofcontents.Fragments } -var converterIdentity = identity.KeyValueIdentity{Key: "goldmark", Value: "converter"} - func (c *goldmarkConverter) Parse(ctx converter.RenderContext) (converter.ResultParse, error) { pctx := c.newParserContext(ctx) reader := text.NewReader(ctx.Src) @@ -262,8 +250,8 @@ func (c *goldmarkConverter) Parse(ctx converter.RenderContext) (converter.Result doc: doc, toc: pctx.TableOfContents(), }, nil - } + func (c *goldmarkConverter) Render(ctx converter.RenderContext, doc any) (converter.ResultRender, error) { n := doc.(ast.Node) buf := &render.BufWriter{Buffer: &bytes.Buffer{}} @@ -271,7 +259,6 @@ func (c *goldmarkConverter) Render(ctx converter.RenderContext, doc any) (conver rcx := &render.RenderContextDataHolder{ Rctx: ctx, Dctx: c.ctx, - IDs: identity.NewManager(converterIdentity), } w := &render.Context{ @@ -285,9 +272,7 @@ func (c *goldmarkConverter) Render(ctx converter.RenderContext, doc any) (conver return renderResult{ ResultRender: buf, - ids: rcx.IDs.GetIdentities(), }, nil - } func (c *goldmarkConverter) Convert(ctx converter.RenderContext) (converter.ResultRender, error) { @@ -302,17 +287,7 @@ func (c *goldmarkConverter) Convert(ctx converter.RenderContext) (converter.Resu return converterResult{ ResultRender: renderResult, tableOfContentsProvider: parseResult, - IdentitiesProvider: renderResult.(identity.IdentitiesProvider), }, nil - -} - -var featureSet = map[identity.Identity]bool{ - converter.FeatureRenderHooks: true, -} - -func (c *goldmarkConverter) Supports(feature identity.Identity) bool { - return featureSet[feature.GetIdentity()] } func (c *goldmarkConverter) newParserContext(rctx converter.RenderContext) *parserContext { @@ -349,5 +324,4 @@ func toTypographicPunctuationMap(t goldmark_config.Typographer) map[extension.Ty extension.RightAngleQuote: []byte(t.RightAngleQuote), extension.Apostrophe: []byte(t.Apostrophe), } - } diff --git a/markup/goldmark/convert_test.go b/markup/goldmark/convert_test.go index c97156f7a..266f0f9ab 100644 --- a/markup/goldmark/convert_test.go +++ b/markup/goldmark/convert_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -483,7 +483,6 @@ noclasses=false }) c.Run("Highlight lines, default config", func(c *qt.C) { - result := convertForConfig(c, cfgStrHighlichgtNoClasses, lines, `bash {linenos=table,hl_lines=[2 "4-5"],linenostart=3}`) c.Assert(result, qt.Contains, "
    \n
    \n
    4")
    @@ -614,7 +613,6 @@ func unsafeConf() config.AllProvider {
     unsafe = true
     `)
     	return testconfig.GetTestConfig(nil, cfg)
    -
     }
     
     func safeConf() config.AllProvider {
    @@ -624,7 +622,6 @@ func safeConf() config.AllProvider {
     unsafe = false
     `)
     	return testconfig.GetTestConfig(nil, cfg)
    -
     }
     
     func TestConvertCJK(t *testing.T) {
    diff --git a/markup/goldmark/goldmark_config/config.go b/markup/goldmark/goldmark_config/config.go
    index ba1874a18..1c393e3f4 100644
    --- a/markup/goldmark/goldmark_config/config.go
    +++ b/markup/goldmark/goldmark_config/config.go
    @@ -73,9 +73,10 @@ var Default = Config{
     
     // Config configures Goldmark.
     type Config struct {
    -	Renderer   Renderer
    -	Parser     Parser
    -	Extensions Extensions
    +	DuplicateResourceFiles bool
    +	Renderer               Renderer
    +	Parser                 Parser
    +	Extensions             Extensions
     }
     
     type Extensions struct {
    diff --git a/markup/goldmark/images/integration_test.go b/markup/goldmark/images/integration_test.go
    index e8d1b880e..8b0ba99c1 100644
    --- a/markup/goldmark/images/integration_test.go
    +++ b/markup/goldmark/images/integration_test.go
    @@ -39,10 +39,10 @@ This is an inline image: ![Inline Image](/inline.jpg). Some more text.
     		files = files + `-- layouts/_default/_markup/render-image.html --
     {{ if .IsBlock }}
     
    - {{ .Text }} + {{ .Text }}|{{ .Ordinal }}
    {{ else }} - {{ .Text }} + {{ .Text }}|{{ .Ordinal }} {{ end }} ` b := hugolib.NewIntegrationTestBuilder( @@ -54,8 +54,8 @@ This is an inline image: ![Inline Image](/inline.jpg). Some more text. ).Build() b.AssertFileContent("public/p1/index.html", - "This is an inline image: \n\t\"Inline\n. Some more text.

    ", - "
    \n\t\"Block", + "This is an inline image: \n\t\"Inline\n. Some more text.

    ", + "
    \n\t\"Block", ) }) @@ -109,5 +109,4 @@ This is an inline image: ![Inline Image](/inline.jpg). Some more text. b.AssertFileContent("public/p1/index.html", "

    \"Block

    ") }) - } diff --git a/markup/goldmark/internal/render/context.go b/markup/goldmark/internal/render/context.go index b18983ef3..578714339 100644 --- a/markup/goldmark/internal/render/context.go +++ b/markup/goldmark/internal/render/context.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -17,7 +17,6 @@ import ( "bytes" "math/bits" - "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/markup/converter" ) @@ -59,13 +58,11 @@ func (ctx *Context) PopPos() int { type ContextData interface { RenderContext() converter.RenderContext DocumentContext() converter.DocumentContext - AddIdentity(id identity.Provider) } type RenderContextDataHolder struct { Rctx converter.RenderContext Dctx converter.DocumentContext - IDs identity.Manager } func (ctx *RenderContextDataHolder) RenderContext() converter.RenderContext { @@ -75,7 +72,3 @@ func (ctx *RenderContextDataHolder) RenderContext() converter.RenderContext { func (ctx *RenderContextDataHolder) DocumentContext() converter.DocumentContext { return ctx.Dctx } - -func (ctx *RenderContextDataHolder) AddIdentity(id identity.Provider) { - ctx.IDs.Add(id) -} diff --git a/markup/goldmark/links/integration_test.go b/markup/goldmark/links/integration_test.go deleted file mode 100644 index 20d4d74b1..000000000 --- a/markup/goldmark/links/integration_test.go +++ /dev/null @@ -1,113 +0,0 @@ -package images_test - -import ( - "strings" - "testing" - - "github.com/gohugoio/hugo/hugolib" -) - -func TestDisableWrapStandAloneImageWithinParagraph(t *testing.T) { - t.Parallel() - - filesTemplate := ` --- config.toml -- -[markup.goldmark.renderer] - unsafe = false -[markup.goldmark.parser] -wrapStandAloneImageWithinParagraph = CONFIG_VALUE -[markup.goldmark.parser.attribute] - block = true - title = true --- content/p1.md -- ---- -title: "p1" ---- - -This is an inline image: ![Inline Image](/inline.jpg). Some more text. - -![Block Image](/block.jpg) -{.b} - - --- layouts/_default/single.html -- -{{ .Content }} -` - - t.Run("With Hook, no wrap", func(t *testing.T) { - files := strings.ReplaceAll(filesTemplate, "CONFIG_VALUE", "false") - files = files + `-- layouts/_default/_markup/render-image.html -- -{{ if .IsBlock }} -
    - {{ .Text }}|{{ .Ordinal }} -
    -{{ else }} - {{ .Text }}|{{ .Ordinal }} -{{ end }} -` - b := hugolib.NewIntegrationTestBuilder( - hugolib.IntegrationTestConfig{ - T: t, - TxtarString: files, - NeedsOsFS: false, - }, - ).Build() - - b.AssertFileContent("public/p1/index.html", - "This is an inline image: \n\t\"Inline\n. Some more text.

    ", - "
    \n\t\"Block", - ) - }) - - t.Run("With Hook, wrap", func(t *testing.T) { - files := strings.ReplaceAll(filesTemplate, "CONFIG_VALUE", "true") - files = files + `-- layouts/_default/_markup/render-image.html -- -{{ if .IsBlock }} -
    - {{ .Text }} -
    -{{ else }} - {{ .Text }} -{{ end }} -` - b := hugolib.NewIntegrationTestBuilder( - hugolib.IntegrationTestConfig{ - T: t, - TxtarString: files, - NeedsOsFS: false, - }, - ).Build() - - b.AssertFileContent("public/p1/index.html", - "This is an inline image: \n\t\"Inline\n. Some more text.

    ", - "

    \n\t\"Block\n

    ", - ) - }) - - t.Run("No Hook, no wrap", func(t *testing.T) { - files := strings.ReplaceAll(filesTemplate, "CONFIG_VALUE", "false") - b := hugolib.NewIntegrationTestBuilder( - hugolib.IntegrationTestConfig{ - T: t, - TxtarString: files, - NeedsOsFS: false, - }, - ).Build() - - b.AssertFileContent("public/p1/index.html", "

    This is an inline image: \"Inline. Some more text.

    \n\"Block") - }) - - t.Run("No Hook, wrap", func(t *testing.T) { - files := strings.ReplaceAll(filesTemplate, "CONFIG_VALUE", "true") - b := hugolib.NewIntegrationTestBuilder( - hugolib.IntegrationTestConfig{ - T: t, - TxtarString: files, - NeedsOsFS: false, - }, - ).Build() - - b.AssertFileContent("public/p1/index.html", "

    \"Block

    ") - }) - -} diff --git a/markup/goldmark/links/transform.go b/markup/goldmark/links/transform.go deleted file mode 100644 index 2a7815b70..000000000 --- a/markup/goldmark/links/transform.go +++ /dev/null @@ -1,57 +0,0 @@ -package images - -import ( - "github.com/yuin/goldmark/ast" - "github.com/yuin/goldmark/parser" - "github.com/yuin/goldmark/text" -) - -type ( - linksExtension struct { - wrapStandAloneImageWithinParagraph bool - } -) - -const ( - // Used to signal to the rendering step that an image is used in a block context. - // Dont's change this; the prefix must match the internalAttrPrefix in the root goldmark package. - AttrIsBlock = "_h__isBlock" -) - -type Transformer struct { - wrapStandAloneImageWithinParagraph bool -} - -// Transform transforms the provided Markdown AST. -func (t *Transformer) Transform(doc *ast.Document, reader text.Reader, pctx parser.Context) { - ast.Walk(doc, func(node ast.Node, enter bool) (ast.WalkStatus, error) { - if !enter { - return ast.WalkContinue, nil - } - - if n, ok := node.(*ast.Image); ok { - parent := n.Parent() - - if !t.wrapStandAloneImageWithinParagraph { - isBlock := parent.ChildCount() == 1 - if isBlock { - n.SetAttributeString(AttrIsBlock, true) - } - - if isBlock && parent.Kind() == ast.KindParagraph { - for _, attr := range parent.Attributes() { - // Transfer any attribute set down to the image. - // Image elements does not support attributes on its own, - // so it's safe to just set without checking first. - n.SetAttribute(attr.Name, attr.Value) - } - grandParent := parent.Parent() - grandParent.ReplaceChild(grandParent, parent, n) - } - } - - } - - return ast.WalkContinue, nil - }) -} diff --git a/markup/goldmark/render_hooks.go b/markup/goldmark/render_hooks.go index ecdd7f91e..8dcdc39c3 100644 --- a/markup/goldmark/render_hooks.go +++ b/markup/goldmark/render_hooks.go @@ -197,8 +197,6 @@ func (r *hookedRenderer) renderImage(w util.BufWriter, source []byte, node ast.N }, ) - ctx.AddIdentity(lr) - return ast.WalkContinue, err } @@ -284,11 +282,6 @@ func (r *hookedRenderer) renderLink(w util.BufWriter, source []byte, node ast.No }, ) - // TODO(bep) I have a working branch that fixes these rather confusing identity types, - // but for now it's important that it's not .GetIdentity() that's added here, - // to make sure we search the entire chain on changes. - ctx.AddIdentity(lr) - return ast.WalkContinue, err } @@ -353,11 +346,6 @@ func (r *hookedRenderer) renderAutoLink(w util.BufWriter, source []byte, node as }, ) - // TODO(bep) I have a working branch that fixes these rather confusing identity types, - // but for now it's important that it's not .GetIdentity() that's added here, - // to make sure we search the entire chain on changes. - ctx.AddIdentity(lr) - return ast.WalkContinue, err } @@ -443,8 +431,6 @@ func (r *hookedRenderer) renderHeading(w util.BufWriter, source []byte, node ast }, ) - ctx.AddIdentity(hr) - return ast.WalkContinue, err } diff --git a/markup/goldmark/toc_test.go b/markup/goldmark/toc_test.go index 1b846877b..96983dfa6 100644 --- a/markup/goldmark/toc_test.go +++ b/markup/goldmark/toc_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/markup/highlight/chromalexers/chromalexers.go b/markup/highlight/chromalexers/chromalexers.go index 41fd76261..6ab4a7bbe 100644 --- a/markup/highlight/chromalexers/chromalexers.go +++ b/markup/highlight/chromalexers/chromalexers.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/markup/highlight/highlight.go b/markup/highlight/highlight.go index 85ea74124..a284b5981 100644 --- a/markup/highlight/highlight.go +++ b/markup/highlight/highlight.go @@ -27,7 +27,6 @@ import ( "github.com/alecthomas/chroma/v2/styles" "github.com/gohugoio/hugo/common/hugio" "github.com/gohugoio/hugo/common/text" - "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/markup/converter/hooks" "github.com/gohugoio/hugo/markup/highlight/chromalexers" "github.com/gohugoio/hugo/markup/internal/attributes" @@ -146,13 +145,6 @@ func (h chromaHighlighter) IsDefaultCodeBlockRenderer() bool { return true } -var id = identity.NewPathIdentity("chroma", "highlight") - -// GetIdentity is for internal use. -func (h chromaHighlighter) GetIdentity() identity.Identity { - return id -} - // HighlightResult holds the result of an highlighting operation. type HighlightResult struct { innerLow int @@ -188,7 +180,7 @@ func highlight(fw hugio.FlexiWriter, code, lang string, attributes []attributes. if lexer == nil { if cfg.Hl_inline { - fmt.Fprint(w, fmt.Sprintf("%s
    ", inlineCodeAttrs(lang), gohtml.EscapeString(code))) + fmt.Fprintf(w, "%s", inlineCodeAttrs(lang), gohtml.EscapeString(code)) } else { preWrapper := getPreWrapper(lang, w) fmt.Fprint(w, preWrapper.Start(true, "")) @@ -278,8 +270,6 @@ func (p *preWrapper) Start(code bool, styleAttr string) string { } func inlineCodeAttrs(lang string) string { - if lang == "" { - } return fmt.Sprintf(` class="code-inline language-%s"`, lang) } diff --git a/markup/highlight/highlight_test.go b/markup/highlight/highlight_test.go index 662f07c93..732dbfa64 100644 --- a/markup/highlight/highlight_test.go +++ b/markup/highlight/highlight_test.go @@ -87,7 +87,6 @@ User-Agent: foo result, _ := h.Highlight(lines, "bash", "") c.Assert(result, qt.Contains, "2\n") - result, _ = h.Highlight(lines, "bash", "lineanchors=test") result, _ = h.Highlight(lines, "bash", "anchorlinenos=false,hl_lines=2") c.Assert(result, qt.Not(qt.Contains), "id=\"2\"") }) diff --git a/markup/highlight/integration_test.go b/markup/highlight/integration_test.go index ce6705f02..b53b585c0 100644 --- a/markup/highlight/integration_test.go +++ b/markup/highlight/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/markup/internal/attributes/attributes.go b/markup/internal/attributes/attributes.go index 91181c78c..4e81afe04 100644 --- a/markup/internal/attributes/attributes.go +++ b/markup/internal/attributes/attributes.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/markup/markup.go b/markup/markup.go index ebd86f38f..835c7bbec 100644 --- a/markup/markup.go +++ b/markup/markup.go @@ -95,6 +95,7 @@ func NewConverterProvider(cfg converter.ProviderConfig) (ConverterProvider, erro type ConverterProvider interface { Get(name string) converter.Provider + IsGoldmark(name string) bool // Default() converter.Provider GetMarkupConfig() markup_config.Config GetHighlighter() highlight.Highlighter @@ -110,6 +111,11 @@ type converterRegistry struct { config converter.ProviderConfig } +func (r *converterRegistry) IsGoldmark(name string) bool { + cp := r.Get(name) + return cp != nil && cp.Name() == "goldmark" +} + func (r *converterRegistry) Get(name string) converter.Provider { return r.converters[strings.ToLower(name)] } diff --git a/markup/markup_test.go b/markup/markup_test.go index 5cf08758d..172099d5c 100644 --- a/markup/markup_test.go +++ b/markup/markup_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/markup/org/convert_test.go b/markup/org/convert_test.go index 1422585af..16c4306ff 100644 --- a/markup/org/convert_test.go +++ b/markup/org/convert_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/markup/tableofcontents/integration_test.go b/markup/tableofcontents/integration_test.go index a51ad3d45..87a7c0108 100644 --- a/markup/tableofcontents/integration_test.go +++ b/markup/tableofcontents/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/media/config.go b/media/config.go index b356132be..cdec2e438 100644 --- a/media/config.go +++ b/media/config.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/media/config_test.go b/media/config_test.go index 75ede75bd..4803eb42a 100644 --- a/media/config_test.go +++ b/media/config_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/media/mediaType.go b/media/mediaType.go index 8204fc435..367c8ecc9 100644 --- a/media/mediaType.go +++ b/media/mediaType.go @@ -129,15 +129,6 @@ func FromStringAndExt(t, ext string) (Type, error) { return tp, nil } -// MustFromString is like FromString but panics on error. -func MustFromString(t string) Type { - tp, err := FromString(t) - if err != nil { - panic(err) - } - return tp -} - // FromString creates a new Type given a type string on the form MainType/SubType and // an optional suffix, e.g. "text/html" or "text/html+html". func FromString(t string) (Type, error) { @@ -209,14 +200,6 @@ func (m *Type) init() { } } -// WithDelimiterAndSuffixes is used in tests. -func WithDelimiterAndSuffixes(t Type, delimiter, suffixesCSV string) Type { - t.Delimiter = delimiter - t.SuffixesCSV = suffixesCSV - t.init() - return t -} - func newMediaType(main, sub string, suffixes []string) Type { t := Type{MainType: main, SubType: sub, SuffixesCSV: strings.Join(suffixes, ","), Delimiter: DefaultDelimiter} t.init() @@ -315,7 +298,6 @@ func (t Types) IsTextSuffix(suffix string) bool { } } return false - } func (m Type) hasSuffix(suffix string) bool { diff --git a/minifiers/config_test.go b/minifiers/config_test.go index 9dc20c655..7edd8734e 100644 --- a/minifiers/config_test.go +++ b/minifiers/config_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/modules/client.go b/modules/client.go index b9a2a48d4..fae6a4c0a 100644 --- a/modules/client.go +++ b/modules/client.go @@ -42,7 +42,7 @@ import ( "github.com/gohugoio/hugo/config" - "github.com/rogpeppe/go-internal/module" + "golang.org/x/mod/module" "github.com/gohugoio/hugo/common/hugio" diff --git a/modules/client_test.go b/modules/client_test.go index 75e3c2b08..ea910580f 100644 --- a/modules/client_test.go +++ b/modules/client_test.go @@ -49,7 +49,7 @@ github.com/gohugoio/hugoTestModules1_darwin/modh2_2@v1.4.0 github.com/gohugoio/h workingDir, clean, err := htesting.CreateTempDir(hugofs.Os, fmt.Sprintf("%s-%d", modName, clientID)) c.Assert(err, qt.IsNil) themesDir := filepath.Join(workingDir, "themes") - err = os.Mkdir(themesDir, 0777) + err = os.Mkdir(themesDir, 0o777) c.Assert(err, qt.IsNil) ccfg := ClientConfig{ @@ -184,7 +184,7 @@ project github.com/gohugoio/hugoTestModules1_darwin/modh2_2_2@v1.3.0+vendor c.Assert(err, qt.IsNil) c.Assert(dirname, qt.Equals, filepath.Join(client.ccfg.ThemesDir, "../../foo")) - dirname, err = client.createThemeDirname("../../foo", false) + _, err = client.createThemeDirname("../../foo", false) c.Assert(err, qt.Not(qt.IsNil)) absDir := filepath.Join(client.ccfg.WorkingDir, "..", "..") diff --git a/modules/collect.go b/modules/collect.go index 6c47bde5c..a4066a46c 100644 --- a/modules/collect.go +++ b/modules/collect.go @@ -17,6 +17,7 @@ import ( "bufio" "errors" "fmt" + "io/fs" "os" "path/filepath" "regexp" @@ -36,7 +37,7 @@ import ( "github.com/gohugoio/hugo/hugofs/files" - "github.com/rogpeppe/go-internal/module" + "golang.org/x/mod/module" "github.com/gohugoio/hugo/config" "github.com/spf13/afero" @@ -282,6 +283,7 @@ func (c *collector) add(owner *moduleAdapter, moduleImport Import) (*moduleAdapt return nil, nil } if found, _ := afero.Exists(c.fs, moduleDir); !found { + //lint:ignore ST1005 end user message. c.err = c.wrapModuleNotFound(fmt.Errorf(`module %q not found in %q; either add it as a Hugo Module or store it in %q.`, modulePath, moduleDir, c.ccfg.ThemesDir)) return nil, nil } @@ -599,7 +601,12 @@ func (c *collector) mountCommonJSConfig(owner *moduleAdapter, mounts []Mount) ([ } // Mount the common JS config files. - fis, err := afero.ReadDir(c.fs, owner.Dir()) + d, err := c.fs.Open(owner.Dir()) + if err != nil { + return mounts, fmt.Errorf("failed to open dir %q: %q", owner.Dir(), err) + } + defer d.Close() + fis, err := d.(fs.ReadDirFile).ReadDir(-1) if err != nil { return mounts, fmt.Errorf("failed to read dir %q: %q", owner.Dir(), err) } diff --git a/modules/config.go b/modules/config.go index f8faf7969..62671613c 100644 --- a/modules/config.go +++ b/modules/config.go @@ -29,7 +29,6 @@ import ( const WorkspaceDisabled = "off" var DefaultModuleConfig = Config{ - // Default to direct, which means "git clone" and similar. We // will investigate proxy settings in more depth later. // See https://github.com/golang/go/issues/26334 @@ -58,7 +57,6 @@ var DefaultModuleConfig = Config{ // ApplyProjectConfigDefaults applies default/missing module configuration for // the main project. func ApplyProjectConfigDefaults(mod Module, cfgs ...config.AllProvider) error { - moda := mod.(*moduleAdapter) // To bridge between old and new configuration format we need @@ -99,14 +97,19 @@ func ApplyProjectConfigDefaults(mod Module, cfgs ...config.AllProvider) error { dir = dirs.ContentDir dropLang = dir == dirsBase.ContentDir case files.ComponentFolderData: + //lint:ignore SA1019 Keep as adapter for now. 
dir = dirs.DataDir case files.ComponentFolderLayouts: + //lint:ignore SA1019 Keep as adapter for now. dir = dirs.LayoutDir case files.ComponentFolderI18n: + //lint:ignore SA1019 Keep as adapter for now. dir = dirs.I18nDir case files.ComponentFolderArchetypes: + //lint:ignore SA1019 Keep as adapter for now. dir = dirs.ArcheTypeDir case files.ComponentFolderAssets: + //lint:ignore SA1019 Keep as adapter for now. dir = dirs.AssetDir case files.ComponentFolderStatic: // For static dirs, we only care about the language in multihost setups. @@ -230,6 +233,7 @@ func decodeConfig(cfg config.Provider, pathReplacements map[string]string) (Conf c.Workspace = filepath.Join(workingDir, c.Workspace) } if _, err := os.Stat(c.Workspace); err != nil { + //lint:ignore ST1005 end user message. return c, fmt.Errorf("module workspace %q does not exist. Check your module.workspace setting (or HUGO_MODULE_WORKSPACE env var).", c.Workspace) } } diff --git a/modules/npm/package_builder.go b/modules/npm/package_builder.go index 9bdc7eb78..0deed2f42 100644 --- a/modules/npm/package_builder.go +++ b/modules/npm/package_builder.go @@ -18,6 +18,7 @@ import ( "encoding/json" "fmt" "io" + "io/fs" "strings" "github.com/gohugoio/hugo/common/hugio" @@ -44,17 +45,17 @@ const ( }` ) -func Pack(fs afero.Fs, fis []hugofs.FileMetaInfo) error { +func Pack(sourceFs, assetsWithDuplicatesPreservedFs afero.Fs) error { var b *packageBuilder // Have a package.hugo.json? - fi, err := fs.Stat(files.FilenamePackageHugoJSON) + fi, err := sourceFs.Stat(files.FilenamePackageHugoJSON) if err != nil { // Have a package.json? - fi, err = fs.Stat(packageJSONName) + fi, err = sourceFs.Stat(packageJSONName) if err == nil { // Preserve the original in package.hugo.json. - if err = hugio.CopyFile(fs, packageJSONName, files.FilenamePackageHugoJSON); err != nil { + if err = hugio.CopyFile(sourceFs, packageJSONName, files.FilenamePackageHugoJSON); err != nil { return fmt.Errorf("npm pack: failed to copy package file: %w", err) } } else { @@ -62,15 +63,15 @@ func Pack(fs afero.Fs, fis []hugofs.FileMetaInfo) error { name := "project" // Use the Hugo site's folder name as the default name. // The owner can change it later. - rfi, err := fs.Stat("") + rfi, err := sourceFs.Stat("") if err == nil { name = rfi.Name() } packageJSONContent := fmt.Sprintf(packageJSONTemplate, name, "0.1.0") - if err = afero.WriteFile(fs, files.FilenamePackageHugoJSON, []byte(packageJSONContent), 0666); err != nil { + if err = afero.WriteFile(sourceFs, files.FilenamePackageHugoJSON, []byte(packageJSONContent), 0o666); err != nil { return err } - fi, err = fs.Stat(files.FilenamePackageHugoJSON) + fi, err = sourceFs.Stat(files.FilenamePackageHugoJSON) if err != nil { return err } @@ -86,9 +87,18 @@ func Pack(fs afero.Fs, fis []hugofs.FileMetaInfo) error { b = newPackageBuilder(meta.Module, f) f.Close() + d, err := assetsWithDuplicatesPreservedFs.Open(files.FolderJSConfig) + if err != nil { + return nil + } + + fis, err := d.(fs.ReadDirFile).ReadDir(-1) + if err != nil { + return fmt.Errorf("npm pack: failed to read assets: %w", err) + } + for _, fi := range fis { if fi.IsDir() { - // We only care about the files in the root. 
continue } @@ -137,7 +147,7 @@ func Pack(fs afero.Fs, fis []hugofs.FileMetaInfo) error { return fmt.Errorf("npm pack: failed to marshal JSON: %w", err) } - if err := afero.WriteFile(fs, packageJSONName, packageJSONData.Bytes(), 0666); err != nil { + if err := afero.WriteFile(sourceFs, packageJSONName, packageJSONData.Bytes(), 0o666); err != nil { return fmt.Errorf("npm pack: failed to write package.json: %w", err) } diff --git a/navigation/menu.go b/navigation/menu.go index 50e51bcbe..3802014b1 100644 --- a/navigation/menu.go +++ b/navigation/menu.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -49,7 +49,6 @@ type MenuEntry struct { } func (m *MenuEntry) URL() string { - // Check page first. // In Hugo 0.86.0 we added `pageRef`, // a way to connect menu items in site config to pages. @@ -88,7 +87,7 @@ type Page interface { Weight() int IsPage() bool IsSection() bool - IsAncestor(other any) (bool, error) + IsAncestor(other any) bool Params() maps.Params } @@ -290,7 +289,6 @@ func DecodeConfig(in any) (*config.ConfigNamespace[map[string]MenuConfig, Menus] if err != nil { return ret, nil, err } else { - for _, entry := range m { var menuConfig MenuConfig if err := mapstructure.WeakDecode(entry, &menuConfig); err != nil { @@ -312,7 +310,6 @@ func DecodeConfig(in any) (*config.ConfigNamespace[map[string]MenuConfig, Menus] } return ret, menus, nil - } return config.DecodeNamespace[map[string]MenuConfig](in, buildConfig) diff --git a/navigation/menu_cache.go b/navigation/menu_cache.go index 4287ed875..b6350cd01 100644 --- a/navigation/menu_cache.go +++ b/navigation/menu_cache.go @@ -39,12 +39,6 @@ func newMenuCache() *menuCache { return &menuCache{m: make(map[string][]menuCacheEntry)} } -func (c *menuCache) clear() { - c.Lock() - defer c.Unlock() - c.m = make(map[string][]menuCacheEntry) -} - type menuCache struct { sync.RWMutex m map[string][]menuCacheEntry diff --git a/navigation/pagemenus.go b/navigation/pagemenus.go index 6321a8a63..ab57231c3 100644 --- a/navigation/pagemenus.go +++ b/navigation/pagemenus.go @@ -125,7 +125,7 @@ type pageMenus struct { func (pm *pageMenus) HasMenuCurrent(menuID string, me *MenuEntry) bool { if !types.IsNil(me.Page) && me.Page.IsSection() { - if ok, _ := me.Page.IsAncestor(pm.p); ok { + if ok := me.Page.IsAncestor(pm.p); ok { return true } } diff --git a/output/config.go b/output/config.go index 86e5bcfaa..a7ebf5107 100644 --- a/output/config.go +++ b/output/config.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -140,5 +140,4 @@ func decode(mediaTypes media.Types, input any, output *Format) error { } return nil - } diff --git a/output/config_test.go b/output/config_test.go index 52381c5d2..c2f0af980 100644 --- a/output/config_test.go +++ b/output/config_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/output/layouts/layout.go b/output/layouts/layout.go index 9c5ef17a1..c05841ae3 100644 --- a/output/layouts/layout.go +++ b/output/layouts/layout.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -90,7 +90,7 @@ type layoutBuilder struct { layoutVariations []string typeVariations []string d LayoutDescriptor - //f Format + // f Format } func (l *layoutBuilder) addLayoutVariations(vars ...string) { @@ -184,9 +184,18 @@ func resolvePageTemplate(d LayoutDescriptor) []string { case "404": b.addLayoutVariations("404") b.addTypeVariations("") + case "robotstxt": + b.addLayoutVariations("robots") + b.addTypeVariations("") + case "sitemap": + b.addLayoutVariations("sitemap") + b.addTypeVariations("") + case "sitemapindex": + b.addLayoutVariations("sitemapindex") + b.addTypeVariations("") } - isRSS := strings.EqualFold(d.OutputFormatName, "rss") + isRSS := d.OutputFormatName == "rss" if !d.RenderingHook && !d.Baseof && isRSS { // The historic and common rss.xml case b.addLayoutVariations("") @@ -212,6 +221,15 @@ func resolvePageTemplate(d LayoutDescriptor) []string { layouts = append(layouts, "_internal/_default/rss.xml") } + switch d.Kind { + case "robotsTXT": + layouts = append(layouts, "_internal/_default/robots.txt") + case "sitemap": + layouts = append(layouts, "_internal/_default/sitemap.xml") + case "sitemapindex": + layouts = append(layouts, "_internal/_default/sitemapindex.xml") + } + return layouts } diff --git a/output/outputFormat.go b/output/outputFormat.go index f602c03f3..54e7fe98d 100644 --- a/output/outputFormat.go +++ b/output/outputFormat.go @@ -56,12 +56,19 @@ type Format struct { // Enable to ignore the global uglyURLs setting. NoUgly bool `json:"noUgly"` + // Enable to override the global uglyURLs setting. + Ugly bool `json:"ugly"` + // Enable if it doesn't make sense to include this format in an alternative // format listing, CSS being one good example. // Note that we use the term "alternative" and not "alternate" here, as it // does not necessarily replace the other format, it is an alternative representation. NotAlternative bool `json:"notAlternative"` + // Enable if this is a resource whose path always starts at the root, + // e.g. /robots.txt. + Root bool `json:"root"` + // Setting this will make this output format control the value of // .Permalink and .RelPermalink for a rendered Page. // If not set, these values will point to the main (first) output format @@ -75,7 +82,7 @@ type Format struct { Weight int `json:"weight"` } -// An ordered list of built-in output formats. +// Built-in output formats. var ( AMPFormat = Format{ Name: "amp", @@ -156,6 +163,7 @@ var ( MediaType: media.Builtin.TextType, BaseName: "robots", IsPlainText: true, + Root: true, Rel: "alternate", } @@ -171,9 +179,27 @@ var ( Name: "sitemap", MediaType: media.Builtin.XMLType, BaseName: "sitemap", - NoUgly: true, + Ugly: true, Rel: "sitemap", } + + SitemapIndexFormat = Format{ + Name: "sitemapindex", + MediaType: media.Builtin.XMLType, + BaseName: "sitemap", + Ugly: true, + Root: true, + Rel: "sitemap", + } + + HTTPStatusHTMLFormat = Format{ + Name: "httpstatus", + MediaType: media.Builtin.HTMLType, + NotAlternative: true, + Ugly: true, + IsHTML: true, + Permalinkable: true, + } ) // DefaultFormats contains the default output formats supported by Hugo.
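As a reading aid for the two new Format fields above (Ugly and Root), here is a minimal, hypothetical sketch of declaring a custom output format that sets them; the "llms" format name and the field values are illustrative assumptions, not part of this patch:

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/media"
	"github.com/gohugoio/hugo/output"
)

func main() {
	// Hypothetical plain-text format: Ugly overrides the global uglyURLs
	// setting for this format only, and Root pins the published file to the
	// site root (as with /robots.txt above).
	llms := output.Format{
		Name:        "llms",
		MediaType:   media.Builtin.TextType,
		BaseName:    "llms",
		IsPlainText: true,
		Ugly:        true,
		Root:        true,
		Rel:         "alternate",
	}

	// BaseFilename composes BaseName with the media type's first suffix,
	// e.g. "llms.txt" for the text media type.
	fmt.Println(llms.BaseFilename())
}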
@@ -297,6 +323,11 @@ func (f Format) BaseFilename() string { return f.BaseName + f.MediaType.FirstSuffix.FullSuffix } +// IsZero returns true if f represents a zero value. +func (f Format) IsZero() bool { + return f.Name == "" +} + // MarshalJSON returns the JSON encoding of f. // For internal use only. func (f Format) MarshalJSON() ([]byte, error) { diff --git a/parser/lowercase_camel_json.go b/parser/lowercase_camel_json.go index d48aa40c4..3dd4c24b0 100644 --- a/parser/lowercase_camel_json.go +++ b/parser/lowercase_camel_json.go @@ -25,8 +25,7 @@ import ( // Regexp definitions var ( - keyMatchRegex = regexp.MustCompile(`\"(\w+)\":`) - wordBarrierRegex = regexp.MustCompile(`(\w)([A-Z])`) + keyMatchRegex = regexp.MustCompile(`\"(\w+)\":`) ) // Code adapted from https://gist.github.com/piersy/b9934790a8892db1a603820c0c23e4a7 @@ -92,19 +91,17 @@ func (c ReplacingJSONMarshaller) MarshalJSON() ([]byte, error) { if !hreflect.IsTruthful(v) { delete(m, k) } else { - switch v.(type) { + switch vv := v.(type) { case map[string]interface{}: - removeZeroVAlues(v.(map[string]any)) + removeZeroVAlues(vv) case []interface{}: - for _, vv := range v.([]interface{}) { - if m, ok := vv.(map[string]any); ok { + for _, vvv := range vv { + if m, ok := vvv.(map[string]any); ok { removeZeroVAlues(m) } } } - } - } } removeZeroVAlues(m) diff --git a/parser/metadecoders/decoder.go b/parser/metadecoders/decoder.go index 8d93d86a0..5dac23f03 100644 --- a/parser/metadecoders/decoder.go +++ b/parser/metadecoders/decoder.go @@ -174,22 +174,22 @@ func (d Decoder) UnmarshalTo(data []byte, f Format, v any) error { // and change all maps to map[string]interface{} like we would've // gotten from `json`. var ptr any - switch v.(type) { + switch vv := v.(type) { case *map[string]any: - ptr = *v.(*map[string]any) + ptr = *vv case *any: - ptr = *v.(*any) + ptr = *vv default: // Not a map. } if ptr != nil { if mm, changed := stringifyMapKeys(ptr); changed { - switch v.(type) { + switch vv := v.(type) { case *map[string]any: - *v.(*map[string]any) = mm.(map[string]any) + *vv = mm.(map[string]any) case *any: - *v.(*any) = mm + *vv = mm } } } @@ -218,9 +218,9 @@ func (d Decoder) unmarshalCSV(data []byte, v any) error { return err } - switch v.(type) { + switch vv := v.(type) { case *any: - *v.(*any) = records + *vv = records default: return fmt.Errorf("CSV cannot be unmarshaled into %T", v) @@ -257,11 +257,11 @@ func (d Decoder) unmarshalORG(data []byte, v any) error { frontMatter[k] = v } } - switch v.(type) { + switch vv := v.(type) { case *map[string]any: - *v.(*map[string]any) = frontMatter - default: - *v.(*any) = frontMatter + *vv = frontMatter + case *any: + *vv = frontMatter } return nil } diff --git a/parser/pageparser/pagelexer.go b/parser/pageparser/pagelexer.go index 64cd4bfc1..bd903b771 100644 --- a/parser/pageparser/pagelexer.go +++ b/parser/pageparser/pagelexer.go @@ -50,6 +50,9 @@ type pageLexer struct { // items delivered to client items Items + + // error delivered to the client + err error } // Implement the Result interface @@ -164,7 +167,6 @@ func (l *pageLexer) emit(t ItemType) { } l.append(Item{Type: t, low: l.start, high: l.pos}) - } // sends a string item back to the client. 
@@ -210,7 +212,6 @@ func (l *pageLexer) ignoreEscapesAndEmit(t ItemType, isString bool) { } l.start = l.pos - } // gets the current value (for debugging and error handling) @@ -227,7 +228,14 @@ var lf = []byte("\n") // nil terminates the parser func (l *pageLexer) errorf(format string, args ...any) stateFunc { - l.append(Item{Type: tError, Err: fmt.Errorf(format, args...)}) + l.append(Item{Type: tError, Err: fmt.Errorf(format, args...), low: l.start, high: l.pos}) + return nil +} + +// documentError can be used to signal a fatal error in the lexing process. +// nil terminates the parser +func (l *pageLexer) documentError(err error) stateFunc { + l.err = err return nil } @@ -465,6 +473,7 @@ func lexDone(l *pageLexer) stateFunc { return nil } +//lint:ignore U1000 useful for debugging func (l *pageLexer) printCurrentInput() { fmt.Printf("input[%d:]: %q", l.pos, string(l.input[l.pos:])) } @@ -475,10 +484,6 @@ func (l *pageLexer) index(sep []byte) int { return bytes.Index(l.input[l.pos:], sep) } -func (l *pageLexer) indexByte(sep byte) int { - return bytes.IndexByte(l.input[l.pos:], sep) -} - func (l *pageLexer) hasPrefix(prefix []byte) bool { return bytes.HasPrefix(l.input[l.pos:], prefix) } diff --git a/parser/pageparser/pagelexer_intro.go b/parser/pageparser/pagelexer_intro.go index 6e4617998..25af4170b 100644 --- a/parser/pageparser/pagelexer_intro.go +++ b/parser/pageparser/pagelexer_intro.go @@ -13,6 +13,10 @@ package pageparser +import "errors" + +var ErrPlainHTMLDocumentsNotSupported = errors.New("plain HTML documents not supported") + func lexIntroSection(l *pageLexer) stateFunc { l.summaryDivider = summaryDivider @@ -45,7 +49,7 @@ LOOP: l.emit(TypeIgnore) continue LOOP } else { - return l.errorf("plain HTML documents not supported") + return l.documentError(ErrPlainHTMLDocumentsNotSupported) } } break LOOP diff --git a/parser/pageparser/pageparser.go b/parser/pageparser/pageparser.go index 8d4c757af..9e8b6d803 100644 --- a/parser/pageparser/pageparser.go +++ b/parser/pageparser/pageparser.go @@ -34,9 +34,22 @@ type Result interface { var _ Result = (*pageLexer)(nil) -// Parse parses the page in the given reader according to the given Config. -func Parse(r io.Reader, cfg Config) (Result, error) { - return parseSection(r, cfg, lexIntroSection) +// ParseBytes parses the page in b according to the given Config. +func ParseBytes(b []byte, cfg Config) (Items, error) { + l, err := parseBytes(b, cfg, lexIntroSection) + if err != nil { + return nil, err + } + return l.items, l.err +} + +// ParseBytesMain parses b starting with the main section. +func ParseBytesMain(b []byte, cfg Config) (Items, error) { + l, err := parseBytes(b, cfg, lexMainSection) + if err != nil { + return nil, err + } + return l.items, l.err } type ContentFrontMatter struct { @@ -50,24 +63,29 @@ type ContentFrontMatter struct { func ParseFrontMatterAndContent(r io.Reader) (ContentFrontMatter, error) { var cf ContentFrontMatter - psr, err := Parse(r, Config{}) + input, err := io.ReadAll(r) + if err != nil { + return cf, fmt.Errorf("failed to read page content: %w", err) + } + + psr, err := ParseBytes(input, Config{}) if err != nil { return cf, err } var frontMatterSource []byte - iter := psr.Iterator() + iter := NewIterator(psr) walkFn := func(item Item) bool { if frontMatterSource != nil { // The rest is content. 
- cf.Content = psr.Input()[item.low:] + cf.Content = input[item.low:] // Done return false } else if item.IsFrontMatter() { cf.FrontMatterFormat = FormatFromFrontMatterType(item.Type) - frontMatterSource = item.Val(psr.Input()) + frontMatterSource = item.Val(input) } return true } @@ -106,7 +124,7 @@ func parseSection(r io.Reader, cfg Config, start stateFunc) (Result, error) { return parseBytes(b, cfg, start) } -func parseBytes(b []byte, cfg Config, start stateFunc) (Result, error) { +func parseBytes(b []byte, cfg Config, start stateFunc) (*pageLexer, error) { lexer := newPageLexer(b, start, cfg) lexer.run() return lexer, nil diff --git a/parser/pageparser/pageparser_intro_test.go b/parser/pageparser/pageparser_intro_test.go index 1b2d59ccc..df2f2579b 100644 --- a/parser/pageparser/pageparser_intro_test.go +++ b/parser/pageparser/pageparser_intro_test.go @@ -25,6 +25,7 @@ type lexerTest struct { name string input string items []typeText + err error } type typeText struct { @@ -58,34 +59,40 @@ var crLfReplacer = strings.NewReplacer("\r", "#", "\n", "$") // TODO(bep) a way to toggle ORG mode vs the rest. var frontMatterTests = []lexerTest{ - {"empty", "", []typeText{tstEOF}}, - {"Byte order mark", "\ufeff\nSome text.\n", []typeText{nti(TypeIgnore, "\ufeff"), tstSomeText, tstEOF}}, - {"HTML Document", ` `, []typeText{nti(tError, "plain HTML documents not supported")}}, - {"HTML Document with shortcode", `{{< sc1 >}}`, []typeText{nti(tError, "plain HTML documents not supported")}}, - {"No front matter", "\nSome text.\n", []typeText{tstSomeText, tstEOF}}, - {"YAML front matter", "---\nfoo: \"bar\"\n---\n\nSome text.\n", []typeText{tstFrontMatterYAML, tstSomeText, tstEOF}}, - {"YAML empty front matter", "---\n---\n\nSome text.\n", []typeText{nti(TypeFrontMatterYAML, ""), tstSomeText, tstEOF}}, - {"YAML commented out front matter", "\nSome text.\n", []typeText{nti(TypeIgnore, ""), tstSomeText, tstEOF}}, - {"YAML commented out front matter, no end", "\nSome text.\n", []typeText{nti(TypeIgnore, ""), tstSomeText, tstEOF}, nil}, + {"YAML commented out front matter, no end", "\nSome text.\n", []typeText{tstFrontMatterTOML, tstSomeText, tstSummaryDivider, nti(tText, "Some text.\n"), tstEOF}}, - {"Summary divider same line", "+++\nfoo = \"bar\"\n+++\n\nSome text.Some text.\n", []typeText{tstFrontMatterTOML, nti(tText, "\nSome text."), nti(TypeLeadSummaryDivider, ""), nti(tText, "Some text.\n"), tstEOF}}, + {"YAML front matter CRLF", "---\r\nfoo: \"bar\"\r\n---\n\nSome text.\n", []typeText{tstFrontMatterYAMLCRLF, tstSomeText, tstEOF}, nil}, + {"TOML front matter", "+++\nfoo = \"bar\"\n+++\n\nSome text.\n", []typeText{tstFrontMatterTOML, tstSomeText, tstEOF}, nil}, + {"JSON front matter", tstJSON + "\r\n\nSome text.\n", []typeText{tstFrontMatterJSON, tstSomeText, tstEOF}, nil}, + {"ORG front matter", tstORG + "\nSome text.\n", []typeText{tstFrontMatterORG, tstSomeText, tstEOF}, nil}, + {"Summary divider ORG", tstORG + "\nSome text.\n# more\nSome text.\n", []typeText{tstFrontMatterORG, tstSomeText, nti(TypeLeadSummaryDivider, "# more\n"), nti(tText, "Some text.\n"), tstEOF}, nil}, + {"Summary divider", "+++\nfoo = \"bar\"\n+++\n\nSome text.\n\nSome text.\n", []typeText{tstFrontMatterTOML, tstSomeText, tstSummaryDivider, nti(tText, "Some text.\n"), tstEOF}, nil}, + {"Summary divider same line", "+++\nfoo = \"bar\"\n+++\n\nSome text.Some text.\n", []typeText{tstFrontMatterTOML, nti(tText, "\nSome text."), nti(TypeLeadSummaryDivider, ""), nti(tText, "Some text.\n"), tstEOF}, nil}, // 
https://github.com/gohugoio/hugo/issues/5402 - {"Summary and shortcode, no space", "+++\nfoo = \"bar\"\n+++\n\nSome text.\n{{< sc1 >}}\nSome text.\n", []typeText{tstFrontMatterTOML, tstSomeText, nti(TypeLeadSummaryDivider, ""), tstLeftNoMD, tstSC1, tstRightNoMD, tstSomeText, tstEOF}}, + {"Summary and shortcode, no space", "+++\nfoo = \"bar\"\n+++\n\nSome text.\n{{< sc1 >}}\nSome text.\n", []typeText{tstFrontMatterTOML, tstSomeText, nti(TypeLeadSummaryDivider, ""), tstLeftNoMD, tstSC1, tstRightNoMD, tstSomeText, tstEOF}, nil}, // https://github.com/gohugoio/hugo/issues/5464 - {"Summary and shortcode only", "+++\nfoo = \"bar\"\n+++\n{{< sc1 >}}\n\n{{< sc2 >}}", []typeText{tstFrontMatterTOML, tstLeftNoMD, tstSC1, tstRightNoMD, tstNewline, tstSummaryDivider, tstLeftNoMD, tstSC2, tstRightNoMD, tstEOF}}, + {"Summary and shortcode only", "+++\nfoo = \"bar\"\n+++\n{{< sc1 >}}\n\n{{< sc2 >}}", []typeText{tstFrontMatterTOML, tstLeftNoMD, tstSC1, tstRightNoMD, tstNewline, tstSummaryDivider, tstLeftNoMD, tstSC2, tstRightNoMD, tstEOF}, nil}, } func TestFrontMatter(t *testing.T) { t.Parallel() c := qt.New(t) for i, test := range frontMatterTests { - items := collect([]byte(test.input), false, lexIntroSection) + items, err := collect([]byte(test.input), false, lexIntroSection) + if err != nil { + c.Assert(err, qt.Equals, test.err) + continue + } else { + c.Assert(test.err, qt.IsNil) + } if !equal(test.input, items, test.items) { got := itemsToString(items, []byte(test.input)) expected := testItemsToString(test.items) @@ -124,12 +131,15 @@ func testItemsToString(items []typeText) string { return crLfReplacer.Replace(sb.String()) } -func collectWithConfig(input []byte, skipFrontMatter bool, stateStart stateFunc, cfg Config) (items []Item) { +func collectWithConfig(input []byte, skipFrontMatter bool, stateStart stateFunc, cfg Config) (items []Item, err error) { l := newPageLexer(input, stateStart, cfg) l.run() iter := NewIterator(l.items) for { + if l.err != nil { + return nil, l.err + } item := iter.Next() items = append(items, item) if item.Type == tEOF || item.Type == tError { @@ -139,13 +149,13 @@ func collectWithConfig(input []byte, skipFrontMatter bool, stateStart stateFunc, return } -func collect(input []byte, skipFrontMatter bool, stateStart stateFunc) (items []Item) { +func collect(input []byte, skipFrontMatter bool, stateStart stateFunc) (items []Item, err error) { var cfg Config return collectWithConfig(input, skipFrontMatter, stateStart, cfg) } -func collectStringMain(input string) []Item { +func collectStringMain(input string) ([]Item, error) { return collect([]byte(input), true, lexMainSection) } diff --git a/parser/pageparser/pageparser_shortcode_test.go b/parser/pageparser/pageparser_shortcode_test.go index 26d836e32..327da30ee 100644 --- a/parser/pageparser/pageparser_shortcode_test.go +++ b/parser/pageparser/pageparser_shortcode_test.go @@ -20,46 +20,42 @@ import ( ) var ( - tstEOF = nti(tEOF, "") - tstLeftNoMD = nti(tLeftDelimScNoMarkup, "{{<") - tstRightNoMD = nti(tRightDelimScNoMarkup, ">}}") - tstLeftMD = nti(tLeftDelimScWithMarkup, "{{%") - tstRightMD = nti(tRightDelimScWithMarkup, "%}}") - tstSCClose = nti(tScClose, "/") - tstSC1 = nti(tScName, "sc1") - tstSC1Inline = nti(tScNameInline, "sc1.inline") - tstSC2Inline = nti(tScNameInline, "sc2.inline") - tstSC2 = nti(tScName, "sc2") - tstSC3 = nti(tScName, "sc3") - tstSCSlash = nti(tScName, "sc/sub") - tstParam1 = nti(tScParam, "param1") - tstParam2 = nti(tScParam, "param2") - tstParamBoolTrue = nti(tScParam, "true") - tstParamBoolFalse 
= nti(tScParam, "false") - tstParamInt = nti(tScParam, "32") - tstParamFloat = nti(tScParam, "3.14") - tstVal = nti(tScParamVal, "Hello World") - tstText = nti(tText, "Hello World") + tstEOF = nti(tEOF, "") + tstLeftNoMD = nti(tLeftDelimScNoMarkup, "{{<") + tstRightNoMD = nti(tRightDelimScNoMarkup, ">}}") + tstLeftMD = nti(tLeftDelimScWithMarkup, "{{%") + tstRightMD = nti(tRightDelimScWithMarkup, "%}}") + tstSCClose = nti(tScClose, "/") + tstSC1 = nti(tScName, "sc1") + tstSC1Inline = nti(tScNameInline, "sc1.inline") + tstSC2Inline = nti(tScNameInline, "sc2.inline") + tstSC2 = nti(tScName, "sc2") + tstSC3 = nti(tScName, "sc3") + tstSCSlash = nti(tScName, "sc/sub") + tstParam1 = nti(tScParam, "param1") + tstParam2 = nti(tScParam, "param2") + tstVal = nti(tScParamVal, "Hello World") + tstText = nti(tText, "Hello World") ) var shortCodeLexerTests = []lexerTest{ - {"empty", "", []typeText{tstEOF}}, - {"spaces", " \t\n", []typeText{nti(tText, " \t\n"), tstEOF}}, - {"text", `to be or not`, []typeText{nti(tText, "to be or not"), tstEOF}}, - {"no markup", `{{< sc1 >}}`, []typeText{tstLeftNoMD, tstSC1, tstRightNoMD, tstEOF}}, - {"with EOL", "{{< sc1 \n >}}", []typeText{tstLeftNoMD, tstSC1, tstRightNoMD, tstEOF}}, + {"empty", "", []typeText{tstEOF}, nil}, + {"spaces", " \t\n", []typeText{nti(tText, " \t\n"), tstEOF}, nil}, + {"text", `to be or not`, []typeText{nti(tText, "to be or not"), tstEOF}, nil}, + {"no markup", `{{< sc1 >}}`, []typeText{tstLeftNoMD, tstSC1, tstRightNoMD, tstEOF}, nil}, + {"with EOL", "{{< sc1 \n >}}", []typeText{tstLeftNoMD, tstSC1, tstRightNoMD, tstEOF}, nil}, - {"forward slash inside name", `{{< sc/sub >}}`, []typeText{tstLeftNoMD, tstSCSlash, tstRightNoMD, tstEOF}}, + {"forward slash inside name", `{{< sc/sub >}}`, []typeText{tstLeftNoMD, tstSCSlash, tstRightNoMD, tstEOF}, nil}, - {"simple with markup", `{{% sc1 %}}`, []typeText{tstLeftMD, tstSC1, tstRightMD, tstEOF}}, - {"with spaces", `{{< sc1 >}}`, []typeText{tstLeftNoMD, tstSC1, tstRightNoMD, tstEOF}}, - {"indented on new line", "Hello\n {{% sc1 %}}", []typeText{nti(tText, "Hello\n"), nti(tIndentation, " "), tstLeftMD, tstSC1, tstRightMD, tstEOF}}, - {"indented on new line tab", "Hello\n\t{{% sc1 %}}", []typeText{nti(tText, "Hello\n"), nti(tIndentation, "\t"), tstLeftMD, tstSC1, tstRightMD, tstEOF}}, - {"indented on first line", " {{% sc1 %}}", []typeText{nti(tIndentation, " "), tstLeftMD, tstSC1, tstRightMD, tstEOF}}, + {"simple with markup", `{{% sc1 %}}`, []typeText{tstLeftMD, tstSC1, tstRightMD, tstEOF}, nil}, + {"with spaces", `{{< sc1 >}}`, []typeText{tstLeftNoMD, tstSC1, tstRightNoMD, tstEOF}, nil}, + {"indented on new line", "Hello\n {{% sc1 %}}", []typeText{nti(tText, "Hello\n"), nti(tIndentation, " "), tstLeftMD, tstSC1, tstRightMD, tstEOF}, nil}, + {"indented on new line tab", "Hello\n\t{{% sc1 %}}", []typeText{nti(tText, "Hello\n"), nti(tIndentation, "\t"), tstLeftMD, tstSC1, tstRightMD, tstEOF}, nil}, + {"indented on first line", " {{% sc1 %}}", []typeText{nti(tIndentation, " "), tstLeftMD, tstSC1, tstRightMD, tstEOF}, nil}, {"mismatched rightDelim", `{{< sc1 %}}`, []typeText{ tstLeftNoMD, tstSC1, nti(tError, "unrecognized character in shortcode action: U+0025 '%'. 
Note: Parameters with non-alphanumeric args must be quoted"), - }}, + }, nil}, {"inner, markup", `{{% sc1 %}} inner {{% /sc1 %}}`, []typeText{ tstLeftMD, tstSC1, @@ -70,79 +66,79 @@ var shortCodeLexerTests = []lexerTest{ tstSC1, tstRightMD, tstEOF, - }}, + }, nil}, {"close, but no open", `{{< /sc1 >}}`, []typeText{ tstLeftNoMD, nti(tError, "got closing shortcode, but none is open"), - }}, + }, nil}, {"close wrong", `{{< sc1 >}}{{< /another >}}`, []typeText{ tstLeftNoMD, tstSC1, tstRightNoMD, tstLeftNoMD, tstSCClose, nti(tError, "closing tag for shortcode 'another' does not match start tag"), - }}, + }, nil}, {"close, but no open, more", `{{< sc1 >}}{{< /sc1 >}}{{< /another >}}`, []typeText{ tstLeftNoMD, tstSC1, tstRightNoMD, tstLeftNoMD, tstSCClose, tstSC1, tstRightNoMD, tstLeftNoMD, tstSCClose, nti(tError, "closing tag for shortcode 'another' does not match start tag"), - }}, + }, nil}, {"close with extra keyword", `{{< sc1 >}}{{< /sc1 keyword>}}`, []typeText{ tstLeftNoMD, tstSC1, tstRightNoMD, tstLeftNoMD, tstSCClose, tstSC1, nti(tError, "unclosed shortcode"), - }}, + }, nil}, {"float param, positional", `{{< sc1 3.14 >}}`, []typeText{ tstLeftNoMD, tstSC1, nti(tScParam, "3.14"), tstRightNoMD, tstEOF, - }}, + }, nil}, {"float param, named", `{{< sc1 param1=3.14 >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, nti(tScParamVal, "3.14"), tstRightNoMD, tstEOF, - }}, + }, nil}, {"named param, raw string", `{{< sc1 param1=` + "`" + "Hello World" + "`" + " >}}", []typeText{ tstLeftNoMD, tstSC1, tstParam1, nti(tScParamVal, "Hello World"), tstRightNoMD, tstEOF, - }}, + }, nil}, {"float param, named, space before", `{{< sc1 param1= 3.14 >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, nti(tScParamVal, "3.14"), tstRightNoMD, tstEOF, - }}, + }, nil}, {"Youtube id", `{{< sc1 -ziL-Q_456igdO-4 >}}`, []typeText{ tstLeftNoMD, tstSC1, nti(tScParam, "-ziL-Q_456igdO-4"), tstRightNoMD, tstEOF, - }}, + }, nil}, {"non-alphanumerics param quoted", `{{< sc1 "-ziL-.%QigdO-4" >}}`, []typeText{ tstLeftNoMD, tstSC1, nti(tScParam, "-ziL-.%QigdO-4"), tstRightNoMD, tstEOF, - }}, + }, nil}, {"raw string", `{{< sc1` + "`" + "Hello World" + "`" + ` >}}`, []typeText{ tstLeftNoMD, tstSC1, nti(tScParam, "Hello World"), tstRightNoMD, tstEOF, - }}, + }, nil}, {"raw string with newline", `{{< sc1` + "`" + `Hello World` + "`" + ` >}}`, []typeText{ tstLeftNoMD, tstSC1, nti(tScParam, `Hello World`), tstRightNoMD, tstEOF, - }}, + }, nil}, {"raw string with escape character", `{{< sc1` + "`" + `Hello \b World` + "`" + ` >}}`, []typeText{ tstLeftNoMD, tstSC1, nti(tScParam, `Hello \b World`), tstRightNoMD, tstEOF, - }}, + }, nil}, {"two params", `{{< sc1 param1 param2 >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, tstParam2, tstRightNoMD, tstEOF, - }}, + }, nil}, // issue #934 {"self-closing", `{{< sc1 />}}`, []typeText{ tstLeftNoMD, tstSC1, tstSCClose, tstRightNoMD, tstEOF, - }}, + }, nil}, // Issue 2498 {"multiple self-closing", `{{< sc1 />}}{{< sc1 />}}`, []typeText{ tstLeftNoMD, tstSC1, tstSCClose, tstRightNoMD, tstLeftNoMD, tstSC1, tstSCClose, tstRightNoMD, tstEOF, - }}, + }, nil}, {"self-closing with param", `{{< sc1 param1 />}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, tstSCClose, tstRightNoMD, tstEOF, - }}, + }, nil}, {"multiple self-closing with param", `{{< sc1 param1 />}}{{< sc1 param1 />}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, tstSCClose, tstRightNoMD, tstLeftNoMD, tstSC1, tstParam1, tstSCClose, tstRightNoMD, tstEOF, - }}, + }, nil}, {"multiple different self-closing with param", `{{< sc1 param1 />}}{{< sc2 
param1 />}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, tstSCClose, tstRightNoMD, tstLeftNoMD, tstSC2, tstParam1, tstSCClose, tstRightNoMD, tstEOF, - }}, + }, nil}, {"nested simple", `{{< sc1 >}}{{< sc2 >}}{{< /sc1 >}}`, []typeText{ tstLeftNoMD, tstSC1, tstRightNoMD, tstLeftNoMD, tstSC2, tstRightNoMD, tstLeftNoMD, tstSCClose, tstSC1, tstRightNoMD, tstEOF, - }}, + }, nil}, {"nested complex", `{{< sc1 >}}ab{{% sc2 param1 %}}cd{{< sc3 >}}ef{{< /sc3 >}}gh{{% /sc2 %}}ij{{< /sc1 >}}kl`, []typeText{ tstLeftNoMD, tstSC1, tstRightNoMD, nti(tText, "ab"), @@ -156,30 +152,31 @@ var shortCodeLexerTests = []lexerTest{ nti(tText, "ij"), tstLeftNoMD, tstSCClose, tstSC1, tstRightNoMD, nti(tText, "kl"), tstEOF, - }}, + }, nil}, {"two quoted params", `{{< sc1 "param nr. 1" "param nr. 2" >}}`, []typeText{ tstLeftNoMD, tstSC1, nti(tScParam, "param nr. 1"), nti(tScParam, "param nr. 2"), tstRightNoMD, tstEOF, - }}, + }, nil}, {"two named params", `{{< sc1 param1="Hello World" param2="p2Val">}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, tstVal, tstParam2, nti(tScParamVal, "p2Val"), tstRightNoMD, tstEOF, - }}, + }, nil}, {"escaped quotes", `{{< sc1 param1=\"Hello World\" >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, tstVal, tstRightNoMD, tstEOF, - }}, + }, nil}, {"escaped quotes, positional param", `{{< sc1 \"param1\" >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, tstRightNoMD, tstEOF, - }}, + }, nil}, {"escaped quotes inside escaped quotes", `{{< sc1 param1=\"Hello \"escaped\" World\" >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, nti(tScParamVal, `Hello `), nti(tError, `got positional parameter 'escaped'. Cannot mix named and positional parameters`), - }}, + }, nil}, { "escaped quotes inside nonescaped quotes", `{{< sc1 param1="Hello \"escaped\" World" >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, nti(tScParamVal, `Hello "escaped" World`), tstRightNoMD, tstEOF, }, + nil, }, { "escaped quotes inside nonescaped quotes in positional param", @@ -187,68 +184,69 @@ var shortCodeLexerTests = []lexerTest{ []typeText{ tstLeftNoMD, tstSC1, nti(tScParam, `Hello "escaped" World`), tstRightNoMD, tstEOF, }, + nil, }, {"escaped raw string, named param", `{{< sc1 param1=` + `\` + "`" + "Hello World" + `\` + "`" + ` >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, nti(tError, "unrecognized escape character"), - }}, + }, nil}, {"escaped raw string, positional param", `{{< sc1 param1 ` + `\` + "`" + "Hello World" + `\` + "`" + ` >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, nti(tError, "unrecognized escape character"), - }}, + }, nil}, {"two raw string params", `{{< sc1` + "`" + "Hello World" + "`" + "`" + "Second Param" + "`" + ` >}}`, []typeText{ tstLeftNoMD, tstSC1, nti(tScParam, "Hello World"), nti(tScParam, "Second Param"), tstRightNoMD, tstEOF, - }}, + }, nil}, {"unterminated quote", `{{< sc1 param2="Hello World>}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam2, nti(tError, "unterminated quoted string in shortcode parameter-argument: 'Hello World>}}'"), - }}, + }, nil}, {"unterminated raw string", `{{< sc1` + "`" + "Hello World" + ` >}}`, []typeText{ tstLeftNoMD, tstSC1, nti(tError, "unterminated raw string in shortcode parameter-argument: 'Hello World >}}'"), - }}, + }, nil}, {"unterminated raw string in second argument", `{{< sc1` + "`" + "Hello World" + "`" + "`" + "Second Param" + ` >}}`, []typeText{ tstLeftNoMD, tstSC1, nti(tScParam, "Hello World"), nti(tError, "unterminated raw string in shortcode parameter-argument: 'Second Param >}}'"), - }}, + }, nil}, {"one named param, one not", `{{< sc1 
param1="Hello World" p2 >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, tstVal, nti(tError, "got positional parameter 'p2'. Cannot mix named and positional parameters"), - }}, + }, nil}, {"one named param, one quoted positional param, both raw strings", `{{< sc1 param1=` + "`" + "Hello World" + "`" + "`" + "Second Param" + "`" + ` >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, tstVal, nti(tError, "got quoted positional parameter. Cannot mix named and positional parameters"), - }}, + }, nil}, {"one named param, one quoted positional param", `{{< sc1 param1="Hello World" "And Universe" >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, tstVal, nti(tError, "got quoted positional parameter. Cannot mix named and positional parameters"), - }}, + }, nil}, {"one quoted positional param, one named param", `{{< sc1 "param1" param2="And Universe" >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, nti(tError, "got named parameter 'param2'. Cannot mix named and positional parameters"), - }}, + }, nil}, {"ono positional param, one not", `{{< sc1 param1 param2="Hello World">}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, nti(tError, "got named parameter 'param2'. Cannot mix named and positional parameters"), - }}, + }, nil}, {"commented out", `{{}}`, []typeText{ nti(tText, "{{<"), nti(tText, " sc1 "), nti(tText, ">}}"), tstEOF, - }}, + }, nil}, {"commented out, with asterisk inside", `{{}}`, []typeText{ nti(tText, "{{<"), nti(tText, " sc1 \"**/*.pdf\" "), nti(tText, ">}}"), tstEOF, - }}, + }, nil}, {"commented out, missing close", `{{}}`, []typeText{ nti(tError, "comment must be closed"), - }}, + }, nil}, {"commented out, misplaced close", `{{}}*/`, []typeText{ nti(tError, "comment must be closed"), - }}, + }, nil}, // Inline shortcodes - {"basic inline", `{{< sc1.inline >}}Hello World{{< /sc1.inline >}}`, []typeText{tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstEOF}}, - {"basic inline with space", `{{< sc1.inline >}}Hello World{{< / sc1.inline >}}`, []typeText{tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstEOF}}, - {"inline self closing", `{{< sc1.inline >}}Hello World{{< /sc1.inline >}}Hello World{{< sc1.inline />}}`, []typeText{tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSC1Inline, tstSCClose, tstRightNoMD, tstEOF}}, + {"basic inline", `{{< sc1.inline >}}Hello World{{< /sc1.inline >}}`, []typeText{tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstEOF}, nil}, + {"basic inline with space", `{{< sc1.inline >}}Hello World{{< / sc1.inline >}}`, []typeText{tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstEOF}, nil}, + {"inline self closing", `{{< sc1.inline >}}Hello World{{< /sc1.inline >}}Hello World{{< sc1.inline />}}`, []typeText{tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSC1Inline, tstSCClose, tstRightNoMD, tstEOF}, nil}, {"inline self closing, then a new inline", `{{< sc1.inline >}}Hello World{{< /sc1.inline >}}Hello World{{< sc1.inline />}}{{< sc2.inline >}}Hello World{{< /sc2.inline >}}`, []typeText{ tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSC1Inline, tstSCClose, tstRightNoMD, tstLeftNoMD, tstSC2Inline, tstRightNoMD, 
tstText, tstLeftNoMD, tstSCClose, tstSC2Inline, tstRightNoMD, tstEOF, - }}, - {"inline with template syntax", `{{< sc1.inline >}}{{ .Get 0 }}{{ .Get 1 }}{{< /sc1.inline >}}`, []typeText{tstLeftNoMD, tstSC1Inline, tstRightNoMD, nti(tText, "{{ .Get 0 }}"), nti(tText, "{{ .Get 1 }}"), tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstEOF}}, - {"inline with nested shortcode (not supported)", `{{< sc1.inline >}}Hello World{{< sc1 >}}{{< /sc1.inline >}}`, []typeText{tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, nti(tError, "inline shortcodes do not support nesting")}}, - {"inline case mismatch", `{{< sc1.Inline >}}Hello World{{< /sc1.Inline >}}`, []typeText{tstLeftNoMD, nti(tError, "period in shortcode name only allowed for inline identifiers")}}, + }, nil}, + {"inline with template syntax", `{{< sc1.inline >}}{{ .Get 0 }}{{ .Get 1 }}{{< /sc1.inline >}}`, []typeText{tstLeftNoMD, tstSC1Inline, tstRightNoMD, nti(tText, "{{ .Get 0 }}"), nti(tText, "{{ .Get 1 }}"), tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstEOF}, nil}, + {"inline with nested shortcode (not supported)", `{{< sc1.inline >}}Hello World{{< sc1 >}}{{< /sc1.inline >}}`, []typeText{tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, nti(tError, "inline shortcodes do not support nesting")}, nil}, + {"inline case mismatch", `{{< sc1.Inline >}}Hello World{{< /sc1.Inline >}}`, []typeText{tstLeftNoMD, nti(tError, "period in shortcode name only allowed for inline identifiers")}, nil}, } func TestShortcodeLexer(t *testing.T) { @@ -256,7 +254,8 @@ func TestShortcodeLexer(t *testing.T) { c := qt.New(t) for i, test := range shortCodeLexerTests { t.Run(test.name, func(t *testing.T) { - items := collect([]byte(test.input), true, lexMainSection) + items, err := collect([]byte(test.input), true, lexMainSection) + c.Assert(err, qt.IsNil) if !equal(test.input, items, test.items) { got := itemsToString(items, []byte(test.input)) expected := testItemsToString(test.items) @@ -275,8 +274,9 @@ func BenchmarkShortcodeLexer(b *testing.B) { b.ResetTimer() for i := 0; i < b.N; i++ { for _, input := range testInputs { - items := collectWithConfig(input, true, lexMainSection, cfg) - if len(items) == 0 { + _, err := collectWithConfig(input, true, lexMainSection, cfg) + if err != nil { + b.Fatal(err) } } diff --git a/parser/pageparser/pageparser_test.go b/parser/pageparser/pageparser_test.go index c58018f0e..a50ab46e9 100644 --- a/parser/pageparser/pageparser_test.go +++ b/parser/pageparser/pageparser_test.go @@ -68,7 +68,8 @@ func TestIsProbablyItemsSource(t *testing.T) { c := qt.New(t) input := ` {{< foo >}} ` - items := collectStringMain(input) + items, err := collectStringMain(input) + c.Assert(err, qt.IsNil) c.Assert(IsProbablySourceOfItems([]byte(input), items), qt.IsTrue) c.Assert(IsProbablySourceOfItems(bytes.Repeat([]byte(" "), len(input)), items), qt.IsFalse) @@ -83,7 +84,6 @@ func TestHasShortcode(t *testing.T) { c.Assert(HasShortcode("aSDasd SDasd aSD\n\nasdfadf{{% foo %}}\nasdf"), qt.IsTrue) c.Assert(HasShortcode("{{}}"), qt.IsFalse) c.Assert(HasShortcode("{{%/* foo */%}}"), qt.IsFalse) - } func BenchmarkHasShortcode(b *testing.B) { @@ -100,5 +100,4 @@ func BenchmarkHasShortcode(b *testing.B) { HasShortcode(withoutShortcode) } }) - } diff --git a/publisher/publisher.go b/publisher/publisher.go index 39274b2a9..bbe65ff8a 100644 --- a/publisher/publisher.go +++ b/publisher/publisher.go @@ -169,7 +169,7 @@ func (p DestinationPublisher) createTransformerChain(f Descriptor) transform.Cha if isHTML { if f.LiveReloadBaseURL != nil { - 
transformers = append(transformers, livereloadinject.New(*f.LiveReloadBaseURL)) + transformers = append(transformers, livereloadinject.New(f.LiveReloadBaseURL)) } // This is only injected on the home page. diff --git a/related/integration_test.go b/related/integration_test.go index 4cb537f1f..2c71c1d1a 100644 --- a/related/integration_test.go +++ b/related/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -135,7 +135,6 @@ Related 2: 2 "Related 1: 0: p2: h0: First title|ref1|::END", "Related 2: 0: p5: h0: Common p3, p4, p5|common-p3-p4-p5|::END 1: p4: h0: Common p3, p4, p5|common-p3-p4-p5|::END", ) - } func BenchmarkRelatedSite(b *testing.B) { @@ -170,7 +169,6 @@ keywords: ['k%d'] } return fmt.Sprintf(base, n, rand.Intn(32)) - } for i := 1; i < 100; i++ { diff --git a/related/inverted_index.go b/related/inverted_index.go index fcebdc716..7e171cf53 100644 --- a/related/inverted_index.go +++ b/related/inverted_index.go @@ -265,7 +265,6 @@ func (idx *InvertedIndex) Finalize(ctx context.Context) error { idx.finalized = true return nil - } // queryElement holds the index name and keywords that can be used to compose a @@ -346,7 +345,6 @@ type SearchOpts struct { // threshold (normalize to 0..100) will be removed. // If an index name is provided, only that index will be queried. func (idx *InvertedIndex) Search(ctx context.Context, opts SearchOpts) ([]Document, error) { - var ( queryElements []queryElement configs IndicesConfig @@ -379,7 +377,6 @@ func (idx *InvertedIndex) Search(ctx context.Context, opts SearchOpts) ([]Docume keywords = append(keywords, FragmentKeyword(fragment)) } if opts.Document != nil { - if fp, ok := opts.Document.(FragmentProvider); ok { for _, fragment := range fp.Fragments(ctx).Identifiers { keywords = append(keywords, FragmentKeyword(fragment)) diff --git a/resources/docs.go b/resources/docs.go index f992893da..16fe34027 100644 --- a/resources/docs.go +++ b/resources/docs.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/resources/image.go b/resources/image.go index 6c34795f8..2e351bd28 100644 --- a/resources/image.go +++ b/resources/image.go @@ -20,25 +20,23 @@ import ( "image/color" "image/draw" "image/gif" - _ "image/gif" _ "image/png" "io" "os" - "path" - "path/filepath" "strings" "sync" color_extractor "github.com/marekm4/color-extractor" + "github.com/gohugoio/hugo/cache/filecache" "github.com/gohugoio/hugo/common/hstrings" "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/identity" "github.com/disintegration/gift" - "github.com/gohugoio/hugo/cache/filecache" "github.com/gohugoio/hugo/resources/images/exif" + "github.com/gohugoio/hugo/resources/internal" "github.com/gohugoio/hugo/resources/resource" @@ -50,9 +48,10 @@ import ( ) var ( - _ images.ImageResource = (*imageResource)(nil) - _ resource.Source = (*imageResource)(nil) - _ resource.Cloner = (*imageResource)(nil) + _ images.ImageResource = (*imageResource)(nil) + _ resource.Source = (*imageResource)(nil) + _ resource.Cloner = (*imageResource)(nil) + _ resource.NameOriginalProvider = (*imageResource)(nil) ) // imageResource represents an image resource. @@ -107,6 +106,7 @@ func (i *imageResource) getExif() *exif.ExifInfo { } create := func(info filecache.ItemInfo, w io.WriteCloser) (err error) { + defer w.Close() f, err := i.root.ReadSeekCloser() if err != nil { i.metaInitErr = err @@ -127,7 +127,7 @@ func (i *imageResource) getExif() *exif.ExifInfo { return enc.Encode(i.meta) } - _, i.metaInitErr = i.getSpec().ImageCache.fileCache.ReadOrCreate(key, read, create) + _, i.metaInitErr = i.getSpec().ImageCache.fcache.ReadOrCreate(key, read, create) }) if i.metaInitErr != nil { @@ -369,17 +369,14 @@ func (i *imageResource) doWithImageConfig(conf images.ImageConfig, f func(src im <-imageProcSem }() - errOp := conf.Action - errPath := i.getSourceFilename() - src, err := i.DecodeImage() if err != nil { - return nil, nil, &os.PathError{Op: errOp, Path: errPath, Err: err} + return nil, nil, &os.PathError{Op: conf.Action, Path: i.TargetPath(), Err: err} } converted, err := f(src) if err != nil { - return nil, nil, &os.PathError{Op: errOp, Path: errPath, Err: err} + return nil, nil, &os.PathError{Op: conf.Action, Path: i.TargetPath(), Err: err} } hasAlpha := !images.IsOpaque(converted) @@ -414,16 +411,15 @@ func (i *imageResource) doWithImageConfig(conf images.ImageConfig, f func(src im } ci := i.clone(converted) - ci.setBasePath(conf) + targetPath := i.relTargetPathFromConfig(conf) + ci.setTargetPath(targetPath) ci.Format = conf.TargetFormat ci.setMediaType(conf.TargetFormat.MediaType()) return ci, converted, nil }) if err != nil { - if i.root != nil && i.root.getFileInfo() != nil { - return nil, fmt.Errorf("image %q: %w", i.root.getFileInfo().Meta().Filename, err) - } + return nil, err } return img, nil } @@ -474,32 +470,25 @@ func (i *imageResource) clone(img image.Image) *imageResource { } } -func (i *imageResource) setBasePath(conf images.ImageConfig) { - i.getResourcePaths().relTargetDirFile = i.relTargetPathFromConfig(conf) -} - func (i *imageResource) getImageMetaCacheTargetPath() string { const imageMetaVersionNumber = 1 // Increment to invalidate the meta cache cfgHash := i.getSpec().imaging.Cfg.SourceHash - df := i.getResourcePaths().relTargetDirFile - if fi := i.getFileInfo(); fi != nil { - df.dir = filepath.Dir(fi.Meta().Path) - } - p1, _ := paths.FileAndExt(df.file) - h, _ := i.hash() + df := i.getResourcePaths() + p1, _ := paths.FileAndExt(df.File) + h := i.hash() idStr := identity.HashString(h, i.size(), 
imageMetaVersionNumber, cfgHash) - p := path.Join(df.dir, fmt.Sprintf("%s_%s.json", p1, idStr)) - return p + df.File = fmt.Sprintf("%s_%s.json", p1, idStr) + return df.TargetPath() } -func (i *imageResource) relTargetPathFromConfig(conf images.ImageConfig) dirFile { - p1, p2 := paths.FileAndExt(i.getResourcePaths().relTargetDirFile.file) +func (i *imageResource) relTargetPathFromConfig(conf images.ImageConfig) internal.ResourcePaths { + p1, p2 := paths.FileAndExt(i.getResourcePaths().File) if conf.TargetFormat != i.Format { p2 = conf.TargetFormat.DefaultExtension() } - h, _ := i.hash() + h := i.hash() idStr := fmt.Sprintf("_hu%s_%d", h, i.size()) // Do not change for no good reason. @@ -526,8 +515,8 @@ func (i *imageResource) relTargetPathFromConfig(conf images.ImageConfig) dirFile idStr = "" } - return dirFile{ - dir: i.getResourcePaths().relTargetDirFile.dir, - file: fmt.Sprintf("%s%s_%s%s", p1, idStr, key, p2), - } + rp := i.getResourcePaths() + rp.File = fmt.Sprintf("%s%s_%s%s", p1, idStr, key, p2) + + return rp } diff --git a/resources/image_cache.go b/resources/image_cache.go index f416f0230..f9770ffc1 100644 --- a/resources/image_cache.go +++ b/resources/image_cache.go @@ -16,12 +16,11 @@ package resources import ( "image" "io" - "path/filepath" - "strings" - "sync" + "github.com/gohugoio/hugo/common/hugio" "github.com/gohugoio/hugo/resources/images" + "github.com/gohugoio/hugo/cache/dynacache" "github.com/gohugoio/hugo/cache/filecache" "github.com/gohugoio/hugo/helpers" ) @@ -30,132 +29,88 @@ import ( type ImageCache struct { pathSpec *helpers.PathSpec - fileCache *filecache.Cache - - *imageCacheStore -} - -type imageCacheStore struct { - mu sync.RWMutex - store map[string]*resourceAdapter -} - -// WithPathSpec returns a copy of the ImageCache with the given PathSpec set. -func (c ImageCache) WithPathSpec(ps *helpers.PathSpec) *ImageCache { - c.pathSpec = ps - return &c -} - -func (c *ImageCache) deleteIfContains(s string) { - c.mu.Lock() - defer c.mu.Unlock() - s = c.normalizeKeyBase(s) - for k := range c.store { - if strings.Contains(k, s) { - delete(c.store, k) - } - } -} - -// The cache key is a lowercase path with Unix style slashes and it always starts with -// a leading slash. -func (c *ImageCache) normalizeKey(key string) string { - return "/" + c.normalizeKeyBase(key) -} - -func (c *ImageCache) normalizeKeyBase(key string) string { - return strings.Trim(strings.ToLower(filepath.ToSlash(key)), "/") -} - -func (c *ImageCache) clear() { - c.mu.Lock() - defer c.mu.Unlock() - c.store = make(map[string]*resourceAdapter) + fcache *filecache.Cache + mcache *dynacache.Partition[string, *resourceAdapter] } func (c *ImageCache) getOrCreate( parent *imageResource, conf images.ImageConfig, - createImage func() (*imageResource, image.Image, error)) (*resourceAdapter, error) { + createImage func() (*imageResource, image.Image, error), +) (*resourceAdapter, error) { relTarget := parent.relTargetPathFromConfig(conf) - memKey := parent.relTargetPathForRel(relTarget.path(), false, false, false) - memKey = c.normalizeKey(memKey) + relTargetPath := relTarget.TargetPath() + memKey := dynacache.CleanKey(relTargetPath) - // For the file cache we want to generate and store it once if possible. 
- fileKeyPath := relTarget - if fi := parent.root.getFileInfo(); fi != nil { - fileKeyPath.dir = filepath.ToSlash(filepath.Dir(fi.Meta().Path)) - } - fileKey := fileKeyPath.path() + v, err := c.mcache.GetOrCreate(memKey, func(key string) (*resourceAdapter, error) { + var img *imageResource - // First check the in-memory store, then the disk. - c.mu.RLock() - cachedImage, found := c.store[memKey] - c.mu.RUnlock() + // These funcs are protected by a named lock. + // read clones the parent to its new name and copies + // the content to the destinations. + read := func(info filecache.ItemInfo, r io.ReadSeeker) error { + img = parent.clone(nil) + targetPath := img.getResourcePaths() + targetPath.File = relTarget.File + img.setTargetPath(targetPath) + img.setOpenSource(func() (hugio.ReadSeekCloser, error) { + return c.fcache.Fs.Open(info.Name) + }) + img.setSourceFilenameIsHash(true) + img.setMediaType(conf.TargetFormat.MediaType()) - if found { - return cachedImage, nil - } + if err := img.InitConfig(r); err != nil { + return err + } - var img *imageResource - - // These funcs are protected by a named lock. - // read clones the parent to its new name and copies - // the content to the destinations. - read := func(info filecache.ItemInfo, r io.ReadSeeker) error { - img = parent.clone(nil) - rp := img.getResourcePaths() - rp.relTargetDirFile.file = relTarget.file - img.setSourceFilename(info.Name) - img.setSourceFilenameIsHash(true) - img.setMediaType(conf.TargetFormat.MediaType()) - - return img.InitConfig(r) - } - - // create creates the image and encodes it to the cache (w). - create := func(info filecache.ItemInfo, w io.WriteCloser) (err error) { - defer w.Close() - - var conv image.Image - img, conv, err = createImage() - if err != nil { - return + return nil } - rp := img.getResourcePaths() - rp.relTargetDirFile.file = relTarget.file - img.setSourceFilename(info.Name) - return img.EncodeTo(conf, conv, w) - } + // create creates the image and encodes it to the cache (w). + create := func(info filecache.ItemInfo, w io.WriteCloser) (err error) { + defer w.Close() - // Now look in the file cache. + var conv image.Image + img, conv, err = createImage() + if err != nil { + return + } + targetPath := img.getResourcePaths() + targetPath.File = relTarget.File + img.setTargetPath(targetPath) + img.setOpenSource(func() (hugio.ReadSeekCloser, error) { + return c.fcache.Fs.Open(info.Name) + }) + return img.EncodeTo(conf, conv, w) + } - // The definition of this counter is not that we have processed that amount - // (e.g. resized etc.), it can be fetched from file cache, - // but the count of processed image variations for this site. - c.pathSpec.ProcessingStats.Incr(&c.pathSpec.ProcessingStats.ProcessedImages) + // Now look in the file cache. - _, err := c.fileCache.ReadOrCreate(fileKey, read, create) - if err != nil { - return nil, err - } + // The definition of this counter is not that we have processed that amount + // (e.g. resized etc.), it can be fetched from file cache, + // but the count of processed image variations for this site. + c.pathSpec.ProcessingStats.Incr(&c.pathSpec.ProcessingStats.ProcessedImages) - // The file is now stored in this cache. 
- img.setSourceFs(c.fileCache.Fs) + _, err := c.fcache.ReadOrCreate(relTargetPath, read, create) + if err != nil { + return nil, err + } - c.mu.Lock() - if cachedImage, found = c.store[memKey]; found { - c.mu.Unlock() - return cachedImage, nil - } + imgAdapter := newResourceAdapter(parent.getSpec(), true, img) - imgAdapter := newResourceAdapter(parent.getSpec(), true, img) - c.store[memKey] = imgAdapter - c.mu.Unlock() + return imgAdapter, nil + }) - return imgAdapter, nil + return v, err } -func newImageCache(fileCache *filecache.Cache, ps *helpers.PathSpec) *ImageCache { - return &ImageCache{fileCache: fileCache, pathSpec: ps, imageCacheStore: &imageCacheStore{store: make(map[string]*resourceAdapter)}} +func newImageCache(fileCache *filecache.Cache, memCache *dynacache.Cache, ps *helpers.PathSpec) *ImageCache { + return &ImageCache{ + fcache: fileCache, + mcache: dynacache.GetOrCreatePartition[string, *resourceAdapter]( + memCache, + "/imgs", + dynacache.OptionsPartition{ClearWhen: dynacache.ClearOnChange, Weight: 70}, + ), + pathSpec: ps, + } } diff --git a/resources/image_extended_test.go b/resources/image_extended_test.go index 4da603fc4..429e51fb6 100644 --- a/resources/image_extended_test.go +++ b/resources/image_extended_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/resources/image_test.go b/resources/image_test.go index 96cc07b3b..44861d629 100644 --- a/resources/image_test.go +++ b/resources/image_test.go @@ -22,7 +22,6 @@ import ( "math/big" "math/rand" "os" - "path" "path/filepath" "runtime" "strconv" @@ -31,7 +30,6 @@ import ( "testing" "time" - "github.com/gohugoio/hugo/resources" "github.com/gohugoio/hugo/resources/images/webp" "github.com/gohugoio/hugo/common/paths" @@ -80,8 +78,7 @@ var eq = qt.CmpEquals( func TestImageTransformBasic(t *testing.T) { c := qt.New(t) - spec, image := fetchSunset(c) - fileCache := spec.FileCaches.ImageCache().Fs + _, image := fetchSunset(c) assertWidthHeight := func(img images.ImageResource, w, h int) { assertWidthHeight(c, img, w, h) @@ -104,12 +101,10 @@ func TestImageTransformBasic(t *testing.T) { resized0x, err := image.Resize("x200") c.Assert(err, qt.IsNil) assertWidthHeight(resized0x, 320, 200) - assertFileCache(c, fileCache, path.Base(resized0x.RelPermalink()), 320, 200) resizedx0, err := image.Resize("200x") c.Assert(err, qt.IsNil) assertWidthHeight(resizedx0, 200, 125) - assertFileCache(c, fileCache, path.Base(resizedx0.RelPermalink()), 200, 125) resizedAndRotated, err := image.Resize("x200 r90") c.Assert(err, qt.IsNil) @@ -203,8 +198,7 @@ func TestImageProcess(t *testing.T) { func TestImageTransformFormat(t *testing.T) { c := qt.New(t) - spec, image := fetchSunset(c) - fileCache := spec.FileCaches.ImageCache().Fs + _, image := fetchSunset(c) assertExtWidthHeight := func(img images.ImageResource, ext string, w, h int) { c.Helper() @@ -226,8 +220,6 @@ func TestImageTransformFormat(t *testing.T) { c.Assert(imagePng.Name(), qt.Equals, "sunset.jpg") c.Assert(imagePng.MediaType().String(), qt.Equals, "image/png") - assertFileCache(c, fileCache, path.Base(imagePng.RelPermalink()), 450, 281) - imageGif, err := image.Resize("225x gif") c.Assert(err, qt.IsNil) c.Assert(imageGif.RelPermalink(), qt.Equals, "/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_225x0_resize_linear.gif") @@ -235,8 +227,6 @@ 
func TestImageTransformFormat(t *testing.T) { assertExtWidthHeight(imageGif, ".gif", 225, 141) c.Assert(imageGif.Name(), qt.Equals, "sunset.jpg") c.Assert(imageGif.MediaType().String(), qt.Equals, "image/gif") - - assertFileCache(c, fileCache, path.Base(imageGif.RelPermalink()), 225, 141) } // https://github.com/gohugoio/hugo/issues/5730 @@ -275,7 +265,7 @@ func TestImagePermalinkPublishOrder(t *testing.T) { resized, err := original.Resize("100x50") c.Assert(err, qt.IsNil) - check1(resized.(images.ImageResource)) + check1(resized) if !checkOriginalFirst { check2(original) @@ -386,27 +376,6 @@ func TestImageTransformConcurrent(t *testing.T) { wg.Wait() } -func TestImageWithMetadata(t *testing.T) { - c := qt.New(t) - - _, image := fetchSunset(c) - - meta := []map[string]any{ - { - "title": "My Sunset", - "name": "Sunset #:counter", - "src": "*.jpg", - }, - } - - c.Assert(resources.AssignMetadata(meta, image), qt.IsNil) - c.Assert(image.Name(), qt.Equals, "Sunset #1") - - resized, err := image.Resize("200x") - c.Assert(err, qt.IsNil) - c.Assert(resized.Name(), qt.Equals, "Sunset #1") -} - func TestImageResize8BitPNG(t *testing.T) { c := qt.New(t) @@ -424,38 +393,6 @@ func TestImageResize8BitPNG(t *testing.T) { c.Assert(resized.Width(), qt.Equals, 800) } -func TestImageResizeInSubPath(t *testing.T) { - c := qt.New(t) - - spec, image := fetchImage(c, "sub/gohugoio2.png") - - c.Assert(image.MediaType(), eq, media.Builtin.PNGType) - c.Assert(image.RelPermalink(), qt.Equals, "/a/sub/gohugoio2.png") - c.Assert(image.ResourceType(), qt.Equals, "image") - c.Assert(image.Exif(), qt.IsNil) - - resized, err := image.Resize("101x101") - c.Assert(err, qt.IsNil) - c.Assert(resized.MediaType().Type, qt.Equals, "image/png") - c.Assert(resized.RelPermalink(), qt.Equals, "/a/sub/gohugoio2_hu0e1b9e4a4be4d6f86c7b37b9ccce3fbc_73886_101x101_resize_linear_3.png") - c.Assert(resized.Width(), qt.Equals, 101) - c.Assert(resized.Exif(), qt.IsNil) - - publishedImageFilename := filepath.Clean(resized.RelPermalink()) - - assertImageFile(c, spec.BaseFs.PublishFs, publishedImageFilename, 101, 101) - c.Assert(spec.BaseFs.PublishFs.Remove(publishedImageFilename), qt.IsNil) - - // Clear mem cache to simulate reading from the file cache. - spec.ClearCaches() - - resizedAgain, err := image.Resize("101x101") - c.Assert(err, qt.IsNil) - c.Assert(resizedAgain.RelPermalink(), qt.Equals, "/a/sub/gohugoio2_hu0e1b9e4a4be4d6f86c7b37b9ccce3fbc_73886_101x101_resize_linear_3.png") - c.Assert(resizedAgain.Width(), qt.Equals, 101) - assertImageFile(c, spec.BaseFs.PublishFs, publishedImageFilename, 101, 101) -} - func TestSVGImage(t *testing.T) { c := qt.New(t) spec := newTestResourceSpec(specDescriptor{c: c}) @@ -640,7 +577,7 @@ func TestImageOperationsGoldenWebp(t *testing.T) { return } - dir1 := filepath.Join(workDir, "resources/_gen/images") + dir1 := filepath.Join(workDir, "resources/_gen/images/a") dir2 := filepath.FromSlash("testdata/golden_webp") assetGoldenDirs(c, dir1, dir2) @@ -694,8 +631,10 @@ func TestImageOperationsGolden(t *testing.T) { opacity30, err := orig.Filter(f.Opacity(30)) c.Assert(err, qt.IsNil) overlay, err := sunset.Filter(f.Overlay(opacity30.(images.ImageSource), 20, 20)) + c.Assert(err, qt.IsNil) rel := overlay.RelPermalink() c.Assert(rel, qt.Not(qt.Equals), "") + } // A simple Gif file (no animation). 
@@ -782,7 +721,7 @@ func TestImageOperationsGolden(t *testing.T) { return } - dir1 := filepath.Join(workDir, "resources/_gen/images") + dir1 := filepath.Join(workDir, "resources/_gen/images/a/") dir2 := filepath.FromSlash("testdata/golden") assetGoldenDirs(c, dir1, dir2) @@ -798,7 +737,7 @@ func assetGoldenDirs(c *qt.C, dir1, dir2 string) { for i, fi1 := range dirinfos1 { fi2 := dirinfos2[i] - c.Assert(fi1.Name(), qt.Equals, fi2.Name()) + c.Assert(fi1.Name(), qt.Equals, fi2.Name(), qt.Commentf("i=%d", i)) f1, err := os.Open(filepath.Join(dir1, fi1.Name())) c.Assert(err, qt.IsNil) diff --git a/resources/images/auto_orient.go b/resources/images/auto_orient.go index 194efefb5..ed86979e1 100644 --- a/resources/images/auto_orient.go +++ b/resources/images/auto_orient.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/resources/images/exif/exif.go b/resources/images/exif/exif.go index 90198eeed..af92366ca 100644 --- a/resources/images/exif/exif.go +++ b/resources/images/exif/exif.go @@ -117,7 +117,7 @@ func NewDecoder(options ...func(*Decoder) error) (*Decoder, error) { func (d *Decoder) Decode(r io.Reader) (ex *ExifInfo, err error) { defer func() { if r := recover(); r != nil { - err = fmt.Errorf("Exif failed: %v", r) + err = fmt.Errorf("exif failed: %v", r) } }() diff --git a/resources/images/exif/exif_test.go b/resources/images/exif/exif_test.go index 821367550..64c5a39e3 100644 --- a/resources/images/exif/exif_test.go +++ b/resources/images/exif/exif_test.go @@ -58,6 +58,7 @@ func TestExif(t *testing.T) { c.Assert(err, qt.IsNil) x2 := &ExifInfo{} err = json.Unmarshal(data, x2) + c.Assert(err, qt.IsNil) c.Assert(x2, eq, x) } @@ -135,7 +136,6 @@ var eq = qt.CmpEquals( ) func TestIssue10738(t *testing.T) { - c := qt.New(t) testFunc := func(path, include string) any { @@ -153,6 +153,7 @@ func TestIssue10738(t *testing.T) { c.Assert(err, qt.IsNil) x2 := &ExifInfo{} err = json.Unmarshal(data, x2) + c.Assert(err, qt.IsNil) c.Assert(x2, eq, x) @@ -300,15 +301,13 @@ func TestIssue10738(t *testing.T) { for _, tt := range tests { c.Run(tt.name, func(c *qt.C) { got := testFunc(tt.args.path, tt.args.include) - switch got.(type) { + switch v := got.(type) { case float64: - eTime, ok := got.(float64) - c.Assert(ok, qt.Equals, true) - c.Assert(eTime, qt.Equals, float64(tt.want.vN)) + c.Assert(v, qt.Equals, float64(tt.want.vN)) case *big.Rat: - eTime, ok := got.(*big.Rat) - c.Assert(ok, qt.Equals, true) - c.Assert(eTime, eq, big.NewRat(tt.want.vN, tt.want.vD)) + c.Assert(v, eq, big.NewRat(tt.want.vN, tt.want.vD)) + default: + c.Fatalf("unexpected type: %T", got) } }) } diff --git a/resources/images/image_resource.go b/resources/images/image_resource.go index be40418b1..e6be757c2 100644 --- a/resources/images/image_resource.go +++ b/resources/images/image_resource.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/resources/images/opacity.go b/resources/images/opacity.go index 4b60e30a4..482476c5b 100644 --- a/resources/images/opacity.go +++ b/resources/images/opacity.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. 
+// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/resources/images/padding.go b/resources/images/padding.go index 153d0bd82..4399312f8 100644 --- a/resources/images/padding.go +++ b/resources/images/padding.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/resources/images/process.go b/resources/images/process.go index 984ac3c8f..fb2e995ce 100644 --- a/resources/images/process.go +++ b/resources/images/process.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/resources/integration_test.go b/resources/integration_test.go index 51a003625..9540b0976 100644 --- a/resources/integration_test.go +++ b/resources/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -69,7 +69,7 @@ anigif: {{ $anigif.RelPermalink }}|{{ $anigif.Width }}|{{ $anigif.Height }}|{{ $ assertImages() - b.EditFileReplace("content/mybundle/index.md", func(s string) string { return strings.ReplaceAll(s, "Bundle", "BUNDLE") }) + b.EditFileReplaceFunc("content/mybundle/index.md", func(s string) string { return strings.ReplaceAll(s, "Bundle", "BUNDLE") }) b.Build() assertImages() diff --git a/resources/internal/resourcepaths.go b/resources/internal/resourcepaths.go new file mode 100644 index 000000000..21c65e2ca --- /dev/null +++ b/resources/internal/resourcepaths.go @@ -0,0 +1,107 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package internal + +import ( + "path" + "path/filepath" + "strings" + + "github.com/gohugoio/hugo/common/paths" +) + +// ResourcePaths holds path information for a resouce. +// All directories in here have Unix-style slashes, with leading slash, but no trailing slash. +// Empty directories are represented with an empty string. +type ResourcePaths struct { + // This is the directory component for the target file or link. + Dir string + + // Any base directory for the target file. Will be prepended to Dir. + BaseDirTarget string + + // This is the directory component for the link will be prepended to Dir. + BaseDirLink string + + // Set when publishing in a multihost setup. + TargetBasePaths []string + + // This is the File component, e.g. "data.json". 
+ File string +} + +func (d ResourcePaths) join(p ...string) string { + var s string + for i, pp := range p { + if pp == "" { + continue + } + if i > 0 && !strings.HasPrefix(pp, "/") { + pp = "/" + pp + } + s += pp + + } + if !strings.HasPrefix(s, "/") { + s = "/" + s + } + return s +} + +func (d ResourcePaths) TargetLink() string { + return d.join(d.BaseDirLink, d.Dir, d.File) +} + +func (d ResourcePaths) TargetPath() string { + return d.join(d.BaseDirTarget, d.Dir, d.File) +} + +func (d ResourcePaths) Path() string { + return d.join(d.Dir, d.File) +} + +func (d ResourcePaths) TargetPaths() []string { + if len(d.TargetBasePaths) == 0 { + return []string{d.TargetPath()} + } + + var paths []string + for _, p := range d.TargetBasePaths { + paths = append(paths, p+d.TargetPath()) + } + return paths +} + +func (d ResourcePaths) TargetFilenames() []string { + filenames := d.TargetPaths() + for i, p := range filenames { + filenames[i] = filepath.FromSlash(p) + } + return filenames +} + +func (d ResourcePaths) FromTargetPath(targetPath string) ResourcePaths { + targetPath = filepath.ToSlash(targetPath) + dir, file := path.Split(targetPath) + dir = paths.ToSlashPreserveLeading(dir) + if dir == "/" { + dir = "" + } + d.Dir = dir + d.File = file + d.BaseDirLink = "" + d.BaseDirTarget = "" + + return d +} diff --git a/resources/kinds/kinds.go b/resources/kinds/kinds.go index b035cdd29..2660ec719 100644 --- a/resources/kinds/kinds.go +++ b/resources/kinds/kinds.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -34,10 +34,11 @@ const ( // The following are (currently) temporary nodes, // i.e. nodes we create just to render in isolation. - KindRSS = "rss" - KindSitemap = "sitemap" - KindRobotsTXT = "robotstxt" - Kind404 = "404" + KindRSS = "rss" + KindSitemap = "sitemap" + KindSitemapIndex = "sitemapindex" + KindRobotsTXT = "robotstxt" + KindStatus404 = "404" ) var ( @@ -77,7 +78,7 @@ var kindMapTemporary = map[string]string{ KindRSS: KindRSS, KindSitemap: KindSitemap, KindRobotsTXT: KindRobotsTXT, - Kind404: Kind404, + KindStatus404: KindStatus404, } // GetKindMain gets the page kind given a string, empty if not found. @@ -94,6 +95,16 @@ func GetKindAny(s string) string { return kindMapTemporary[strings.ToLower(s)] } +// IsBranch returns whether the given kind is a branch node. +func IsBranch(kind string) bool { + switch kind { + case KindHome, KindSection, KindTaxonomy, KindTerm: + return true + default: + return false + } +} + // IsDeprecatedAndReplacedWith returns the new kind if the given kind is deprecated. func IsDeprecatedAndReplacedWith(s string) string { s = strings.ToLower(s) diff --git a/resources/kinds/kinds_test.go b/resources/kinds/kinds_test.go index c2868d617..a0fe42ff8 100644 --- a/resources/kinds/kinds_test.go +++ b/resources/kinds/kinds_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
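For illustration of the joining rules documented on ResourcePaths above (Unix-style slashes, leading slash, no trailing slash, empty components skipped), a hypothetical unit test that would have to live inside resources/internal; it is not part of this patch, but the expected values follow directly from join, Path, TargetPath, TargetLink and TargetPaths as defined above:

package internal

import "testing"

func TestResourcePathsSketch(t *testing.T) {
	rp := ResourcePaths{
		Dir:           "/blog",
		BaseDirTarget: "/base-target",
		BaseDirLink:   "/base-link",
		File:          "data.json",
	}
	if got, want := rp.Path(), "/blog/data.json"; got != want {
		t.Errorf("Path() = %q, want %q", got, want)
	}
	if got, want := rp.TargetPath(), "/base-target/blog/data.json"; got != want {
		t.Errorf("TargetPath() = %q, want %q", got, want)
	}
	if got, want := rp.TargetLink(), "/base-link/blog/data.json"; got != want {
		t.Errorf("TargetLink() = %q, want %q", got, want)
	}
	if got := rp.TargetPaths(); len(got) != 1 || got[0] != "/base-target/blog/data.json" {
		t.Errorf("TargetPaths() = %v", got)
	}
}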
diff --git a/resources/page/page.go b/resources/page/page.go index b5af489f1..56ba04d74 100644 --- a/resources/page/page.go +++ b/resources/page/page.go @@ -19,16 +19,14 @@ import ( "context" "html/template" - "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/markup/converter" "github.com/gohugoio/hugo/markup/tableofcontents" "github.com/gohugoio/hugo/config" - "github.com/gohugoio/hugo/tpl" "github.com/gohugoio/hugo/common/maps" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/compare" - "github.com/gohugoio/hugo/hugofs/files" "github.com/gohugoio/hugo/navigation" "github.com/gohugoio/hugo/related" @@ -122,7 +120,7 @@ type ContentRenderer interface { type FileProvider interface { // File returns the source file for this Page, // or a zero File if this Page is not backed by a file. - File() source.File + File() *source.File } // GetPageProvider provides the GetPage method. @@ -133,9 +131,6 @@ type GetPageProvider interface { // This will return nil when no page could be found, and will return // an error if the ref is ambiguous. GetPage(ref string) (Page, error) - - // GetPageWithTemplateInfo is for internal use only. - GetPageWithTemplateInfo(info tpl.Info, ref string) (Page, error) } // GitInfoProvider provides Git info. @@ -166,6 +161,12 @@ type OutputFormatsProvider interface { OutputFormats() OutputFormats } +// PageProvider provides access to a Page. +// Implemented by shortcodes and others. +type PageProvider interface { + Page() Page +} + // Page is the core interface in Hugo and what you get as the top level data context in your templates. type Page interface { ContentProvider @@ -175,7 +176,7 @@ type Page interface { type PageFragment interface { resource.ResourceLinksProvider - resource.ResourceMetaProvider + resource.ResourceNameTitleProvider } // PageMetaProvider provides page metadata, typically provided via front matter. @@ -187,7 +188,7 @@ type PageMetaProvider interface { Aliases() []string // BundleType returns the bundle type: `leaf`, `branch` or an empty string. - BundleType() files.ContentClass + BundleType() string // A configured description. Description() string @@ -224,9 +225,8 @@ type PageMetaProvider interface { // to the source of this Page. It will be relative to any content root. Path() string - // This is just a temporary bridge method. Use Path in templates. - // Pathc is for internal usage only. - Pathc() string + // This is for internal use only. + PathInfo() *paths.Path // The slug, typically defined in front matter. Slug() string @@ -240,13 +240,6 @@ type PageMetaProvider interface { // Section returns the first path element below the content root. Section() string - // Returns a slice of sections (directories if it's a file) to this - // Page. - SectionsEntries() []string - - // SectionsPath is SectionsEntries joined with a /. - SectionsPath() string - // Sitemap returns the sitemap configuration for this page. // This is for internal use only. Sitemap() config.SitemapConfig @@ -332,9 +325,6 @@ type PageWithoutContent interface { // e.g. GetTerms("categories") GetTerms(taxonomy string) Pages - // Used in change/dependency tracking. - identity.Provider - // HeadingsFiltered returns the headings for this page when a filter is set. // This is currently only triggered with the Related content feature // and the "fragments" type of index. @@ -430,7 +420,7 @@ type TranslationsProvider interface { type TreeProvider interface { // IsAncestor returns whether the current page is an ancestor of other. 
// Note that this method is not relevant for taxonomy lists and taxonomy terms pages. - IsAncestor(other any) (bool, error) + IsAncestor(other any) bool // CurrentSection returns the page's current section or the page itself if home or a section. // Note that this will return nil for pages that is not regular, home or section pages. @@ -438,7 +428,7 @@ type TreeProvider interface { // IsDescendant returns whether the current page is a descendant of other. // Note that this method is not relevant for taxonomy lists and taxonomy terms pages. - IsDescendant(other any) (bool, error) + IsDescendant(other any) bool // FirstSection returns the section on level 1 below home, e.g. "/docs". // For the home page, this will return itself. @@ -447,7 +437,7 @@ type TreeProvider interface { // InSection returns whether other is in the current section. // Note that this will always return false for pages that are // not either regular, home or section pages. - InSection(other any) (bool, error) + InSection(other any) bool // Parent returns a section's parent section or a page's section. // To get a section's subsections, see Page's Sections method. @@ -463,6 +453,13 @@ type TreeProvider interface { // Page returns a reference to the Page itself, kept here mostly // for legacy reasons. Page() Page + + // Returns a slice of sections (directories if it's a file) to this + // Page. + SectionsEntries() []string + + // SectionsPath is SectionsEntries joined with a /. + SectionsPath() string } // PageWithContext is a Page with a context.Context. diff --git a/resources/page/page_generate/generate_page_wrappers.go b/resources/page/page_generate/generate_page_wrappers.go index 2449cf28d..d720b8a42 100644 --- a/resources/page/page_generate/generate_page_wrappers.go +++ b/resources/page/page_generate/generate_page_wrappers.go @@ -14,19 +14,14 @@ package page_generate import ( - "bytes" "errors" "fmt" "os" "path/filepath" "reflect" - "github.com/gohugoio/hugo/common/maps" - "github.com/gohugoio/hugo/codegen" "github.com/gohugoio/hugo/resources/page" - "github.com/gohugoio/hugo/resources/resource" - "github.com/gohugoio/hugo/source" ) const header = `// Copyright 2019 The Hugo Authors. All rights reserved. @@ -46,7 +41,7 @@ const header = `// Copyright 2019 The Hugo Authors. All rights reserved. ` var ( - pageInterface = reflect.TypeOf((*page.Page)(nil)).Elem() + pageInterface = reflect.TypeOf((*page.PageMetaProvider)(nil)).Elem() packageDir = filepath.FromSlash("resources/page") ) @@ -56,10 +51,6 @@ func Generate(c *codegen.Inspector) error { return fmt.Errorf("failed to generate JSON marshaler: %w", err) } - if err := generateFileIsZeroWrappers(c); err != nil { - return fmt.Errorf("failed to generate file wrappers: %w", err) - } - return nil } @@ -73,25 +64,7 @@ func generateMarshalJSON(c *codegen.Inspector) error { includes := []reflect.Type{pageInterface} - // Exclude these methods - excludes := []reflect.Type{ - // Leave this out for now. We need to revisit the author issue. - reflect.TypeOf((*page.AuthorProvider)(nil)).Elem(), - - reflect.TypeOf((*resource.ErrProvider)(nil)).Elem(), - - // navigation.PageMenus - - // Prevent loops. 
- reflect.TypeOf((*page.SitesProvider)(nil)).Elem(), - reflect.TypeOf((*page.Positioner)(nil)).Elem(), - - reflect.TypeOf((*page.ChildCareProvider)(nil)).Elem(), - reflect.TypeOf((*page.TreeProvider)(nil)).Elem(), - reflect.TypeOf((*page.InSectionPositioner)(nil)).Elem(), - reflect.TypeOf((*page.PaginatorProvider)(nil)).Elem(), - reflect.TypeOf((*maps.Scratcher)(nil)).Elem(), - } + excludes := []reflect.Type{} methods := c.MethodsFromTypes( includes, @@ -123,71 +96,6 @@ package page return nil } -func generateFileIsZeroWrappers(c *codegen.Inspector) error { - filename := filepath.Join(c.ProjectRootDir, packageDir, "zero_file.autogen.go") - f, err := os.Create(filename) - if err != nil { - return err - } - defer f.Close() - - // Generate warnings for zero file access - - warning := func(name string, tp reflect.Type) string { - msg := fmt.Sprintf(".File.%s on zero object. Wrap it in if or with: {{ with .File }}{{ .%s }}{{ end }}", name, name) - - // We made this a Warning in 0.92.0. - // When we remove this construct in 0.93.0, people will get a nil pointer. - return fmt.Sprintf("z.log.Warnln(%q)", msg) - } - - var buff bytes.Buffer - - methods := c.MethodsFromTypes([]reflect.Type{reflect.TypeOf((*source.File)(nil)).Elem()}, nil) - - for _, m := range methods { - if m.Name == "IsZero" || m.Name == "Classifier" { - continue - } - fmt.Fprint(&buff, m.DeclarationNamed("zeroFile")) - fmt.Fprintln(&buff, " {") - fmt.Fprintf(&buff, "\t%s\n", warning(m.Name, m.Owner)) - if len(m.Out) > 0 { - fmt.Fprintln(&buff, "\treturn") - } - fmt.Fprintln(&buff, "}") - - } - - pkgImports := append(methods.Imports(), "github.com/gohugoio/hugo/common/loggers", "github.com/gohugoio/hugo/source") - - fmt.Fprintf(f, `%s - -package page - -%s - -// ZeroFile represents a zero value of source.File with warnings if invoked. -type zeroFile struct { - log loggers.Logger -} - -func NewZeroFile(log loggers.Logger) source.File { - return zeroFile{log: log} -} - -func (zeroFile) IsZero() bool { - return true -} - - -%s - -`, header, importsString(pkgImports), buff.String()) - - return nil -} - func importsString(imps []string) string { if len(imps) == 0 { return "" diff --git a/resources/page/page_lazy_contentprovider.go b/resources/page/page_lazy_contentprovider.go index 2d647e90c..665b2d003 100644 --- a/resources/page/page_lazy_contentprovider.go +++ b/resources/page/page_lazy_contentprovider.go @@ -77,7 +77,6 @@ func (lcp *LazyContentProvider) Reset() { func (lcp *LazyContentProvider) TableOfContents(ctx context.Context) template.HTML { lcp.init.Do(ctx) return lcp.cp.TableOfContents(ctx) - } func (lcp *LazyContentProvider) Fragments(ctx context.Context) *tableofcontents.Fragments { @@ -131,7 +130,7 @@ func (lcp *LazyContentProvider) Len(ctx context.Context) int { } func (lcp *LazyContentProvider) Render(ctx context.Context, layout ...string) (template.HTML, error) { - lcp.init.Do(context.TODO()) + lcp.init.Do(ctx) return lcp.cp.Render(ctx, layout...) 
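With the generated zeroFile wrapper removed above, a nil *source.File is now the signal that a page has no backing file, so call sites use a plain nil check where they previously relied on File().IsZero() (the same pattern the sort and permalink changes later in this patch use). A small hypothetical helper in the page package, for illustration only:

package page

// filenameOrEmpty is a hypothetical helper, not part of this patch: with
// File() returning *source.File, a nil check replaces the old
// File().IsZero() guard before touching file metadata such as Filename().
func filenameOrEmpty(p Page) string {
	if f := p.File(); f != nil {
		return f.Filename()
	}
	return ""
}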
} @@ -149,6 +148,7 @@ func (lcp *LazyContentProvider) ParseContent(ctx context.Context, content []byte lcp.init.Do(ctx) return lcp.cp.ParseContent(ctx, content) } + func (lcp *LazyContentProvider) RenderContent(ctx context.Context, content []byte, doc any) (converter.ResultRender, bool, error) { lcp.init.Do(ctx) return lcp.cp.RenderContent(ctx, content, doc) diff --git a/resources/page/page_marshaljson.autogen.go b/resources/page/page_marshaljson.autogen.go index bc9b5cc0f..18ed2a75d 100644 --- a/resources/page/page_marshaljson.autogen.go +++ b/resources/page/page_marshaljson.autogen.go @@ -17,27 +17,12 @@ package page import ( "encoding/json" - "github.com/gohugoio/hugo/common/maps" - "github.com/gohugoio/hugo/config" - "github.com/gohugoio/hugo/hugofs/files" - "github.com/gohugoio/hugo/identity" - "github.com/gohugoio/hugo/langs" - "github.com/gohugoio/hugo/media" - "github.com/gohugoio/hugo/navigation" - "github.com/gohugoio/hugo/source" "time" + + "github.com/gohugoio/hugo/config" ) func MarshalPageToJSON(p Page) ([]byte, error) { - rawContent := p.RawContent() - resourceType := p.ResourceType() - mediaType := p.MediaType() - permalink := p.Permalink() - relPermalink := p.RelPermalink() - name := p.Name() - title := p.Title() - params := p.Params() - data := p.Data() date := p.Date() lastmod := p.Lastmod() publishDate := p.PublishDate() @@ -54,128 +39,65 @@ func MarshalPageToJSON(p Page) ([]byte, error) { isNode := p.IsNode() isPage := p.IsPage() path := p.Path() - pathc := p.Pathc() + pathc := p.Path() slug := p.Slug() lang := p.Lang() isSection := p.IsSection() section := p.Section() - sectionsEntries := p.SectionsEntries() - sectionsPath := p.SectionsPath() sitemap := p.Sitemap() typ := p.Type() weight := p.Weight() - language := p.Language() - file := p.File() - gitInfo := p.GitInfo() - codeOwners := p.CodeOwners() - outputFormats := p.OutputFormats() - alternativeOutputFormats := p.AlternativeOutputFormats() - menus := p.Menus() - translationKey := p.TranslationKey() - isTranslated := p.IsTranslated() - allTranslations := p.AllTranslations() - translations := p.Translations() - store := p.Store() - getIdentity := p.GetIdentity() s := struct { - RawContent string - ResourceType string - MediaType media.Type - Permalink string - RelPermalink string - Name string - Title string - Params maps.Params - Data interface{} - Date time.Time - Lastmod time.Time - PublishDate time.Time - ExpiryDate time.Time - Aliases []string - BundleType files.ContentClass - Description string - Draft bool - IsHome bool - Keywords []string - Kind string - Layout string - LinkTitle string - IsNode bool - IsPage bool - Path string - Pathc string - Slug string - Lang string - IsSection bool - Section string - SectionsEntries []string - SectionsPath string - Sitemap config.SitemapConfig - Type string - Weight int - Language *langs.Language - File source.File - GitInfo source.GitInfo - CodeOwners []string - OutputFormats OutputFormats - AlternativeOutputFormats OutputFormats - Menus navigation.PageMenus - TranslationKey string - IsTranslated bool - AllTranslations Pages - Translations Pages - Store *maps.Scratch - GetIdentity identity.Identity + Date time.Time + Lastmod time.Time + PublishDate time.Time + ExpiryDate time.Time + Aliases []string + BundleType string + Description string + Draft bool + IsHome bool + Keywords []string + Kind string + Layout string + LinkTitle string + IsNode bool + IsPage bool + Path string + Pathc string + Slug string + Lang string + IsSection bool + Section string + Sitemap 
config.SitemapConfig + Type string + Weight int }{ - RawContent: rawContent, - ResourceType: resourceType, - MediaType: mediaType, - Permalink: permalink, - RelPermalink: relPermalink, - Name: name, - Title: title, - Params: params, - Data: data, - Date: date, - Lastmod: lastmod, - PublishDate: publishDate, - ExpiryDate: expiryDate, - Aliases: aliases, - BundleType: bundleType, - Description: description, - Draft: draft, - IsHome: isHome, - Keywords: keywords, - Kind: kind, - Layout: layout, - LinkTitle: linkTitle, - IsNode: isNode, - IsPage: isPage, - Path: path, - Pathc: pathc, - Slug: slug, - Lang: lang, - IsSection: isSection, - Section: section, - SectionsEntries: sectionsEntries, - SectionsPath: sectionsPath, - Sitemap: sitemap, - Type: typ, - Weight: weight, - Language: language, - File: file, - GitInfo: gitInfo, - CodeOwners: codeOwners, - OutputFormats: outputFormats, - AlternativeOutputFormats: alternativeOutputFormats, - Menus: menus, - TranslationKey: translationKey, - IsTranslated: isTranslated, - AllTranslations: allTranslations, - Translations: translations, - Store: store, - GetIdentity: getIdentity, + Date: date, + Lastmod: lastmod, + PublishDate: publishDate, + ExpiryDate: expiryDate, + Aliases: aliases, + BundleType: bundleType, + Description: description, + Draft: draft, + IsHome: isHome, + Keywords: keywords, + Kind: kind, + Layout: layout, + LinkTitle: linkTitle, + IsNode: isNode, + IsPage: isPage, + Path: path, + Pathc: pathc, + Slug: slug, + Lang: lang, + IsSection: isSection, + Section: section, + Sitemap: sitemap, + Type: typ, + Weight: weight, } return json.Marshal(&s) diff --git a/resources/page/page_matcher.go b/resources/page/page_matcher.go index 4c861cbd7..f5e8e2697 100644 --- a/resources/page/page_matcher.go +++ b/resources/page/page_matcher.go @@ -63,7 +63,7 @@ func (m PageMatcher) Matches(p Page) bool { if m.Path != "" { g, err := glob.GetGlob(m.Path) // TODO(bep) Path() vs filepath vs leading slash. - p := strings.ToLower(filepath.ToSlash(p.Pathc())) + p := strings.ToLower(filepath.ToSlash(p.Path())) if !(strings.HasPrefix(p, "/")) { p = "/" + p } @@ -123,7 +123,6 @@ func DecodeCascadeConfig(in any) (*config.ConfigNamespace[[]PageMatcherParamsCon } return config.DecodeNamespace[[]PageMatcherParamsConfig](in, buildConfig) - } // DecodeCascade decodes in which could be either a map or a slice of maps. @@ -161,7 +160,6 @@ func mapToPageMatcherParamsConfig(m map[string]any) (PageMatcherParamsConfig, er } } return pcfg, pcfg.init() - } // decodePageMatcher decodes m into v. diff --git a/resources/page/page_nop.go b/resources/page/page_nop.go index 735d6eea8..a8f42e4d3 100644 --- a/resources/page/page_nop.go +++ b/resources/page/page_nop.go @@ -21,19 +21,17 @@ import ( "html/template" "time" - "github.com/gohugoio/hugo/identity" + "github.com/gohugoio/hugo/hugofs/files" "github.com/gohugoio/hugo/markup/converter" "github.com/gohugoio/hugo/markup/tableofcontents" - "github.com/gohugoio/hugo/hugofs/files" - "github.com/gohugoio/hugo/tpl" - "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/navigation" "github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/common/maps" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/source" "github.com/gohugoio/hugo/config" @@ -59,6 +57,8 @@ var ( // PageNop implements Page, but does nothing. 
type nopPage int +var noOpPathInfo = paths.Parse(files.ComponentFolderContent, "no-op.md") + func (p *nopPage) Err() resource.ResourceError { return nil } @@ -103,7 +103,7 @@ func (p *nopPage) BaseFileName() string { return "" } -func (p *nopPage) BundleType() files.ContentClass { +func (p *nopPage) BundleType() string { return "" } @@ -163,10 +163,8 @@ func (p *nopPage) Extension() string { return "" } -var nilFile *source.FileInfo - -func (p *nopPage) File() source.File { - return nilFile +func (p *nopPage) File() *source.File { + return nil } func (p *nopPage) FileInfo() hugofs.FileMetaInfo { @@ -189,10 +187,6 @@ func (p *nopPage) GetPage(ref string) (Page, error) { return nil, nil } -func (p *nopPage) GetPageWithTemplateInfo(info tpl.Info, ref string) (Page, error) { - return nil, nil -} - func (p *nopPage) GetParam(key string) any { return nil } @@ -221,16 +215,16 @@ func (p *nopPage) Hugo() (h hugo.HugoInfo) { return } -func (p *nopPage) InSection(other any) (bool, error) { - return false, nil +func (p *nopPage) InSection(other any) bool { + return false } -func (p *nopPage) IsAncestor(other any) (bool, error) { - return false, nil +func (p *nopPage) IsAncestor(other any) bool { + return false } -func (p *nopPage) IsDescendant(other any) (bool, error) { - return false, nil +func (p *nopPage) IsDescendant(other any) bool { + return false } func (p *nopPage) IsDraft() bool { @@ -357,8 +351,8 @@ func (p *nopPage) Path() string { return "" } -func (p *nopPage) Pathc() string { - return "" +func (p *nopPage) PathInfo() *paths.Path { + return noOpPathInfo } func (p *nopPage) Permalink() string { @@ -529,13 +523,10 @@ func (p *nopPage) WordCount(context.Context) int { return 0 } -func (p *nopPage) GetIdentity() identity.Identity { - return identity.NewPathIdentity("content", "foo/bar.md") -} - func (p *nopPage) Fragments(context.Context) *tableofcontents.Fragments { return nil } + func (p *nopPage) HeadingsFiltered(context.Context) tableofcontents.Headings { return nil } @@ -550,6 +541,7 @@ func (r *nopContentRenderer) ParseAndRenderContent(ctx context.Context, content func (r *nopContentRenderer) ParseContent(ctx context.Context, content []byte) (converter.ResultParse, bool, error) { return nil, false, nil } + func (r *nopContentRenderer) RenderContent(ctx context.Context, content []byte, doc any) (converter.ResultRender, bool, error) { return nil, false, nil } diff --git a/resources/page/page_paths.go b/resources/page/page_paths.go index 1bc16fe35..8052287c6 100644 --- a/resources/page/page_paths.go +++ b/resources/page/page_paths.go @@ -17,7 +17,9 @@ import ( "path" "path/filepath" "strings" + "sync" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/common/urls" "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/output" @@ -39,16 +41,14 @@ type TargetPathDescriptor struct { Type output.Format Kind string - Sections []string + Path *paths.Path + Section *paths.Path // For regular content pages this is either // 1) the Slug, if set, // 2) the file base name (TranslationBaseName). BaseName string - // Source directory. - Dir string - // Typically a language prefix added to file paths. PrefixFilePath string @@ -74,7 +74,6 @@ type TargetPathDescriptor struct { // TODO(bep) move this type. type TargetPaths struct { - // Where to store the file on disk relative to the publish dir. OS slashes. 
TargetFilename string @@ -107,237 +106,347 @@ func (p TargetPaths) PermalinkForOutputFormat(s *helpers.PathSpec, f output.Form return s.PermalinkForBaseURL(p.Link, baseURLstr) } -func isHtmlIndex(s string) bool { - return strings.HasSuffix(s, "/index.html") -} - func CreateTargetPaths(d TargetPathDescriptor) (tp TargetPaths) { - if d.Type.Name == "" { - panic("CreateTargetPath: missing type") - } - // Normalize all file Windows paths to simplify what's next. - if helpers.FilePathSeparator != slash { - d.Dir = filepath.ToSlash(d.Dir) + if helpers.FilePathSeparator != "/" { d.PrefixFilePath = filepath.ToSlash(d.PrefixFilePath) - } - if d.URL != "" && !strings.HasPrefix(d.URL, "/") { + if !d.Type.Root && d.URL != "" && !strings.HasPrefix(d.URL, "/") { // Treat this as a context relative URL d.ForcePrefix = true } - pagePath := slash - fullSuffix := d.Type.MediaType.FirstSuffix.FullSuffix + if d.URL != "" { + d.URL = filepath.ToSlash(d.URL) + if strings.Contains(d.URL, "..") { + d.URL = path.Join("/", d.URL) + } + } - var ( - pagePathDir string - link string - linkDir string - ) + if d.Type.Root && !d.ForcePrefix { + d.PrefixFilePath = "" + d.PrefixLink = "" + } + + pb := getPagePathBuilder(d) + defer putPagePathBuilder(pb) + + pb.fullSuffix = d.Type.MediaType.FirstSuffix.FullSuffix // The top level index files, i.e. the home page etc., needs // the index base even when uglyURLs is enabled. needsBase := true - isUgly := d.UglyURLs && !d.Type.NoUgly - baseNameSameAsType := d.BaseName != "" && d.BaseName == d.Type.BaseName + pb.isUgly = (d.UglyURLs || d.Type.Ugly) && !d.Type.NoUgly + pb.baseNameSameAsType = !d.Path.IsBundle() && d.BaseName != "" && d.BaseName == d.Type.BaseName - if d.ExpandedPermalink == "" && baseNameSameAsType { - isUgly = true + if d.ExpandedPermalink == "" && pb.baseNameSameAsType { + pb.isUgly = true } - if d.Kind != kinds.KindPage && d.URL == "" && len(d.Sections) > 0 { + if d.Type == output.HTTPStatusHTMLFormat || d.Type == output.SitemapFormat || d.Type == output.RobotsTxtFormat { + pb.noSubResources = true + } else if d.Kind != kinds.KindPage && d.URL == "" && d.Section.Base() != "/" { if d.ExpandedPermalink != "" { - pagePath = pjoin(pagePath, d.ExpandedPermalink) + pb.Add(d.ExpandedPermalink) } else { - pagePath = pjoin(d.Sections...) + pb.Add(d.Section.Base()) } needsBase = false } if d.Type.Path != "" { - pagePath = pjoin(pagePath, d.Type.Path) + pb.Add(d.Type.Path) } if d.Kind != kinds.KindHome && d.URL != "" { - pagePath = pjoin(pagePath, d.URL) + pb.Add(paths.FieldsSlash(d.URL)...) 
if d.Addends != "" { - pagePath = pjoin(pagePath, d.Addends) + pb.Add(d.Addends) } - pagePathDir = pagePath - link = pagePath hasDot := strings.Contains(d.URL, ".") - hasSlash := strings.HasSuffix(d.URL, slash) + hasSlash := strings.HasSuffix(d.URL, "/") if hasSlash || !hasDot { - pagePath = pjoin(pagePath, d.Type.BaseName+fullSuffix) + pb.Add(d.Type.BaseName + pb.fullSuffix) } else if hasDot { - pagePathDir = path.Dir(pagePathDir) + pb.fullSuffix = paths.Ext(d.URL) } - if !isHtmlIndex(pagePath) { - link = pagePath - } else if !hasSlash { - link += slash + if pb.IsHtmlIndex() { + pb.linkUpperOffset = 1 } - linkDir = pagePathDir - if d.ForcePrefix { // Prepend language prefix if not already set in URL - if d.PrefixFilePath != "" && !strings.HasPrefix(d.URL, slash+d.PrefixFilePath) { - pagePath = pjoin(d.PrefixFilePath, pagePath) - pagePathDir = pjoin(d.PrefixFilePath, pagePathDir) + if d.PrefixFilePath != "" && !strings.HasPrefix(d.URL, "/"+d.PrefixFilePath) { + pb.prefixPath = d.PrefixFilePath } - if d.PrefixLink != "" && !strings.HasPrefix(d.URL, slash+d.PrefixLink) { - link = pjoin(d.PrefixLink, link) - linkDir = pjoin(d.PrefixLink, linkDir) + if d.PrefixLink != "" && !strings.HasPrefix(d.URL, "/"+d.PrefixLink) { + pb.prefixLink = d.PrefixLink } } - - } else if d.Kind == kinds.KindPage { - + } else if !kinds.IsBranch(d.Kind) { if d.ExpandedPermalink != "" { - pagePath = pjoin(pagePath, d.ExpandedPermalink) + pb.Add(d.ExpandedPermalink) } else { - if d.Dir != "" { - pagePath = pjoin(pagePath, d.Dir) + if dir := d.Path.ContainerDir(); dir != "" { + pb.Add(dir) } if d.BaseName != "" { - pagePath = pjoin(pagePath, d.BaseName) + pb.Add(d.BaseName) + } else { + pb.Add(d.Path.BaseNameNoIdentifier()) } } if d.Addends != "" { - pagePath = pjoin(pagePath, d.Addends) + pb.Add(d.Addends) } - link = pagePath - - // TODO(bep) this should not happen after the fix in https://github.com/gohugoio/hugo/issues/4870 - // but we may need some more testing before we can remove it. - if baseNameSameAsType { - link = strings.TrimSuffix(link, d.BaseName) - } - - pagePathDir = link - link = link + slash - linkDir = pagePathDir - - if isUgly { - pagePath = addSuffix(pagePath, fullSuffix) + if pb.isUgly { + pb.ConcatLast(pb.fullSuffix) } else { - pagePath = pjoin(pagePath, d.Type.BaseName+fullSuffix) + pb.Add(d.Type.BaseName + pb.fullSuffix) } - if !isHtmlIndex(pagePath) { - link = pagePath + if pb.IsHtmlIndex() { + pb.linkUpperOffset = 1 } if d.PrefixFilePath != "" { - pagePath = pjoin(d.PrefixFilePath, pagePath) - pagePathDir = pjoin(d.PrefixFilePath, pagePathDir) + pb.prefixPath = d.PrefixFilePath } if d.PrefixLink != "" { - link = pjoin(d.PrefixLink, link) - linkDir = pjoin(d.PrefixLink, linkDir) + pb.prefixLink = d.PrefixLink } - } else { if d.Addends != "" { - pagePath = pjoin(pagePath, d.Addends) + pb.Add(d.Addends) } needsBase = needsBase && d.Addends == "" - // No permalink expansion etc. 
for node type pages (for now) - base := "" - - if needsBase || !isUgly { - base = d.Type.BaseName + if needsBase || !pb.isUgly { + pb.Add(d.Type.BaseName + pb.fullSuffix) + } else { + pb.ConcatLast(pb.fullSuffix) } - pagePathDir = pagePath - link = pagePath - linkDir = pagePathDir - - if base != "" { - pagePath = path.Join(pagePath, addSuffix(base, fullSuffix)) - } else { - pagePath = addSuffix(pagePath, fullSuffix) - } - - if !isHtmlIndex(pagePath) { - link = pagePath - } else { - link += slash + if pb.IsHtmlIndex() { + pb.linkUpperOffset = 1 } if d.PrefixFilePath != "" { - pagePath = pjoin(d.PrefixFilePath, pagePath) - pagePathDir = pjoin(d.PrefixFilePath, pagePathDir) + pb.prefixPath = d.PrefixFilePath } if d.PrefixLink != "" { - link = pjoin(d.PrefixLink, link) - linkDir = pjoin(d.PrefixLink, linkDir) + pb.prefixLink = d.PrefixLink } } - pagePath = pjoin(slash, pagePath) - pagePathDir = strings.TrimSuffix(path.Join(slash, pagePathDir), slash) - - hadSlash := strings.HasSuffix(link, slash) - link = strings.Trim(link, slash) - if hadSlash { - link += slash - } - - if !strings.HasPrefix(link, slash) { - link = slash + link - } - - linkDir = strings.TrimSuffix(path.Join(slash, linkDir), slash) - // if page URL is explicitly set in frontmatter, // preserve its value without sanitization if d.Kind != kinds.KindPage || d.URL == "" { // Note: MakePathSanitized will lower case the path if // disablePathToLower isn't set. - pagePath = d.PathSpec.MakePathSanitized(pagePath) - pagePathDir = d.PathSpec.MakePathSanitized(pagePathDir) - link = d.PathSpec.MakePathSanitized(link) - linkDir = d.PathSpec.MakePathSanitized(linkDir) + pb.Sanitize() } + link := pb.Link() + pagePath := pb.PathFile() + tp.TargetFilename = filepath.FromSlash(pagePath) - tp.SubResourceBaseTarget = filepath.FromSlash(pagePathDir) - tp.SubResourceBaseLink = linkDir - tp.Link = d.PathSpec.URLizeFilename(link) + if !pb.noSubResources { + tp.SubResourceBaseTarget = pb.PathDir() + tp.SubResourceBaseLink = pb.LinkDir() + } + if d.URL != "" { + tp.Link = paths.URLEscape(link) + } else { + // This is slightly faster for when we know we don't have any + // query or scheme etc. + tp.Link = paths.PathEscape(link) + } if tp.Link == "" { - tp.Link = slash + tp.Link = "/" } return } -func addSuffix(s, suffix string) string { - return strings.Trim(s, slash) + suffix +// When adding state here, remember to update putPagePathBuilder. +type pagePathBuilder struct { + els []string + + d TargetPathDescriptor + + // Builder state. + isUgly bool + baseNameSameAsType bool + noSubResources bool + fullSuffix string // File suffix including any ".". + prefixLink string + prefixPath string + linkUpperOffset int } -// Like path.Join, but preserves one trailing slash if present. -func pjoin(elem ...string) string { - hadSlash := strings.HasSuffix(elem[len(elem)-1], slash) - joined := path.Join(elem...) - if hadSlash && !strings.HasSuffix(joined, slash) { - return joined + slash +func (p *pagePathBuilder) Add(el ...string) { + // Filter empty and slashes. + n := 0 + for _, e := range el { + if e != "" && e != slash { + el[n] = e + n++ + } } - return joined + el = el[:n] + + p.els = append(p.els, el...) 
+} + +func (p *pagePathBuilder) ConcatLast(s string) { + if len(p.els) == 0 { + p.Add(s) + return + } + old := p.els[len(p.els)-1] + if old == "" { + p.els[len(p.els)-1] = s + return + } + if old[len(old)-1] == '/' { + old = old[:len(old)-1] + } + p.els[len(p.els)-1] = old + s +} + +func (p *pagePathBuilder) IsHtmlIndex() bool { + return p.Last() == "index.html" +} + +func (p *pagePathBuilder) Last() string { + if p.els == nil { + return "" + } + return p.els[len(p.els)-1] +} + +func (p *pagePathBuilder) Link() string { + link := p.Path(p.linkUpperOffset) + + if p.baseNameSameAsType { + link = strings.TrimSuffix(link, p.d.BaseName) + } + + if p.prefixLink != "" { + link = "/" + p.prefixLink + link + } + + if p.linkUpperOffset > 0 && !strings.HasSuffix(link, "/") { + link += "/" + } + + return link +} + +func (p *pagePathBuilder) LinkDir() string { + if p.noSubResources { + return "" + } + + pathDir := p.PathDirBase() + + if p.prefixLink != "" { + pathDir = "/" + p.prefixLink + pathDir + } + + return pathDir +} + +func (p *pagePathBuilder) Path(upperOffset int) string { + upper := len(p.els) + if upperOffset > 0 { + upper -= upperOffset + } + pth := path.Join(p.els[:upper]...) + return paths.AddLeadingSlash(pth) +} + +func (p *pagePathBuilder) PathDir() string { + dir := p.PathDirBase() + if p.prefixPath != "" { + dir = "/" + p.prefixPath + dir + } + return dir +} + +func (p *pagePathBuilder) PathDirBase() string { + if p.noSubResources { + return "" + } + + dir := p.Path(0) + isIndex := strings.HasPrefix(p.Last(), p.d.Type.BaseName+".") + + if isIndex { + dir = paths.Dir(dir) + } else { + dir = strings.TrimSuffix(dir, p.fullSuffix) + } + + if dir == "/" { + dir = "" + } + + return dir +} + +func (p *pagePathBuilder) PathFile() string { + dir := p.Path(0) + if p.prefixPath != "" { + dir = "/" + p.prefixPath + dir + } + return dir +} + +func (p *pagePathBuilder) Prepend(el ...string) { + p.els = append(p.els[:0], append(el, p.els[0:]...)...) +} + +func (p *pagePathBuilder) Sanitize() { + for i, el := range p.els { + p.els[i] = p.d.PathSpec.MakePathSanitized(el) + } +} + +var pagePathBuilderPool = &sync.Pool{ + New: func() any { + return &pagePathBuilder{} + }, +} + +func getPagePathBuilder(d TargetPathDescriptor) *pagePathBuilder { + b := pagePathBuilderPool.Get().(*pagePathBuilder) + b.d = d + return b +} + +func putPagePathBuilder(b *pagePathBuilder) { + b.els = b.els[:0] + b.fullSuffix = "" + b.baseNameSameAsType = false + b.isUgly = false + b.noSubResources = false + b.prefixLink = "" + b.prefixPath = "" + b.linkUpperOffset = 0 + pagePathBuilderPool.Put(b) } diff --git a/resources/page/page_paths_test.go b/resources/page/page_paths_test.go deleted file mode 100644 index dd6457f77..000000000 --- a/resources/page/page_paths_test.go +++ /dev/null @@ -1,295 +0,0 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
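getPagePathBuilder and putPagePathBuilder above follow the usual sync.Pool recycle pattern: fetch a builder, use it for a single CreateTargetPaths call, reset every field, and put it back so the next call can reuse the allocation. A self-contained sketch of that pattern with a made-up builder type, illustrative only:

package main

import (
	"fmt"
	"strings"
	"sync"
)

// builder stands in for pagePathBuilder; only the pooling pattern matters here.
type builder struct {
	els []string
}

var builderPool = &sync.Pool{
	New: func() any { return &builder{} },
}

func getBuilder() *builder { return builderPool.Get().(*builder) }

func putBuilder(b *builder) {
	// Reset all state before returning the value to the pool, as
	// putPagePathBuilder does, so a recycled builder never leaks old paths.
	b.els = b.els[:0]
	builderPool.Put(b)
}

func joinPath(els ...string) string {
	b := getBuilder()
	defer putBuilder(b)
	b.els = append(b.els, els...)
	return "/" + strings.Join(b.els, "/")
}

func main() {
	fmt.Println(joinPath("docs", "page", "index.html")) // /docs/page/index.html
}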
- -package page_test - -import ( - "fmt" - "path/filepath" - "strings" - "testing" - - "github.com/gohugoio/hugo/media" - "github.com/gohugoio/hugo/resources/kinds" - "github.com/gohugoio/hugo/resources/page" - - "github.com/gohugoio/hugo/output" -) - -func TestPageTargetPath(t *testing.T) { - pathSpec := newTestPathSpec() - - noExtNoDelimMediaType := media.WithDelimiterAndSuffixes(media.Builtin.TextType, "", "") - noExtNoDelimMediaType.Delimiter = "" - - // Netlify style _redirects - noExtDelimFormat := output.Format{ - Name: "NER", - MediaType: noExtNoDelimMediaType, - BaseName: "_redirects", - } - - for _, langPrefixPath := range []string{"", "no"} { - for _, langPrefixLink := range []string{"", "no"} { - for _, uglyURLs := range []bool{false, true} { - - tests := []struct { - name string - d page.TargetPathDescriptor - expected page.TargetPaths - }{ - {"JSON home", page.TargetPathDescriptor{Kind: kinds.KindHome, Type: output.JSONFormat}, page.TargetPaths{TargetFilename: "/index.json", SubResourceBaseTarget: "", Link: "/index.json"}}, - {"AMP home", page.TargetPathDescriptor{Kind: kinds.KindHome, Type: output.AMPFormat}, page.TargetPaths{TargetFilename: "/amp/index.html", SubResourceBaseTarget: "/amp", Link: "/amp/"}}, - {"HTML home", page.TargetPathDescriptor{Kind: kinds.KindHome, BaseName: "_index", Type: output.HTMLFormat}, page.TargetPaths{TargetFilename: "/index.html", SubResourceBaseTarget: "", Link: "/"}}, - {"Netlify redirects", page.TargetPathDescriptor{Kind: kinds.KindHome, BaseName: "_index", Type: noExtDelimFormat}, page.TargetPaths{TargetFilename: "/_redirects", SubResourceBaseTarget: "", Link: "/_redirects"}}, - {"HTML section list", page.TargetPathDescriptor{ - Kind: kinds.KindSection, - Sections: []string{"sect1"}, - BaseName: "_index", - Type: output.HTMLFormat, - }, page.TargetPaths{TargetFilename: "/sect1/index.html", SubResourceBaseTarget: "/sect1", Link: "/sect1/"}}, - {"HTML taxonomy term", page.TargetPathDescriptor{ - Kind: kinds.KindTerm, - Sections: []string{"tags", "hugo"}, - BaseName: "_index", - Type: output.HTMLFormat, - }, page.TargetPaths{TargetFilename: "/tags/hugo/index.html", SubResourceBaseTarget: "/tags/hugo", Link: "/tags/hugo/"}}, - {"HTML taxonomy", page.TargetPathDescriptor{ - Kind: kinds.KindTaxonomy, - Sections: []string{"tags"}, - BaseName: "_index", - Type: output.HTMLFormat, - }, page.TargetPaths{TargetFilename: "/tags/index.html", SubResourceBaseTarget: "/tags", Link: "/tags/"}}, - { - "HTML page", page.TargetPathDescriptor{ - Kind: kinds.KindPage, - Dir: "/a/b", - BaseName: "mypage", - Sections: []string{"a"}, - Type: output.HTMLFormat, - }, page.TargetPaths{TargetFilename: "/a/b/mypage/index.html", SubResourceBaseTarget: "/a/b/mypage", Link: "/a/b/mypage/"}, - }, - - { - "HTML page with index as base", page.TargetPathDescriptor{ - Kind: kinds.KindPage, - Dir: "/a/b", - BaseName: "index", - Sections: []string{"a"}, - Type: output.HTMLFormat, - }, page.TargetPaths{TargetFilename: "/a/b/index.html", SubResourceBaseTarget: "/a/b", Link: "/a/b/"}, - }, - - { - "HTML page with special chars", page.TargetPathDescriptor{ - Kind: kinds.KindPage, - Dir: "/a/b", - BaseName: "My Page!", - Type: output.HTMLFormat, - }, page.TargetPaths{TargetFilename: "/a/b/my-page/index.html", SubResourceBaseTarget: "/a/b/my-page", Link: "/a/b/my-page/"}, - }, - {"RSS home", page.TargetPathDescriptor{Kind: "rss", Type: output.RSSFormat}, page.TargetPaths{TargetFilename: "/index.xml", SubResourceBaseTarget: "", Link: "/index.xml"}}, - {"RSS section list", 
page.TargetPathDescriptor{ - Kind: "rss", - Sections: []string{"sect1"}, - Type: output.RSSFormat, - }, page.TargetPaths{TargetFilename: "/sect1/index.xml", SubResourceBaseTarget: "/sect1", Link: "/sect1/index.xml"}}, - { - "AMP page", page.TargetPathDescriptor{ - Kind: kinds.KindPage, - Dir: "/a/b/c", - BaseName: "myamp", - Type: output.AMPFormat, - }, page.TargetPaths{TargetFilename: "/amp/a/b/c/myamp/index.html", SubResourceBaseTarget: "/amp/a/b/c/myamp", Link: "/amp/a/b/c/myamp/"}, - }, - { - "AMP page with URL with suffix", page.TargetPathDescriptor{ - Kind: kinds.KindPage, - Dir: "/sect/", - BaseName: "mypage", - URL: "/some/other/url.xhtml", - Type: output.HTMLFormat, - }, page.TargetPaths{TargetFilename: "/some/other/url.xhtml", SubResourceBaseTarget: "/some/other", Link: "/some/other/url.xhtml"}, - }, - { - "JSON page with URL without suffix", page.TargetPathDescriptor{ - Kind: kinds.KindPage, - Dir: "/sect/", - BaseName: "mypage", - URL: "/some/other/path/", - Type: output.JSONFormat, - }, page.TargetPaths{TargetFilename: "/some/other/path/index.json", SubResourceBaseTarget: "/some/other/path", Link: "/some/other/path/index.json"}, - }, - { - "JSON page with URL without suffix and no trailing slash", page.TargetPathDescriptor{ - Kind: kinds.KindPage, - Dir: "/sect/", - BaseName: "mypage", - URL: "/some/other/path", - Type: output.JSONFormat, - }, page.TargetPaths{TargetFilename: "/some/other/path/index.json", SubResourceBaseTarget: "/some/other/path", Link: "/some/other/path/index.json"}, - }, - { - "HTML page with URL without suffix and no trailing slash", page.TargetPathDescriptor{ - Kind: kinds.KindPage, - Dir: "/sect/", - BaseName: "mypage", - URL: "/some/other/path", - Type: output.HTMLFormat, - }, page.TargetPaths{TargetFilename: "/some/other/path/index.html", SubResourceBaseTarget: "/some/other/path", Link: "/some/other/path/"}, - }, - { - "HTML page with URL containing double hyphen", page.TargetPathDescriptor{ - Kind: kinds.KindPage, - Dir: "/sect/", - BaseName: "mypage", - URL: "/some/other--url/", - Type: output.HTMLFormat, - }, page.TargetPaths{TargetFilename: "/some/other--url/index.html", SubResourceBaseTarget: "/some/other--url", Link: "/some/other--url/"}, - }, - { - "HTML page with expanded permalink", page.TargetPathDescriptor{ - Kind: kinds.KindPage, - Dir: "/a/b", - BaseName: "mypage", - ExpandedPermalink: "/2017/10/my-title/", - Type: output.HTMLFormat, - }, page.TargetPaths{TargetFilename: "/2017/10/my-title/index.html", SubResourceBaseTarget: "/2017/10/my-title", Link: "/2017/10/my-title/"}, - }, - { - "Paginated HTML home", page.TargetPathDescriptor{ - Kind: kinds.KindHome, - BaseName: "_index", - Type: output.HTMLFormat, - Addends: "page/3", - }, page.TargetPaths{TargetFilename: "/page/3/index.html", SubResourceBaseTarget: "/page/3", Link: "/page/3/"}, - }, - { - "Paginated Taxonomy terms list", page.TargetPathDescriptor{ - Kind: kinds.KindTerm, - BaseName: "_index", - Sections: []string{"tags", "hugo"}, - Type: output.HTMLFormat, - Addends: "page/3", - }, page.TargetPaths{TargetFilename: "/tags/hugo/page/3/index.html", SubResourceBaseTarget: "/tags/hugo/page/3", Link: "/tags/hugo/page/3/"}, - }, - { - "Regular page with addend", page.TargetPathDescriptor{ - Kind: kinds.KindPage, - Dir: "/a/b", - BaseName: "mypage", - Addends: "c/d/e", - Type: output.HTMLFormat, - }, page.TargetPaths{TargetFilename: "/a/b/mypage/c/d/e/index.html", SubResourceBaseTarget: "/a/b/mypage/c/d/e", Link: "/a/b/mypage/c/d/e/"}, - }, - } - - for i, test := range tests { - 
t.Run(fmt.Sprintf("langPrefixPath=%s,langPrefixLink=%s,uglyURLs=%t,name=%s", langPrefixPath, langPrefixLink, uglyURLs, test.name), - func(t *testing.T) { - test.d.ForcePrefix = true - test.d.PathSpec = pathSpec - test.d.UglyURLs = uglyURLs - test.d.PrefixFilePath = langPrefixPath - test.d.PrefixLink = langPrefixLink - test.d.Dir = filepath.FromSlash(test.d.Dir) - isUgly := uglyURLs && !test.d.Type.NoUgly - - expected := test.expected - - // TODO(bep) simplify - if test.d.Kind == kinds.KindPage && test.d.BaseName == test.d.Type.BaseName { - } else if test.d.Kind == kinds.KindHome && test.d.Type.Path != "" { - } else if test.d.Type.MediaType.FirstSuffix.Suffix != "" && (!strings.HasPrefix(expected.TargetFilename, "/index") || test.d.Addends != "") && test.d.URL == "" && isUgly { - expected.TargetFilename = strings.Replace(expected.TargetFilename, - "/"+test.d.Type.BaseName+"."+test.d.Type.MediaType.FirstSuffix.Suffix, - "."+test.d.Type.MediaType.FirstSuffix.Suffix, 1) - expected.Link = strings.TrimSuffix(expected.Link, "/") + "." + test.d.Type.MediaType.FirstSuffix.Suffix - - } - - if test.d.PrefixFilePath != "" && !strings.HasPrefix(test.d.URL, "/"+test.d.PrefixFilePath) { - expected.TargetFilename = "/" + test.d.PrefixFilePath + expected.TargetFilename - expected.SubResourceBaseTarget = "/" + test.d.PrefixFilePath + expected.SubResourceBaseTarget - } - - if test.d.PrefixLink != "" && !strings.HasPrefix(test.d.URL, "/"+test.d.PrefixLink) { - expected.Link = "/" + test.d.PrefixLink + expected.Link - } - - expected.TargetFilename = filepath.FromSlash(expected.TargetFilename) - expected.SubResourceBaseTarget = filepath.FromSlash(expected.SubResourceBaseTarget) - - pagePath := page.CreateTargetPaths(test.d) - - if !eqTargetPaths(pagePath, expected) { - t.Fatalf("[%d] [%s] targetPath expected\n%#v, got:\n%#v", i, test.name, expected, pagePath) - } - }) - } - } - } - } -} - -func TestPageTargetPathPrefix(t *testing.T) { - pathSpec := newTestPathSpec() - tests := []struct { - name string - d page.TargetPathDescriptor - expected page.TargetPaths - }{ - { - "URL set, prefix both, no force", - page.TargetPathDescriptor{Kind: kinds.KindPage, Type: output.JSONFormat, URL: "/mydir/my.json", ForcePrefix: false, PrefixFilePath: "pf", PrefixLink: "pl"}, - page.TargetPaths{TargetFilename: "/mydir/my.json", SubResourceBaseTarget: "/mydir", SubResourceBaseLink: "/mydir", Link: "/mydir/my.json"}, - }, - { - "URL set, prefix both, force", - page.TargetPathDescriptor{Kind: kinds.KindPage, Type: output.JSONFormat, URL: "/mydir/my.json", ForcePrefix: true, PrefixFilePath: "pf", PrefixLink: "pl"}, - page.TargetPaths{TargetFilename: "/pf/mydir/my.json", SubResourceBaseTarget: "/pf/mydir", SubResourceBaseLink: "/pl/mydir", Link: "/pl/mydir/my.json"}, - }, - } - - for i, test := range tests { - t.Run(fmt.Sprintf(test.name), - func(t *testing.T) { - test.d.PathSpec = pathSpec - expected := test.expected - expected.TargetFilename = filepath.FromSlash(expected.TargetFilename) - expected.SubResourceBaseTarget = filepath.FromSlash(expected.SubResourceBaseTarget) - - pagePath := page.CreateTargetPaths(test.d) - - if pagePath != expected { - t.Fatalf("[%d] [%s] targetPath expected\n%#v, got:\n%#v", i, test.name, expected, pagePath) - } - }) - } -} - -func eqTargetPaths(p1, p2 page.TargetPaths) bool { - if p1.Link != p2.Link { - return false - } - - if p1.SubResourceBaseTarget != p2.SubResourceBaseTarget { - return false - } - - if p1.TargetFilename != p2.TargetFilename { - return false - } - - return true -} diff --git 
a/resources/page/pagegroup.go b/resources/page/pagegroup.go index d091c6bef..e691a112e 100644 --- a/resources/page/pagegroup.go +++ b/resources/page/pagegroup.go @@ -244,7 +244,7 @@ func (p Pages) groupByDateField(format string, sorter func(p Pages) Pages, getDa return nil, nil } - firstPage := sp[0].(Page) + firstPage := sp[0] date := getDate(firstPage) // Pages may be a mix of multiple languages, so we need to use the language @@ -258,7 +258,7 @@ func (p Pages) groupByDateField(format string, sorter func(p Pages) Pages, getDa i := 0 for _, e := range sp[1:] { - date = getDate(e.(Page)) + date = getDate(e) formatted := formatter.Format(date, format) if r[i].Key.(string) != formatted { r = append(r, PageGroup{Key: formatted}) diff --git a/resources/page/pagemeta/page_frontmatter.go b/resources/page/pagemeta/page_frontmatter.go index 98ab6b222..d804f27a7 100644 --- a/resources/page/pagemeta/page_frontmatter.go +++ b/resources/page/pagemeta/page_frontmatter.go @@ -47,9 +47,8 @@ type FrontMatterHandler struct { // FrontMatterDescriptor describes how to handle front matter for a given Page. // It has pointers to values in the receiving page which gets updated. type FrontMatterDescriptor struct { - - // This the Page's front matter. - Frontmatter map[string]any + // This is the Page's params. + Params map[string]any // This is the Page's base filename (BaseFilename), e.g. page.md., or // if page is a leaf bundle, the bundle folder name (ContentBaseName). @@ -63,9 +62,6 @@ type FrontMatterDescriptor struct { // The below are pointers to values on Page and will be modified. - // This is the Page's params. - Params map[string]any - // This is the Page's dates. Dates *resource.Dates @@ -365,7 +361,7 @@ type frontmatterFieldHandlers int func (f *frontmatterFieldHandlers) newDateFieldHandler(key string, setter func(d *FrontMatterDescriptor, t time.Time)) frontMatterFieldHandler { return func(d *FrontMatterDescriptor) (bool, error) { - v, found := d.Frontmatter[key] + v, found := d.Params[key] if !found { return false, nil @@ -396,7 +392,7 @@ func (f *frontmatterFieldHandlers) newDateFilenameHandler(setter func(d *FrontMa setter(d, date) - if _, found := d.Frontmatter["slug"]; !found { + if _, found := d.Params["slug"]; !found { // Use slug from filename d.PageURLs.Slug = slug } diff --git a/resources/page/pagemeta/page_frontmatter_test.go b/resources/page/pagemeta/page_frontmatter_test.go index f040af163..1aff8b511 100644 --- a/resources/page/pagemeta/page_frontmatter_test.go +++ b/resources/page/pagemeta/page_frontmatter_test.go @@ -29,11 +29,10 @@ import ( func newTestFd() *pagemeta.FrontMatterDescriptor { return &pagemeta.FrontMatterDescriptor{ - Frontmatter: make(map[string]any), - Params: make(map[string]any), - Dates: &resource.Dates{}, - PageURLs: &pagemeta.URLPath{}, - Location: time.UTC, + Params: make(map[string]any), + Dates: &resource.Dates{}, + PageURLs: &pagemeta.URLPath{}, + Location: time.UTC, } } @@ -106,13 +105,13 @@ func TestFrontMatterDatesHandlers(t *testing.T) { case ":git": d.GitAuthorDate = d1 } - d.Frontmatter["date"] = d2 + d.Params["date"] = d2 c.Assert(handler.HandleDates(d), qt.IsNil) c.Assert(d.Dates.FDate, qt.Equals, d1) c.Assert(d.Params["date"], qt.Equals, d2) d = newTestFd() - d.Frontmatter["date"] = d2 + d.Params["date"] = d2 c.Assert(handler.HandleDates(d), qt.IsNil) c.Assert(d.Dates.FDate, qt.Equals, d2) c.Assert(d.Params["date"], qt.Equals, d2) @@ -120,54 +119,6 @@ func TestFrontMatterDatesHandlers(t *testing.T) { } } -func TestFrontMatterDatesCustomConfig(t 
*testing.T) { - t.Parallel() - - c := qt.New(t) - - cfg := config.New() - cfg.Set("frontmatter", map[string]any{ - "date": []string{"mydate"}, - "lastmod": []string{"publishdate"}, - "publishdate": []string{"publishdate"}, - }) - - conf := testconfig.GetTestConfig(nil, cfg) - handler, err := pagemeta.NewFrontmatterHandler(nil, conf.GetConfigSection("frontmatter").(pagemeta.FrontmatterConfig)) - c.Assert(err, qt.IsNil) - - testDate, err := time.Parse("2006-01-02", "2018-02-01") - c.Assert(err, qt.IsNil) - - d := newTestFd() - d.Frontmatter["mydate"] = testDate - testDate = testDate.Add(24 * time.Hour) - d.Frontmatter["date"] = testDate - testDate = testDate.Add(24 * time.Hour) - d.Frontmatter["lastmod"] = testDate - testDate = testDate.Add(24 * time.Hour) - d.Frontmatter["publishdate"] = testDate - testDate = testDate.Add(24 * time.Hour) - d.Frontmatter["expirydate"] = testDate - - c.Assert(handler.HandleDates(d), qt.IsNil) - - c.Assert(d.Dates.FDate.Day(), qt.Equals, 1) - c.Assert(d.Dates.FLastmod.Day(), qt.Equals, 4) - c.Assert(d.Dates.FPublishDate.Day(), qt.Equals, 4) - c.Assert(d.Dates.FExpiryDate.Day(), qt.Equals, 5) - - c.Assert(d.Params["date"], qt.Equals, d.Dates.FDate) - c.Assert(d.Params["mydate"], qt.Equals, d.Dates.FDate) - c.Assert(d.Params["publishdate"], qt.Equals, d.Dates.FPublishDate) - c.Assert(d.Params["expirydate"], qt.Equals, d.Dates.FExpiryDate) - - c.Assert(handler.IsDateKey("date"), qt.Equals, false) // This looks odd, but is configured like this. - c.Assert(handler.IsDateKey("mydate"), qt.Equals, true) - c.Assert(handler.IsDateKey("publishdate"), qt.Equals, true) - c.Assert(handler.IsDateKey("pubdate"), qt.Equals, true) -} - func TestFrontMatterDatesDefaultKeyword(t *testing.T) { t.Parallel() @@ -186,10 +137,10 @@ func TestFrontMatterDatesDefaultKeyword(t *testing.T) { testDate, _ := time.Parse("2006-01-02", "2018-02-01") d := newTestFd() - d.Frontmatter["mydate"] = testDate - d.Frontmatter["date"] = testDate.Add(1 * 24 * time.Hour) - d.Frontmatter["mypubdate"] = testDate.Add(2 * 24 * time.Hour) - d.Frontmatter["publishdate"] = testDate.Add(3 * 24 * time.Hour) + d.Params["mydate"] = testDate + d.Params["date"] = testDate.Add(1 * 24 * time.Hour) + d.Params["mypubdate"] = testDate.Add(2 * 24 * time.Hour) + d.Params["publishdate"] = testDate.Add(3 * 24 * time.Hour) c.Assert(handler.HandleDates(d), qt.IsNil) diff --git a/resources/page/pages.go b/resources/page/pages.go index 77e56a062..088abb9ac 100644 --- a/resources/page/pages.go +++ b/resources/page/pages.go @@ -66,9 +66,7 @@ func ToPages(seq any) (Pages, error) { return v.Pages, nil case []Page: pages := make(Pages, len(v)) - for i, vv := range v { - pages[i] = vv - } + copy(pages, v) return pages, nil case []any: pages := make(Pages, len(v)) diff --git a/resources/page/pages_related.go b/resources/page/pages_related.go index 217aced47..3322a4fbf 100644 --- a/resources/page/pages_related.go +++ b/resources/page/pages_related.go @@ -35,7 +35,6 @@ var ( // A PageGenealogist finds related pages in a page collection. This interface is implemented // by Pages and PageGroup, which makes it available as `{{ .RegularRelated . }}` etc. type PageGenealogist interface { - // Template example: // {{ $related := .RegularPages.Related . 
}} Related(ctx context.Context, opts any) (Pages, error) @@ -76,7 +75,6 @@ func (p Pages) Related(ctx context.Context, optsv any) (Pages, error) { } return result, nil - } // RelatedIndices searches the given indices with the search keywords from the @@ -186,6 +184,7 @@ func (s *RelatedDocsHandler) getIndex(p Pages) *related.InvertedIndex { } return nil } + func (s *RelatedDocsHandler) getOrCreateIndex(ctx context.Context, p Pages) (*related.InvertedIndex, error) { s.mu.RLock() cachedIndex := s.getIndex(p) diff --git a/resources/page/pages_sort.go b/resources/page/pages_sort.go index 32b1b3895..3f4875702 100644 --- a/resources/page/pages_sort.go +++ b/resources/page/pages_sort.go @@ -54,6 +54,19 @@ func getOrdinals(p1, p2 Page) (int, int) { return p1o.Ordinal(), p2o.Ordinal() } +func getWeight0s(p1, p2 Page) (int, int) { + p1w, ok1 := p1.(resource.Weight0Provider) + if !ok1 { + return -1, -1 + } + p2w, ok2 := p2.(resource.Weight0Provider) + if !ok2 { + return -1, -1 + } + + return p1w.Weight0(), p2w.Weight0() +} + // Sort stable sorts the pages given the receiver's sort order. func (by pageBy) Sort(pages Pages) { ps := &pageSorter{ @@ -72,12 +85,17 @@ var ( if o1 != o2 && o1 != -1 && o2 != -1 { return o1 < o2 } + // Weight0, as by the weight of the taxonomy entrie in the front matter. + w01, w02 := getWeight0s(p1, p2) + if w01 != w02 && w01 != -1 && w02 != -1 { + return w01 < w02 + } if p1.Weight() == p2.Weight() { if p1.Date().Unix() == p2.Date().Unix() { c := collatorStringCompare(func(p Page) string { return p.LinkTitle() }, p1, p2) if c == 0 { - if p1.File().IsZero() || p2.File().IsZero() { - return p1.File().IsZero() + if p1.File() == nil || p2.File() == nil { + return p1.File() == nil } return compare.LessStrings(p1.File().Filename(), p2.File().Filename()) } @@ -102,7 +120,7 @@ var ( if p1.Date().Unix() == p2.Date().Unix() { c := compare.Strings(p1.LinkTitle(), p2.LinkTitle()) if c == 0 { - if !p1.File().IsZero() && !p2.File().IsZero() { + if p1.File() != nil && p2.File() != nil { return compare.LessStrings(p1.File().Filename(), p2.File().Filename()) } } @@ -192,7 +210,6 @@ var collatorStringLess = func(p Page) (less func(s1, s2 string) bool, close func func() { coll.Unlock() } - } // ByWeight sorts the Pages by weight and returns a copy. @@ -406,7 +423,6 @@ func (p Pages) ByParam(paramsKey any) Pages { s2 := cast.ToString(v2) return stringLess(s1, s2) - } pages, _ := spc.get(key, pageBy(paramsKeyComparator).Sort, p) diff --git a/resources/page/pages_sort_test.go b/resources/page/pages_sort_test.go index 728237230..12fa4a1e1 100644 --- a/resources/page/pages_sort_test.go +++ b/resources/page/pages_sort_test.go @@ -109,7 +109,6 @@ func TestSortByN(t *testing.T) { byLen := func(p Pages) Pages { return p.ByLength(ctx) - } for i, this := range []struct { @@ -273,7 +272,7 @@ func createSortTestPages(num int) Pages { for i := 0; i < num; i++ { p := newTestPage() p.path = fmt.Sprintf("/x/y/p%d.md", i) - p.title = fmt.Sprintf("Title %d", i%(num+1/2)) + p.title = fmt.Sprintf("Title %d", i%((num+1)/2)) p.params = map[string]any{ "arbitrarily": map[string]any{ "nested": ("xyz" + fmt.Sprintf("%v", 100-i)), diff --git a/resources/page/permalinks.go b/resources/page/permalinks.go index 4577f5240..1677d3a90 100644 --- a/resources/page/permalinks.go +++ b/resources/page/permalinks.go @@ -120,12 +120,18 @@ func (l PermalinkExpander) Expand(key string, p Page) (string, error) { return expand(p) } +// Allow " " and / to represent the root section. 
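getWeight0s above only takes effect for pages whose concrete type also implements resource.Weight0Provider, which, judging from the assertions, is a single Weight0() int method; pages that do not implement it fall through to the existing Weight/Date/LinkTitle tie-breaks. A minimal hypothetical implementation for illustration, not part of this patch:

package page

// weightedTermPage opts a Page into the Weight0 tie-break used by the
// default sort above by providing the Weight0() int method that
// getWeight0s type-asserts for (resource.Weight0Provider is assumed to be
// exactly that single-method interface).
type weightedTermPage struct {
	Page
	weight0 int
}

func (p weightedTermPage) Weight0() int { return p.weight0 }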
+var sectionCutSet = " /" + +func init() { + if string(os.PathSeparator) != "/" { + sectionCutSet += string(os.PathSeparator) + } +} + func (l PermalinkExpander) parse(patterns map[string]string) (map[string]func(Page) (string, error), error) { expanders := make(map[string]func(Page) (string, error)) - // Allow " " and / to represent the root section. - const sectionCutSet = " /" + string(os.PathSeparator) - for k, pattern := range patterns { k = strings.Trim(k, sectionCutSet) @@ -295,7 +301,7 @@ func (l PermalinkExpander) pageToPermalinkSections(p Page, _ string) (string, er } func (l PermalinkExpander) translationBaseName(p Page) string { - if p.File().IsZero() { + if p.File() == nil { return "" } return p.File().TranslationBaseName() diff --git a/resources/page/permalinks_integration_test.go b/resources/page/permalinks_integration_test.go index 6c2411ad7..9a76ac602 100644 --- a/resources/page/permalinks_integration_test.go +++ b/resources/page/permalinks_integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -102,7 +102,6 @@ slug: "mytagslug" "taxonomy": {"tags": "/tagsslug/:slug/"}, "term": {"tags": "/tagsslug/tag/:slug/"}, }) - } func TestPermalinksOldSetup(t *testing.T) { @@ -145,7 +144,6 @@ slug: "p1slugvalue" "taxonomy": {}, "term": {"withpageslug": "/pageslug/:slug/"}, }) - } func TestPermalinksNestedSections(t *testing.T) { @@ -194,5 +192,4 @@ List. b.AssertFileContent("public/libros/index.html", "List.") b.AssertFileContent("public/libros/fiction/index.html", "List.") b.AssertFileContent("public/libros/fiction/2023/book1/index.html", "Single.") - } diff --git a/resources/page/permalinks_test.go b/resources/page/permalinks_test.go index 194387d5c..a3a45bb88 100644 --- a/resources/page/permalinks_test.go +++ b/resources/page/permalinks_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -202,7 +202,6 @@ func TestPermalinkExpansionSliceSyntax(t *testing.T) { c.Assert(fn1("[:last]"), qt.DeepEquals, []string{}) c.Assert(fn1("[1:last]"), qt.DeepEquals, []string{}) c.Assert(fn1("[1]"), qt.DeepEquals, []string{}) - }) c.Run("Out of bounds", func(c *qt.C) { @@ -218,9 +217,7 @@ func TestPermalinkExpansionSliceSyntax(t *testing.T) { c.Assert(fn4("[]"), qt.IsNil) c.Assert(fn4("[1:}"), qt.IsNil) c.Assert(fn4("foo"), qt.IsNil) - }) - } func BenchmarkPermalinkExpand(b *testing.B) { diff --git a/resources/page/site.go b/resources/page/site.go index 0480ce674..9ef76505d 100644 --- a/resources/page/site.go +++ b/resources/page/site.go @@ -21,7 +21,6 @@ import ( "github.com/gohugoio/hugo/config/privacy" "github.com/gohugoio/hugo/config/services" "github.com/gohugoio/hugo/identity" - "github.com/gohugoio/hugo/tpl" "github.com/gohugoio/hugo/config" @@ -88,8 +87,12 @@ type Site interface { Taxonomies() TaxonomyList // Returns the last modification date of the content. + // Deprecated: Use .Lastmod instead. LastChange() time.Time + // Returns the last modification date of the content. + Lastmod() time.Time + // Returns the Menus for this site. Menus() navigation.Menus @@ -108,10 +111,6 @@ type Site interface { // Returns the site config. 
Config() SiteConfig - // Returns the identity of this site. - // This is for internal use only. - GetIdentity() identity.Identity - // Author is deprecated and will be removed in a future release. Author() map[string]interface{} @@ -127,9 +126,6 @@ type Site interface { // Deprecated: Use Config().Privacy.Disqus instead. DisqusShortname() string - // For internal use only. - GetPageWithTemplateInfo(info tpl.Info, ref ...string) (Page, error) - // BuildDrafts is deprecated and will be removed in a future release. BuildDrafts() bool @@ -154,6 +150,9 @@ func (s Sites) First() Site { return s[0] } +// Some additional interfaces implemented by siteWrapper that's not on Site. +var _ identity.ForEeachIdentityByNameProvider = (*siteWrapper)(nil) + type siteWrapper struct { s Site } @@ -165,6 +164,10 @@ func WrapSite(s Site) Site { return &siteWrapper{s: s} } +func (s *siteWrapper) Key() string { + return s.s.Language().Lang +} + func (s *siteWrapper) Social() map[string]string { return s.s.Social() } @@ -260,7 +263,11 @@ func (s *siteWrapper) Taxonomies() TaxonomyList { } func (s *siteWrapper) LastChange() time.Time { - return s.s.LastChange() + return s.s.Lastmod() +} + +func (s *siteWrapper) Lastmod() time.Time { + return s.s.Lastmod() } func (s *siteWrapper) Menus() navigation.Menus { @@ -283,14 +290,6 @@ func (s *siteWrapper) Data() map[string]any { return s.s.Data() } -func (s *siteWrapper) GetIdentity() identity.Identity { - return s.s.GetIdentity() -} - -func (s *siteWrapper) GetPageWithTemplateInfo(info tpl.Info, ref ...string) (Page, error) { - return s.s.GetPageWithTemplateInfo(info, ref...) -} - func (s *siteWrapper) BuildDrafts() bool { return s.s.BuildDrafts() } @@ -312,6 +311,11 @@ func (s *siteWrapper) RSSLink() template.URL { return s.s.RSSLink() } +// For internal use only. +func (s *siteWrapper) ForEeachIdentityByName(name string, f func(identity.Identity) bool) { + s.s.(identity.ForEeachIdentityByNameProvider).ForEeachIdentityByName(name, f) +} + type testSite struct { h hugo.HugoInfo l *langs.Language @@ -341,6 +345,10 @@ func (testSite) LastChange() (t time.Time) { return } +func (testSite) Lastmod() (t time.Time) { + return +} + func (t testSite) Title() string { return "foo" } @@ -386,10 +394,6 @@ func (t testSite) MainSections() []string { return nil } -func (t testSite) GetIdentity() identity.Identity { - return identity.KeyValueIdentity{Key: "site", Value: t.l.Lang} -} - // Deprecated: use hugo.IsServer instead func (t testSite) IsServer() bool { return false @@ -439,10 +443,6 @@ func (s testSite) Config() SiteConfig { return SiteConfig{} } -func (testSite) GetPageWithTemplateInfo(info tpl.Info, ref ...string) (Page, error) { - return nil, nil -} - // Deprecated: Use .Site.Config.Services.Disqus.Shortname instead func (testSite) DisqusShortname() string { return "" diff --git a/resources/page/siteidentities/identities.go b/resources/page/siteidentities/identities.go new file mode 100644 index 000000000..8481999cf --- /dev/null +++ b/resources/page/siteidentities/identities.go @@ -0,0 +1,34 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package siteidentities + +import ( + "github.com/gohugoio/hugo/identity" +) + +const ( + // Identifies site.Data. + // The change detection in /data is currently very coarse grained. + Data = identity.StringIdentity("site.Data") +) + +// FromString returns the identity from the given string, +// or identity.Anonymous if not found. +func FromString(name string) (identity.Identity, bool) { + switch name { + case "Data": + return Data, true + } + return identity.Anonymous, false +} diff --git a/resources/page/taxonomy.go b/resources/page/taxonomy.go index 3aa0c7a7b..66c9e6fae 100644 --- a/resources/page/taxonomy.go +++ b/resources/page/taxonomy.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/resources/page/testhelpers_page_test.go b/resources/page/testhelpers_page_test.go deleted file mode 100644 index 95124cb58..000000000 --- a/resources/page/testhelpers_page_test.go +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package page_test - -import ( - "github.com/gohugoio/hugo/common/loggers" - "github.com/gohugoio/hugo/config" - "github.com/gohugoio/hugo/config/testconfig" - "github.com/gohugoio/hugo/helpers" - "github.com/gohugoio/hugo/hugofs" - "github.com/spf13/afero" -) - -func newTestPathSpec() *helpers.PathSpec { - return newTestPathSpecFor(config.New()) -} - -func newTestPathSpecFor(cfg config.Provider) *helpers.PathSpec { - mfs := afero.NewMemMapFs() - conf := testconfig.GetTestConfig(mfs, cfg) - fs := hugofs.NewFrom(mfs, conf.BaseConfig()) - ps, err := helpers.NewPathSpec(fs, conf, loggers.NewDefault()) - if err != nil { - panic(err) - } - return ps -} diff --git a/resources/page/testhelpers_test.go b/resources/page/testhelpers_test.go index ca2c4ff53..e80ed422d 100644 --- a/resources/page/testhelpers_test.go +++ b/resources/page/testhelpers_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
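The new siteidentities package above maps a plain name to a site-scoped identity. A minimal, hypothetical sketch of how a caller might resolve such a dependency — only siteidentities.FromString and identity.Anonymous come from this patch; the surrounding function is illustrative:

    // dependencyFor is illustrative only; it resolves a site-scoped identity by name,
    // falling back to the anonymous identity when the name is unknown.
    func dependencyFor(name string) identity.Identity {
        if id, ok := siteidentities.FromString(name); ok {
            // e.g. "Data" resolves to siteidentities.Data, so changes under /data
            // invalidate anything that depends on this identity.
            return id
        }
        return identity.Anonymous
    }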
@@ -21,10 +21,7 @@ import ( "path/filepath" "time" - "github.com/gohugoio/hugo/hugofs/files" - "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/markup/tableofcontents" - "github.com/gohugoio/hugo/tpl" "github.com/gohugoio/hugo/resources/resource" @@ -32,6 +29,7 @@ import ( "github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/common/maps" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/langs" @@ -54,7 +52,7 @@ func newTestPage() *testPage { func newTestPageWithFile(filename string) *testPage { filename = filepath.FromSlash(filename) - file := source.NewTestFile(filename) + file := source.NewFileInfoFrom(filename, filename) l, err := langs.NewLanguage( "en", @@ -107,7 +105,7 @@ type testPage struct { params map[string]any data map[string]any - file source.File + file *source.File currentSection *testPage sectionEntries []string @@ -141,7 +139,7 @@ func (p *testPage) BaseFileName() string { panic("testpage: not implemented") } -func (p *testPage) BundleType() files.ContentClass { +func (p *testPage) BundleType() string { panic("testpage: not implemented") } @@ -201,7 +199,7 @@ func (p *testPage) Extension() string { panic("testpage: not implemented") } -func (p *testPage) File() source.File { +func (p *testPage) File() *source.File { return p.file } @@ -225,10 +223,6 @@ func (p *testPage) GetPage(ref string) (Page, error) { panic("testpage: not implemented") } -func (p *testPage) GetPageWithTemplateInfo(info tpl.Info, ref string) (Page, error) { - panic("testpage: not implemented") -} - func (p *testPage) GetParam(key string) any { panic("testpage: not implemented") } @@ -261,15 +255,15 @@ func (p *testPage) Hugo() hugo.HugoInfo { panic("testpage: not implemented") } -func (p *testPage) InSection(other any) (bool, error) { +func (p *testPage) InSection(other any) bool { panic("testpage: not implemented") } -func (p *testPage) IsAncestor(other any) (bool, error) { +func (p *testPage) IsAncestor(other any) bool { panic("testpage: not implemented") } -func (p *testPage) IsDescendant(other any) (bool, error) { +func (p *testPage) IsDescendant(other any) bool { panic("testpage: not implemented") } @@ -301,6 +295,10 @@ func (p *testPage) IsTranslated() bool { panic("testpage: not implemented") } +func (p *testPage) Ancestors() Pages { + panic("testpage: not implemented") +} + func (p *testPage) Keywords() []string { return nil } @@ -415,16 +413,12 @@ func (p *testPage) Parent() Page { panic("testpage: not implemented") } -func (p *testPage) Ancestors() Pages { - panic("testpage: not implemented") -} - func (p *testPage) Path() string { return p.path } -func (p *testPage) Pathc() string { - return p.path +func (p *testPage) PathInfo() *paths.Path { + panic("testpage: not implemented") } func (p *testPage) Permalink() string { @@ -604,10 +598,6 @@ func (p *testPage) WordCount(context.Context) int { panic("testpage: not implemented") } -func (p *testPage) GetIdentity() identity.Identity { - panic("testpage: not implemented") -} - func createTestPages(num int) Pages { pages := make(Pages, num) diff --git a/resources/page/zero_file.autogen.go b/resources/page/zero_file.autogen.go index 72d98998e..4b7c034a1 100644 --- a/resources/page/zero_file.autogen.go +++ b/resources/page/zero_file.autogen.go @@ -14,75 +14,3 @@ // This file is autogenerated. 
package page - -import ( - "github.com/gohugoio/hugo/common/loggers" - "github.com/gohugoio/hugo/hugofs" - "github.com/gohugoio/hugo/source" -) - -// ZeroFile represents a zero value of source.File with warnings if invoked. -type zeroFile struct { - log loggers.Logger -} - -func NewZeroFile(log loggers.Logger) source.File { - return zeroFile{log: log} -} - -func (zeroFile) IsZero() bool { - return true -} - -func (z zeroFile) Path() (o0 string) { - z.log.Warnln(".File.Path on zero object. Wrap it in if or with: {{ with .File }}{{ .Path }}{{ end }}") - return -} -func (z zeroFile) Section() (o0 string) { - z.log.Warnln(".File.Section on zero object. Wrap it in if or with: {{ with .File }}{{ .Section }}{{ end }}") - return -} -func (z zeroFile) Lang() (o0 string) { - z.log.Warnln(".File.Lang on zero object. Wrap it in if or with: {{ with .File }}{{ .Lang }}{{ end }}") - return -} -func (z zeroFile) Filename() (o0 string) { - z.log.Warnln(".File.Filename on zero object. Wrap it in if or with: {{ with .File }}{{ .Filename }}{{ end }}") - return -} -func (z zeroFile) Dir() (o0 string) { - z.log.Warnln(".File.Dir on zero object. Wrap it in if or with: {{ with .File }}{{ .Dir }}{{ end }}") - return -} -func (z zeroFile) Extension() (o0 string) { - z.log.Warnln(".File.Extension on zero object. Wrap it in if or with: {{ with .File }}{{ .Extension }}{{ end }}") - return -} -func (z zeroFile) Ext() (o0 string) { - z.log.Warnln(".File.Ext on zero object. Wrap it in if or with: {{ with .File }}{{ .Ext }}{{ end }}") - return -} -func (z zeroFile) LogicalName() (o0 string) { - z.log.Warnln(".File.LogicalName on zero object. Wrap it in if or with: {{ with .File }}{{ .LogicalName }}{{ end }}") - return -} -func (z zeroFile) BaseFileName() (o0 string) { - z.log.Warnln(".File.BaseFileName on zero object. Wrap it in if or with: {{ with .File }}{{ .BaseFileName }}{{ end }}") - return -} -func (z zeroFile) TranslationBaseName() (o0 string) { - z.log.Warnln(".File.TranslationBaseName on zero object. Wrap it in if or with: {{ with .File }}{{ .TranslationBaseName }}{{ end }}") - return -} -func (z zeroFile) ContentBaseName() (o0 string) { - z.log.Warnln(".File.ContentBaseName on zero object. Wrap it in if or with: {{ with .File }}{{ .ContentBaseName }}{{ end }}") - return -} -func (z zeroFile) UniqueID() (o0 string) { - z.log.Warnln(".File.UniqueID on zero object. Wrap it in if or with: {{ with .File }}{{ .UniqueID }}{{ end }}") - return -} -func (z zeroFile) FileInfo() (o0 hugofs.FileMetaInfo) { - z.log.Warnln(".File.FileInfo on zero object. Wrap it in if or with: {{ with .File }}{{ .FileInfo }}{{ end }}") - return -} diff --git a/resources/postpub/postpub.go b/resources/postpub/postpub.go index 93b5c2638..65e32145c 100644 --- a/resources/postpub/postpub.go +++ b/resources/postpub/postpub.go @@ -31,7 +31,7 @@ import ( type PostPublishedResource interface { resource.ResourceTypeProvider resource.ResourceLinksProvider - resource.ResourceMetaProvider + resource.ResourceNameTitleProvider resource.ResourceParamsProvider resource.ResourceDataProvider resource.OriginProvider diff --git a/resources/resource.go b/resources/resource.go index b7e6b65a8..e78dd12cb 100644 --- a/resources/resource.go +++ b/resources/resource.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -15,68 +15,55 @@ package resources import ( "context" + "errors" "fmt" "io" - "os" - "path" - "path/filepath" + "mime" "strings" "sync" + "sync/atomic" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/resources/internal" "github.com/gohugoio/hugo/common/herrors" - - "github.com/gohugoio/hugo/hugofs" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/media" - "github.com/gohugoio/hugo/source" - - "errors" "github.com/gohugoio/hugo/common/hugio" "github.com/gohugoio/hugo/common/maps" - "github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/resources/resource" - "github.com/spf13/afero" "github.com/gohugoio/hugo/helpers" ) var ( - _ resource.ContentResource = (*genericResource)(nil) - _ resource.ReadSeekCloserResource = (*genericResource)(nil) - _ resource.Resource = (*genericResource)(nil) - _ resource.Source = (*genericResource)(nil) - _ resource.Cloner = (*genericResource)(nil) - _ resource.ResourcesLanguageMerger = (*resource.Resources)(nil) - _ permalinker = (*genericResource)(nil) - _ resource.Identifier = (*genericResource)(nil) - _ fileInfo = (*genericResource)(nil) + _ resource.ContentResource = (*genericResource)(nil) + _ resource.ReadSeekCloserResource = (*genericResource)(nil) + _ resource.Resource = (*genericResource)(nil) + _ resource.Source = (*genericResource)(nil) + _ resource.Cloner = (*genericResource)(nil) + _ resource.ResourcesLanguageMerger = (*resource.Resources)(nil) + _ resource.Identifier = (*genericResource)(nil) + _ identity.IdentityGroupProvider = (*genericResource)(nil) + _ identity.DependencyManagerProvider = (*genericResource)(nil) + _ identity.Identity = (*genericResource)(nil) + _ fileInfo = (*genericResource)(nil) ) type ResourceSourceDescriptor struct { - // TargetPaths is a callback to fetch paths's relative to its owner. - TargetPaths func() page.TargetPaths + // The source content. + OpenReadSeekCloser hugio.OpenReadSeekCloser - // Need one of these to load the resource content. - SourceFile source.File - OpenReadSeekCloser resource.OpenReadSeekCloser + // The canonical source path. + Path *paths.Path - FileInfo os.FileInfo + // The name of the resource. + Name string - // If OpenReadSeekerCloser is not set, we use this to open the file. - SourceFilename string - - Fs afero.Fs - - Data map[string]any - - // Set when its known up front, else it's resolved from the target filename. - MediaType media.Type - - // The relative target filename without any language code. - RelTargetFilename string + // The name of the resource as it was read from the source. + NameOriginal string // Any base paths prepended to the target path. This will also typically be the // language code, but setting it here means that it should not have any effect on @@ -85,15 +72,109 @@ type ResourceSourceDescriptor struct { // multiple targets. TargetBasePaths []string + TargetPath string + BasePathRelPermalink string + BasePathTargetPath string + + // The Data to associate with this resource. + Data map[string]any + // Delay publishing until either Permalink or RelPermalink is called. Maybe never. LazyPublish bool + + // Set when its known up front, else it's resolved from the target filename. + MediaType media.Type + + // Used to track depenencies (e.g. imports). May be nil if that's of no concern. + DependencyManager identity.Manager + + // A shared identity for this resource and all its clones. + // If this is not set, an Identity is created. 
+ GroupIdentity identity.Identity } -func (r ResourceSourceDescriptor) Filename() string { - if r.SourceFile != nil { - return r.SourceFile.Filename() +func (fd *ResourceSourceDescriptor) init(r *Spec) error { + if len(fd.TargetBasePaths) == 0 { + // If not set, we publish the same resource to all hosts. + fd.TargetBasePaths = r.MultihostTargetBasePaths } - return r.SourceFilename + + if fd.OpenReadSeekCloser == nil { + panic(errors.New("OpenReadSeekCloser is nil")) + } + + if fd.TargetPath == "" { + panic(errors.New("RelPath is empty")) + } + + if fd.Path == nil { + fd.Path = paths.Parse("", fd.TargetPath) + } + + if fd.TargetPath == "" { + fd.TargetPath = fd.Path.Path() + } else { + fd.TargetPath = paths.ToSlashPreserveLeading(fd.TargetPath) + } + + fd.BasePathRelPermalink = paths.ToSlashPreserveLeading(fd.BasePathRelPermalink) + if fd.BasePathRelPermalink == "/" { + fd.BasePathRelPermalink = "" + } + fd.BasePathTargetPath = paths.ToSlashPreserveLeading(fd.BasePathTargetPath) + if fd.BasePathTargetPath == "/" { + fd.BasePathTargetPath = "" + } + + fd.TargetPath = paths.ToSlashPreserveLeading(fd.TargetPath) + for i, base := range fd.TargetBasePaths { + dir := paths.ToSlashPreserveLeading(base) + if dir == "/" { + dir = "" + } + fd.TargetBasePaths[i] = dir + } + + if fd.Name == "" { + fd.Name = fd.TargetPath + } + + if fd.NameOriginal == "" { + fd.NameOriginal = fd.Name + } + + mediaType := fd.MediaType + if mediaType.IsZero() { + ext := fd.Path.Ext() + var ( + found bool + suffixInfo media.SuffixInfo + ) + mediaType, suffixInfo, found = r.MediaTypes().GetFirstBySuffix(ext) + // TODO(bep) we need to handle these ambiguous types better, but in this context + // we most likely want the application/xml type. + if suffixInfo.Suffix == "xml" && mediaType.SubType == "rss" { + mediaType, found = r.MediaTypes().GetByType("application/xml") + } + + if !found { + // A fallback. Note that mime.TypeByExtension is slow by Hugo standards, + // so we should configure media types to avoid this lookup for most + // situations. + mimeStr := mime.TypeByExtension("." + ext) + if mimeStr != "" { + mediaType, _ = media.FromStringAndExt(mimeStr, ext) + } + } + } + + fd.MediaType = mediaType + + if fd.DependencyManager == nil { + fd.DependencyManager = identity.NopManager + } + + return nil } type ResourceTransformer interface { @@ -147,23 +228,25 @@ type baseResourceResource interface { type baseResourceInternal interface { resource.Source + resource.NameOriginalProvider fileInfo - metaAssigner + mediaTypeAssigner targetPather ReadSeekCloser() (hugio.ReadSeekCloser, error) + identity.IdentityGroupProvider + identity.DependencyManagerProvider + // For internal use. cloneWithUpdates(*transformationUpdate) (baseResource, error) tryTransformedFileCache(key string, u *transformationUpdate) io.ReadCloser - specProvider - getResourcePaths() *resourcePathDescriptor - getTargetFilenames() []string - openPublishFileForWriting(relTargetPath string) (io.WriteCloser, error) + getResourcePaths() internal.ResourcePaths - relTargetPathForRel(rel string, addBaseTargetPath, isAbs, isURL bool) string + specProvider + openPublishFileForWriting(relTargetPath string) (io.WriteCloser, error) } type specProvider interface { @@ -173,10 +256,10 @@ type specProvider interface { type baseResource interface { baseResourceResource baseResourceInternal + resource.Staler } -type commonResource struct { -} +type commonResource struct{} // Slice is for internal use. // for the template functions. See collections.Slice. 
@@ -201,60 +284,131 @@ func (commonResource) Slice(in any) (any, error) { } } -type dirFile struct { - // This is the directory component with Unix-style slashes. - dir string - // This is the file component. - file string -} - -func (d dirFile) path() string { - return path.Join(d.dir, d.file) -} - type fileInfo interface { - getSourceFilename() string - setSourceFilename(string) + setOpenSource(hugio.OpenReadSeekCloser) setSourceFilenameIsHash(bool) - setSourceFs(afero.Fs) - getFileInfo() hugofs.FileMetaInfo - hash() (string, error) - size() int + setTargetPath(internal.ResourcePaths) + size() int64 + hashProvider +} + +type hashProvider interface { + hash() string +} + +type StaleValue[V any] struct { + // The value. + Value V + + // IsStaleFunc reports whether the value is stale. + IsStaleFunc func() bool +} + +func (s *StaleValue[V]) IsStale() bool { + return s.IsStaleFunc() +} + +type AtomicStaler struct { + stale uint32 +} + +func (s *AtomicStaler) MarkStale() { + atomic.StoreUint32(&s.stale, 1) +} + +func (s *AtomicStaler) IsStale() bool { + return atomic.LoadUint32(&(s.stale)) > 0 +} + +// For internal use. +type GenericResourceTestInfo struct { + Paths internal.ResourcePaths +} + +// For internal use. +func GetTestInfoForResource(r resource.Resource) GenericResourceTestInfo { + var gr *genericResource + switch v := r.(type) { + case *genericResource: + gr = v + case *resourceAdapter: + gr = v.target.(*genericResource) + default: + panic(fmt.Sprintf("unknown resource type: %T", r)) + } + return GenericResourceTestInfo{ + Paths: gr.paths, + } } // genericResource represents a generic linkable resource. type genericResource struct { - *resourcePathDescriptor - *resourceFileInfo *resourceContent - spec *Spec + sd ResourceSourceDescriptor + paths internal.ResourcePaths + + sourceFilenameIsHash bool + + h *resourceHash // A hash of the source content. Is only calculated in caching situations. 
+ + resource.Staler title string name string params map[string]any - data map[string]any - resourceType string - mediaType media.Type + spec *Spec +} + +func (l *genericResource) IdentifierBase() string { + return l.sd.Path.IdentifierBase() +} + +func (l *genericResource) GetIdentityGroup() identity.Identity { + return l.sd.GroupIdentity +} + +func (l *genericResource) GetDependencyManager() identity.Manager { + return l.sd.DependencyManager +} + +func (l *genericResource) ReadSeekCloser() (hugio.ReadSeekCloser, error) { + return l.sd.OpenReadSeekCloser() } func (l *genericResource) Clone() resource.Resource { return l.clone() } +func (l *genericResource) size() int64 { + l.hash() + return l.h.size +} + +func (l *genericResource) hash() string { + if err := l.h.init(l); err != nil { + panic(err) + } + return l.h.value +} + +func (l *genericResource) setOpenSource(openSource hugio.OpenReadSeekCloser) { + l.sd.OpenReadSeekCloser = openSource +} + +func (l *genericResource) setSourceFilenameIsHash(b bool) { + l.sourceFilenameIsHash = b +} + +func (l *genericResource) setTargetPath(d internal.ResourcePaths) { + l.paths = d +} + func (l *genericResource) cloneTo(targetPath string) resource.Resource { c := l.clone() - - targetPath = helpers.ToSlashTrimLeading(targetPath) - dir, file := path.Split(targetPath) - - c.resourcePathDescriptor = &resourcePathDescriptor{ - relTargetDirFile: dirFile{dir: dir, file: file}, - } - + c.paths = c.paths.FromTargetPath(targetPath) return c - } func (l *genericResource) Content(context.Context) (any, error) { @@ -270,41 +424,50 @@ func (r *genericResource) Err() resource.ResourceError { } func (l *genericResource) Data() any { - return l.data + return l.sd.Data } func (l *genericResource) Key() string { - basePath := l.spec.Cfg.BaseURL().BasePath + basePath := l.spec.Cfg.BaseURL().BasePathNoTrailingSlash + var key string if basePath == "" { - return l.RelPermalink() + key = l.RelPermalink() + } else { + key = strings.TrimPrefix(l.RelPermalink(), basePath) } - return strings.TrimPrefix(l.RelPermalink(), basePath) + + if l.spec.Cfg.IsMultihost() { + key = l.spec.Lang() + key + } + + return key } func (l *genericResource) MediaType() media.Type { - return l.mediaType + return l.sd.MediaType } func (l *genericResource) setMediaType(mediaType media.Type) { - l.mediaType = mediaType + l.sd.MediaType = mediaType } func (l *genericResource) Name() string { return l.name } -func (l *genericResource) Params() maps.Params { - return l.params +func (l *genericResource) NameOriginal() string { + return l.sd.NameOriginal } -func (l *genericResource) Permalink() string { - return l.spec.PermalinkForBaseURL(l.relPermalinkForRel(l.relTargetDirFile.path(), true), l.spec.Cfg.BaseURL().HostURL()) +func (l *genericResource) Params() maps.Params { + return l.params } func (l *genericResource) Publish() error { var err error l.publishInit.Do(func() { - targetFilenames := l.getTargetFilenames() + targetFilenames := l.getResourcePaths().TargetFilenames() + if l.sourceFilenameIsHash { // This is a processed image. We want to avoid copying it if it hasn't changed. 
var changedFilenames []string @@ -340,40 +503,30 @@ func (l *genericResource) Publish() error { } func (l *genericResource) RelPermalink() string { - return l.relPermalinkFor(l.relTargetDirFile.path()) + return l.spec.PathSpec.GetBasePath(false) + paths.PathEscape(l.paths.TargetLink()) +} + +func (l *genericResource) Permalink() string { + return l.spec.Cfg.BaseURL().WithPathNoTrailingSlash + paths.PathEscape(l.paths.TargetPath()) } func (l *genericResource) ResourceType() string { - return l.resourceType + return l.MediaType().MainType } func (l *genericResource) String() string { - return fmt.Sprintf("Resource(%s: %s)", l.resourceType, l.name) + return fmt.Sprintf("Resource(%s: %s)", l.ResourceType(), l.name) } // Path is stored with Unix style slashes. func (l *genericResource) TargetPath() string { - return l.relTargetDirFile.path() + return l.paths.TargetPath() } func (l *genericResource) Title() string { return l.title } -func (l *genericResource) createBasePath(rel string, isURL bool) string { - if l.targetPathBuilder == nil { - return rel - } - tp := l.targetPathBuilder() - - if isURL { - return path.Join(tp.SubResourceBaseLink, rel) - } - - // TODO(bep) path - return path.Join(filepath.ToSlash(tp.SubResourceBaseTarget), rel) -} - func (l *genericResource) initContent() error { var err error l.contentInit.Do(func() { @@ -396,28 +549,12 @@ func (l *genericResource) initContent() error { return err } -func (l *genericResource) setName(name string) { - l.name = name -} - -func (l *genericResource) getResourcePaths() *resourcePathDescriptor { - return l.resourcePathDescriptor -} - func (l *genericResource) getSpec() *Spec { return l.spec } -func (l *genericResource) getTargetFilenames() []string { - paths := l.relTargetPaths() - for i, p := range paths { - paths[i] = filepath.Clean(p) - } - return paths -} - -func (l *genericResource) setTitle(title string) { - l.title = title +func (l *genericResource) getResourcePaths() internal.ResourcePaths { + return l.paths } func (r *genericResource) tryTransformedFileCache(key string, u *transformationUpdate) io.ReadCloser { @@ -437,12 +574,12 @@ func (r *genericResource) mergeData(in map[string]any) { if len(in) == 0 { return } - if r.data == nil { - r.data = make(map[string]any) + if r.sd.Data == nil { + r.sd.Data = make(map[string]any) } for k, v := range in { - if _, found := r.data[k]; !found { - r.data[k] = v + if _, found := r.sd.Data[k]; !found { + r.sd.Data[k] = v } } } @@ -453,142 +590,49 @@ func (rc *genericResource) cloneWithUpdates(u *transformationUpdate) (baseResour if u.content != nil { r.contentInit.Do(func() { r.content = *u.content - r.openReadSeekerCloser = func() (hugio.ReadSeekCloser, error) { + r.sd.OpenReadSeekCloser = func() (hugio.ReadSeekCloser, error) { return hugio.NewReadSeekerNoOpCloserFromString(r.content), nil } }) } - r.mediaType = u.mediaType + r.sd.MediaType = u.mediaType if u.sourceFilename != nil { - r.setSourceFilename(*u.sourceFilename) - } - - if u.sourceFs != nil { - r.setSourceFs(u.sourceFs) + if u.sourceFs == nil { + return nil, errors.New("sourceFs is nil") + } + r.setOpenSource(func() (hugio.ReadSeekCloser, error) { + return u.sourceFs.Open(*u.sourceFilename) + }) + } else if u.sourceFs != nil { + return nil, errors.New("sourceFs is set without sourceFilename") } if u.targetPath == "" { return nil, errors.New("missing targetPath") } - fpath, fname := path.Split(u.targetPath) - r.resourcePathDescriptor.relTargetDirFile = dirFile{dir: fpath, file: fname} - + 
r.setTargetPath(r.paths.FromTargetPath(u.targetPath)) r.mergeData(u.data) return r, nil } func (l genericResource) clone() *genericResource { - gi := *l.resourceFileInfo - rp := *l.resourcePathDescriptor - l.resourceFileInfo = &gi - l.resourcePathDescriptor = &rp l.resourceContent = &resourceContent{} return &l } func (r *genericResource) openPublishFileForWriting(relTargetPath string) (io.WriteCloser, error) { - return helpers.OpenFilesForWriting(r.spec.BaseFs.PublishFs, r.relTargetPathsFor(relTargetPath)...) -} - -func (l *genericResource) permalinkFor(target string) string { - return l.spec.PermalinkForBaseURL(l.relPermalinkForRel(target, true), l.spec.Cfg.BaseURL().HostURL()) -} - -func (l *genericResource) relPermalinkFor(target string) string { - return l.relPermalinkForRel(target, false) -} - -func (l *genericResource) relPermalinkForRel(rel string, isAbs bool) string { - return l.spec.PathSpec.URLizeFilename(l.relTargetPathForRel(rel, false, isAbs, true)) -} - -func (l *genericResource) relTargetPathForRel(rel string, addBaseTargetPath, isAbs, isURL bool) string { - if addBaseTargetPath && len(l.baseTargetPathDirs) > 1 { - panic("multiple baseTargetPathDirs") - } - var basePath string - if addBaseTargetPath && len(l.baseTargetPathDirs) > 0 { - basePath = l.baseTargetPathDirs[0] - } - - return l.relTargetPathForRelAndBasePath(rel, basePath, isAbs, isURL) -} - -func (l *genericResource) relTargetPathForRelAndBasePath(rel, basePath string, isAbs, isURL bool) string { - rel = l.createBasePath(rel, isURL) - - if basePath != "" { - rel = path.Join(basePath, rel) - } - - if l.baseOffset != "" { - rel = path.Join(l.baseOffset, rel) - } - - if isURL { - bp := l.spec.PathSpec.GetBasePath(!isAbs) - if bp != "" { - rel = path.Join(bp, rel) - } - } - - if len(rel) == 0 || rel[0] != '/' { - rel = "/" + rel - } - - return rel -} - -func (l *genericResource) relTargetPaths() []string { - return l.relTargetPathsForRel(l.TargetPath()) -} - -func (l *genericResource) relTargetPathsFor(target string) []string { - return l.relTargetPathsForRel(target) -} - -func (l *genericResource) relTargetPathsForRel(rel string) []string { - if len(l.baseTargetPathDirs) == 0 { - return []string{l.relTargetPathForRelAndBasePath(rel, "", false, false)} - } - - targetPaths := make([]string, len(l.baseTargetPathDirs)) - for i, dir := range l.baseTargetPathDirs { - targetPaths[i] = l.relTargetPathForRelAndBasePath(rel, dir, false, false) - } - return targetPaths -} - -func (l *genericResource) updateParams(params map[string]any) { - if l.params == nil { - l.params = params - return - } - - // Sets the params not already set - for k, v := range params { - if _, found := l.params[k]; !found { - l.params[k] = v - } - } + filenames := r.paths.FromTargetPath(relTargetPath).TargetFilenames() + return helpers.OpenFilesForWriting(r.spec.BaseFs.PublishFs, filenames...) } type targetPather interface { TargetPath() string } -type permalinker interface { - targetPather - permalinkFor(target string) string - relPermalinkFor(target string) string - relTargetPaths() []string - relTargetPathsFor(target string) []string -} - type resourceContent struct { content string contentInit sync.Once @@ -596,113 +640,31 @@ type resourceContent struct { publishInit sync.Once } -type resourceFileInfo struct { - // Will be set if this resource is backed by something other than a file. - openReadSeekerCloser resource.OpenReadSeekCloser - - // This may be set to tell us to look in another filesystem for this resource. 
- // We, by default, use the sourceFs filesystem in the spec below. - sourceFs afero.Fs - - // Absolute filename to the source, including any content folder path. - // Note that this is absolute in relation to the filesystem it is stored in. - // It can be a base path filesystem, and then this filename will not match - // the path to the file on the real filesystem. - sourceFilename string - - // For performance. This means that whenever the content changes, the filename changes. - sourceFilenameIsHash bool - - fi hugofs.FileMetaInfo - - // A hash of the source content. Is only calculated in caching situations. - h *resourceHash +type resourceHash struct { + value string + size int64 + initOnce sync.Once } -func (fi *resourceFileInfo) ReadSeekCloser() (hugio.ReadSeekCloser, error) { - if fi.openReadSeekerCloser != nil { - return fi.openReadSeekerCloser() - } - - f, err := fi.getSourceFs().Open(fi.getSourceFilename()) - if err != nil { - return nil, err - } - return f, nil -} - -func (fi *resourceFileInfo) getFileInfo() hugofs.FileMetaInfo { - return fi.fi -} - -func (fi *resourceFileInfo) getSourceFilename() string { - return fi.sourceFilename -} - -func (fi *resourceFileInfo) setSourceFilename(s string) { - // Make sure it's always loaded by sourceFilename. - fi.openReadSeekerCloser = nil - fi.sourceFilename = s -} - -func (fi *resourceFileInfo) setSourceFilenameIsHash(b bool) { - fi.sourceFilenameIsHash = b -} - -func (fi *resourceFileInfo) getSourceFs() afero.Fs { - return fi.sourceFs -} - -func (fi *resourceFileInfo) setSourceFs(fs afero.Fs) { - fi.sourceFs = fs -} - -func (fi *resourceFileInfo) hash() (string, error) { - var err error - fi.h.init.Do(func() { +func (r *resourceHash) init(l hugio.ReadSeekCloserProvider) error { + var initErr error + r.initOnce.Do(func() { var hash string - var f hugio.ReadSeekCloser - f, err = fi.ReadSeekCloser() + var size int64 + f, err := l.ReadSeekCloser() if err != nil { - err = fmt.Errorf("failed to open source file: %w", err) + initErr = fmt.Errorf("failed to open source: %w", err) return } defer f.Close() - - hash, err = helpers.MD5FromFileFast(f) + hash, size, err = helpers.MD5FromReaderFast(f) if err != nil { + initErr = fmt.Errorf("failed to calculate hash: %w", err) return } - fi.h.value = hash + r.value = hash + r.size = size }) - return fi.h.value, err -} - -func (fi *resourceFileInfo) size() int { - if fi.fi == nil { - return 0 - } - - return int(fi.fi.Size()) -} - -type resourceHash struct { - value string - init sync.Once -} - -type resourcePathDescriptor struct { - // The relative target directory and filename. - relTargetDirFile dirFile - - // Callback used to construct a target path relative to its owner. - targetPathBuilder func() page.TargetPaths - - // This will normally be the same as above, but this will only apply to publishing - // of resources. It may be multiple values when in multihost mode. - baseTargetPathDirs []string - - // baseOffset is set when the output format's path has a offset, e.g. for AMP. 
- baseOffset string + return initErr } diff --git a/resources/resource/dates.go b/resources/resource/dates.go index 6d19ca7b9..88968750d 100644 --- a/resources/resource/dates.go +++ b/resources/resource/dates.go @@ -45,6 +45,10 @@ type Dates struct { FExpiryDate time.Time } +func (d *Dates) IsDateOrLastModAfter(in Dated) bool { + return d.Date().After(in.Date()) || d.Lastmod().After(in.Lastmod()) +} + func (d *Dates) UpdateDateAndLastmodIfAfter(in Dated) { if in.Date().After(d.Date()) { d.FDate = in.Date() diff --git a/resources/resource/resources.go b/resources/resource/resources.go index 795fe1934..9f298b7a6 100644 --- a/resources/resource/resources.go +++ b/resources/resource/resources.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -18,6 +18,7 @@ import ( "fmt" "strings" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/hugofs/glob" "github.com/spf13/cast" ) @@ -54,16 +55,33 @@ func (r Resources) ByType(typ any) Resources { // Get locates the name given in Resources. // The search is case insensitive. func (r Resources) Get(name any) Resource { + if r == nil { + return nil + } namestr, err := cast.ToStringE(name) if err != nil { panic(err) } namestr = strings.ToLower(namestr) + + // First check the Name. + // Note that this can be modified by the user in the front matter, + // also, it does not contain any language code. for _, resource := range r { if strings.EqualFold(namestr, resource.Name()) { return resource } } + + // Finally, check the original name. + for _, resource := range r { + if nop, ok := resource.(NameOriginalProvider); ok { + if strings.EqualFold(namestr, nop.NameOriginal()) { + return resource + } + } + } + return nil } @@ -75,13 +93,15 @@ func (r Resources) GetMatch(pattern any) Resource { panic(err) } + patternstr = paths.NormalizePathStringBasic(patternstr) + g, err := glob.GetGlob(patternstr) if err != nil { panic(err) } for _, resource := range r { - if g.Match(strings.ToLower(resource.Name())) { + if g.Match(paths.NormalizePathStringBasic(resource.Name())) { return resource } } @@ -163,7 +183,6 @@ type Source interface { // Note that GetRemote (as found in resources.GetRemote) is // not covered by this interface, as this is only available as a global template function. type ResourceFinder interface { - // Get locates the Resource with the given name in the current context (e.g. in .Page.Resources). // // It returns nil if no Resource could found, panics if name is invalid. diff --git a/resources/resource/resourcetypes.go b/resources/resource/resourcetypes.go index 9e550e252..43d0aa786 100644 --- a/resources/resource/resourcetypes.go +++ b/resources/resource/resourcetypes.go @@ -76,7 +76,7 @@ type Resource interface { ResourceTypeProvider MediaTypeProvider ResourceLinksProvider - ResourceMetaProvider + ResourceNameTitleProvider ResourceParamsProvider ResourceDataProvider ErrProvider @@ -107,19 +107,41 @@ type ResourceLinksProvider interface { RelPermalink() string } +// ResourceMetaProvider provides metadata about a resource. type ResourceMetaProvider interface { + ResourceNameTitleProvider + ResourceParamsProvider +} + +type WithResourceMetaProvider interface { + // WithResourceMeta creates a new Resource with the given metadata. + // For internal use. 
+ WithResourceMeta(ResourceMetaProvider) Resource +} + +type ResourceNameTitleProvider interface { // Name is the logical name of this resource. This can be set in the front matter // metadata for this resource. If not set, Hugo will assign a value. // This will in most cases be the base filename. // So, for the image "/some/path/sunset.jpg" this will be "sunset.jpg". // The value returned by this method will be used in the GetByPrefix and ByPrefix methods // on Resources. + // Note that for bundled content resources with language code in the filename, this will + // be the name without the language code. Name() string // Title returns the title if set in front matter. For content pages, this will be the expected value. Title() string } +type NameOriginalProvider interface { + // NameOriginal is the original name of this resource. + // Note that for bundled content resources with language code in the filename, this will + // be the name with the language code. + // For internal use (for now). + NameOriginal() string +} + type ResourceParamsProvider interface { // Params set in front matter for this resource. Params() maps.Params @@ -146,6 +168,17 @@ type Identifier interface { Key() string } +// WeightProvider provides a weight. +type WeightProvider interface { + Weight() int +} + +// Weight0Provider provides a weight that's considered before the WeightProvider in sorting. +// This allows the weight set on a given term to win. +type Weight0Provider interface { + Weight0() int +} + // ContentResource represents a Resource that provides a way to get to its content. // Most Resource types in Hugo implements this interface, including Page. type ContentResource interface { @@ -166,10 +199,6 @@ type ContentProvider interface { Content(context.Context) (any, error) } -// OpenReadSeekCloser allows setting some other way (than reading from a filesystem) -// to open or create a ReadSeekCloser. -type OpenReadSeekCloser func() (hugio.ReadSeekCloser, error) - // ReadSeekCloserResource is a Resource that supports loading its content. type ReadSeekCloserResource interface { MediaType() media.Type @@ -192,6 +221,41 @@ type TranslationKeyProvider interface { TranslationKey() string } +// Staler controls stale state of a Resource. A stale resource should be discarded. +type Staler interface { + StaleMarker + StaleInfo +} + +// StaleMarker marks a Resource as stale. +type StaleMarker interface { + MarkStale() +} + +// StaleInfo tells if a resource is marked as stale. +type StaleInfo interface { + IsStale() bool +} + +// IsStaleAny reports whether any of the os is marked as stale. +func IsStaleAny(os ...any) bool { + for _, o := range os { + if s, ok := o.(StaleInfo); ok && s.IsStale() { + return true + } + } + return false +} + +// MarkStale will mark any of the oses as stale, if possible. +func MarkStale(os ...any) { + for _, o := range os { + if s, ok := o.(Staler); ok { + s.MarkStale() + } + } +} + // UnmarshableResource represents a Resource that can be unmarshaled to some other format. 
type UnmarshableResource interface { ReadSeekCloserResource diff --git a/resources/resource_cache.go b/resources/resource_cache.go index 388e293e8..a76a51b1c 100644 --- a/resources/resource_cache.go +++ b/resources/resource_cache.go @@ -14,182 +14,69 @@ package resources import ( + "context" "encoding/json" "io" "path" "path/filepath" - "regexp" "strings" "sync" - "github.com/gohugoio/hugo/helpers" - - hglob "github.com/gohugoio/hugo/hugofs/glob" - "github.com/gohugoio/hugo/resources/resource" + "github.com/gohugoio/hugo/cache/dynacache" "github.com/gohugoio/hugo/cache/filecache" - - "github.com/BurntSushi/locker" ) -const ( - CACHE_CLEAR_ALL = "clear_all" - CACHE_OTHER = "other" -) +func newResourceCache(rs *Spec, memCache *dynacache.Cache) *ResourceCache { + return &ResourceCache{ + fileCache: rs.FileCaches.AssetsCache(), + cacheResource: dynacache.GetOrCreatePartition[string, resource.Resource]( + memCache, + "/res1", + dynacache.OptionsPartition{ClearWhen: dynacache.ClearOnChange, Weight: 40}, + ), + cacheResources: dynacache.GetOrCreatePartition[string, resource.Resources]( + memCache, + "/ress", + dynacache.OptionsPartition{ClearWhen: dynacache.ClearOnChange, Weight: 40}, + ), + cacheResourceTransformation: dynacache.GetOrCreatePartition[string, *resourceAdapterInner]( + memCache, + "/res1/tra", + dynacache.OptionsPartition{ClearWhen: dynacache.ClearOnChange, Weight: 40}, + ), + } +} type ResourceCache struct { sync.RWMutex - // Either resource.Resource or resource.Resources. - cache map[string]any + cacheResource *dynacache.Partition[string, resource.Resource] + cacheResources *dynacache.Partition[string, resource.Resources] + cacheResourceTransformation *dynacache.Partition[string, *resourceAdapterInner] fileCache *filecache.Cache - - // Provides named resource locks. - nlocker *locker.Locker -} - -// ResourceCacheKey converts the filename into the format used in the resource -// cache. -func ResourceCacheKey(filename string) string { - filename = filepath.ToSlash(filename) - return path.Join(resourceKeyPartition(filename), filename) -} - -func resourceKeyPartition(filename string) string { - ext := strings.TrimPrefix(path.Ext(filepath.ToSlash(filename)), ".") - if ext == "" { - ext = CACHE_OTHER - } - return ext -} - -// Commonly used aliases and directory names used for some types. -var extAliasKeywords = map[string][]string{ - "sass": {"scss"}, - "scss": {"sass"}, -} - -// ResourceKeyPartitions resolves a ordered slice of partitions that is -// used to do resource cache invalidations. -// -// We use the first directory path element and the extension, so: -// -// a/b.json => "a", "json" -// b.json => "json" -// -// For some of the extensions we will also map to closely related types, -// e.g. "scss" will also return "sass". -func ResourceKeyPartitions(filename string) []string { - var partitions []string - filename = hglob.NormalizePath(filename) - dir, name := path.Split(filename) - ext := strings.TrimPrefix(path.Ext(filepath.ToSlash(name)), ".") - - if dir != "" { - partitions = append(partitions, strings.Split(dir, "/")[0]) - } - - if ext != "" { - partitions = append(partitions, ext) - } - - if aliases, found := extAliasKeywords[ext]; found { - partitions = append(partitions, aliases...) - } - - if len(partitions) == 0 { - partitions = []string{CACHE_OTHER} - } - - return helpers.UniqueStringsSorted(partitions) -} - -// ResourceKeyContainsAny returns whether the key is a member of any of the -// given partitions. -// -// This is used for resource cache invalidation. 
-func ResourceKeyContainsAny(key string, partitions []string) bool { - parts := strings.Split(key, "/") - for _, p1 := range partitions { - for _, p2 := range parts { - if p1 == p2 { - return true - } - } - } - return false -} - -func (c *ResourceCache) clear() { - c.Lock() - defer c.Unlock() - - c.cache = make(map[string]any) - c.nlocker = locker.NewLocker() -} - -func (c *ResourceCache) Contains(key string) bool { - key = c.cleanKey(filepath.ToSlash(key)) - _, found := c.get(key) - return found } func (c *ResourceCache) cleanKey(key string) string { - return strings.TrimPrefix(path.Clean(strings.ToLower(key)), "/") + return strings.TrimPrefix(path.Clean(strings.ToLower(filepath.ToSlash(key))), "/") } -func (c *ResourceCache) get(key string) (any, bool) { - c.RLock() - defer c.RUnlock() - r, found := c.cache[key] - return r, found +func (c *ResourceCache) Get(ctx context.Context, key string) (resource.Resource, bool) { + return c.cacheResource.Get(ctx, key) } func (c *ResourceCache) GetOrCreate(key string, f func() (resource.Resource, error)) (resource.Resource, error) { - r, err := c.getOrCreate(key, func() (any, error) { return f() }) - if r == nil || err != nil { - return nil, err - } - return r.(resource.Resource), nil + return c.cacheResource.GetOrCreate(key, func(key string) (resource.Resource, error) { + return f() + }) } func (c *ResourceCache) GetOrCreateResources(key string, f func() (resource.Resources, error)) (resource.Resources, error) { - r, err := c.getOrCreate(key, func() (any, error) { return f() }) - if r == nil || err != nil { - return nil, err - } - return r.(resource.Resources), nil -} - -func (c *ResourceCache) getOrCreate(key string, f func() (any, error)) (any, error) { - key = c.cleanKey(key) - // First check in-memory cache. - r, found := c.get(key) - if found { - return r, nil - } - // This is a potentially long running operation, so get a named lock. - c.nlocker.Lock(key) - - // Double check in-memory cache. - r, found = c.get(key) - if found { - c.nlocker.Unlock(key) - return r, nil - } - - defer c.nlocker.Unlock(key) - - r, err := f() - if err != nil { - return nil, err - } - - c.set(key, r) - - return r, nil + return c.cacheResources.GetOrCreate(key, func(key string) (resource.Resources, error) { + return f() + }) } func (c *ResourceCache) getFilenames(key string) (string, string) { @@ -242,64 +129,3 @@ func (c *ResourceCache) writeMeta(key string, meta transformedResourceMetadata) return fi, fc, err } - -func (c *ResourceCache) set(key string, r any) { - c.Lock() - defer c.Unlock() - c.cache[key] = r -} - -func (c *ResourceCache) DeletePartitions(partitions ...string) { - partitionsSet := map[string]bool{ - // Always clear out the resources not matching any partition. - "other": true, - } - for _, p := range partitions { - partitionsSet[p] = true - } - - if partitionsSet[CACHE_CLEAR_ALL] { - c.clear() - return - } - - c.Lock() - defer c.Unlock() - - for k := range c.cache { - clear := false - for p := range partitionsSet { - if strings.Contains(k, p) { - // There will be some false positive, but that's fine. 
- clear = true - break - } - } - - if clear { - delete(c.cache, k) - } - } -} - -func (c *ResourceCache) DeleteMatchesRe(re *regexp.Regexp) { - c.Lock() - defer c.Unlock() - - for k := range c.cache { - if re.MatchString(k) { - delete(c.cache, k) - } - } -} - -func (c *ResourceCache) DeleteMatches(match func(string) bool) { - c.Lock() - defer c.Unlock() - - for k := range c.cache { - if match(k) { - delete(c.cache, k) - } - } -} diff --git a/resources/resource_cache_test.go b/resources/resource_cache_test.go deleted file mode 100644 index bcb241025..000000000 --- a/resources/resource_cache_test.go +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package resources - -import ( - "path/filepath" - "testing" - - qt "github.com/frankban/quicktest" -) - -func TestResourceKeyPartitions(t *testing.T) { - c := qt.New(t) - - for _, test := range []struct { - input string - expected []string - }{ - {"a.js", []string{"js"}}, - {"a.scss", []string{"sass", "scss"}}, - {"a.sass", []string{"sass", "scss"}}, - {"d/a.js", []string{"d", "js"}}, - {"js/a.js", []string{"js"}}, - {"D/a.JS", []string{"d", "js"}}, - {"d/a", []string{"d"}}, - {filepath.FromSlash("/d/a.js"), []string{"d", "js"}}, - {filepath.FromSlash("/d/e/a.js"), []string{"d", "js"}}, - } { - c.Assert(ResourceKeyPartitions(test.input), qt.DeepEquals, test.expected, qt.Commentf(test.input)) - } -} - -func TestResourceKeyContainsAny(t *testing.T) { - c := qt.New(t) - - for _, test := range []struct { - key string - filename string - expected bool - }{ - {"styles/css", "asdf.css", true}, - {"styles/css", "styles/asdf.scss", true}, - {"js/foo.bar", "asdf.css", false}, - } { - c.Assert(ResourceKeyContainsAny(test.key, ResourceKeyPartitions(test.filename)), qt.Equals, test.expected) - } -} diff --git a/resources/resource_factories/bundler/bundler.go b/resources/resource_factories/bundler/bundler.go index 67f1f90fa..c255da601 100644 --- a/resources/resource_factories/bundler/bundler.go +++ b/resources/resource_factories/bundler/bundler.go @@ -18,7 +18,6 @@ import ( "fmt" "io" "path" - "path/filepath" "github.com/gohugoio/hugo/common/hugio" "github.com/gohugoio/hugo/media" @@ -81,8 +80,8 @@ func (r *multiReadSeekCloser) Close() error { // Concat concatenates the list of Resource objects. func (c *Client) Concat(targetPath string, r resource.Resources) (resource.Resource, error) { - // The CACHE_OTHER will make sure this will be re-created and published on rebuilds. - return c.rs.ResourceCache.GetOrCreate(path.Join(resources.CACHE_OTHER, targetPath), func() (resource.Resource, error) { + targetPath = path.Clean(targetPath) + return c.rs.ResourceCache.GetOrCreate(targetPath, func() (resource.Resource, error) { var resolvedm media.Type // The given set of resources must be of the same Media Type. 
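The rewritten ResourceCache above is a thin wrapper around dynacache partitions. A minimal sketch of the call pattern it supports, assuming a hypothetical buildResource constructor; the GetOrCreate signature is taken from the patch:

    // Illustrative only: expensive work is wrapped in the create callback, and the
    // dynacache partition handles locking, de-duplication and memory-pressure eviction.
    key := path.Clean("/css/main.css") // keys are cleaned, slash-separated paths
    r, err := c.rs.ResourceCache.GetOrCreate(key, func() (resource.Resource, error) {
        return buildResource() // hypothetical constructor for the cached resource
    })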
@@ -132,12 +131,11 @@ func (c *Client) Concat(targetPath string, r resource.Resources) (resource.Resou return newMultiReadSeekCloser(rcsources...), nil } - composite, err := c.rs.New( + composite, err := c.rs.NewResource( resources.ResourceSourceDescriptor{ - Fs: c.rs.FileCaches.AssetsCache().Fs, LazyPublish: true, OpenReadSeekCloser: concatr, - RelTargetFilename: filepath.Clean(targetPath), + TargetPath: targetPath, }) if err != nil { return nil, err diff --git a/resources/resource_factories/create/create.go b/resources/resource_factories/create/create.go index 2e4721299..e98eb7425 100644 --- a/resources/resource_factories/create/create.go +++ b/resources/resource_factories/create/create.go @@ -17,15 +17,19 @@ package create import ( "net/http" + "os" "path" "path/filepath" "strings" "time" + "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/hugofs/glob" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/hugofs" + "github.com/gohugoio/hugo/cache/dynacache" "github.com/gohugoio/hugo/cache/filecache" "github.com/gohugoio/hugo/common/hugio" "github.com/gohugoio/hugo/resources" @@ -53,19 +57,44 @@ func New(rs *resources.Spec) *Client { // Copy copies r to the new targetPath. func (c *Client) Copy(r resource.Resource, targetPath string) (resource.Resource, error) { - return c.rs.ResourceCache.GetOrCreate(resources.ResourceCacheKey(targetPath), func() (resource.Resource, error) { + key := dynacache.CleanKey(targetPath) + return c.rs.ResourceCache.GetOrCreate(key, func() (resource.Resource, error) { return resources.Copy(r, targetPath), nil }) } -// Get creates a new Resource by opening the given filename in the assets filesystem. -func (c *Client) Get(filename string) (resource.Resource, error) { - filename = filepath.Clean(filename) - return c.rs.ResourceCache.GetOrCreate(resources.ResourceCacheKey(filename), func() (resource.Resource, error) { - return c.rs.New(resources.ResourceSourceDescriptor{ - Fs: c.rs.BaseFs.Assets.Fs, - LazyPublish: true, - SourceFilename: filename, +func (c *Client) newDependencyManager() identity.Manager { + if c.rs.Cfg.Running() { + return identity.NewManager("resources") + } + return identity.NopManager +} + +// Get creates a new Resource by opening the given pathname in the assets filesystem. +func (c *Client) Get(pathname string) (resource.Resource, error) { + pathname = path.Clean(pathname) + key := dynacache.CleanKey(pathname) + + return c.rs.ResourceCache.GetOrCreate(key, func() (resource.Resource, error) { + // The resource file will not be read before it gets used (e.g. in .Content), + // so we need to check that the file exists here. + filename := filepath.FromSlash(pathname) + if _, err := c.rs.BaseFs.Assets.Fs.Stat(filename); err != nil { + if os.IsNotExist(err) { + return nil, nil + } + // A real error. 
+ return nil, err + } + + return c.rs.NewResource(resources.ResourceSourceDescriptor{ + LazyPublish: true, + OpenReadSeekCloser: func() (hugio.ReadSeekCloser, error) { + return c.rs.BaseFs.Assets.Fs.Open(filename) + }, + GroupIdentity: identity.StringIdentity(key), + DependencyManager: c.newDependencyManager(), + TargetPath: pathname, }) }) } @@ -95,9 +124,6 @@ func (c *Client) GetMatch(pattern string) (resource.Resource, error) { func (c *Client) match(name, pattern string, matchFunc func(r resource.Resource) bool, firstOnly bool) (resource.Resources, error) { pattern = glob.NormalizePath(pattern) partitions := glob.FilterGlobParts(strings.Split(pattern, "/")) - if len(partitions) == 0 { - partitions = []string{resources.CACHE_OTHER} - } key := path.Join(name, path.Join(partitions...)) key = path.Join(key, pattern) @@ -106,13 +132,13 @@ func (c *Client) match(name, pattern string, matchFunc func(r resource.Resource) handle := func(info hugofs.FileMetaInfo) (bool, error) { meta := info.Meta() - r, err := c.rs.New(resources.ResourceSourceDescriptor{ + r, err := c.rs.NewResource(resources.ResourceSourceDescriptor{ LazyPublish: true, - FileInfo: info, OpenReadSeekCloser: func() (hugio.ReadSeekCloser, error) { return meta.Open() }, - RelTargetFilename: meta.Path, + GroupIdentity: meta.PathInfo, + TargetPath: meta.PathInfo.PathNoLang(), }) if err != nil { return true, err @@ -138,15 +164,19 @@ func (c *Client) match(name, pattern string, matchFunc func(r resource.Resource) // FromString creates a new Resource from a string with the given relative target path. // TODO(bep) see #10912; we currently emit a warning for this config scenario. func (c *Client) FromString(targetPath, content string) (resource.Resource, error) { - return c.rs.ResourceCache.GetOrCreate(path.Join(resources.CACHE_OTHER, targetPath), func() (resource.Resource, error) { - return c.rs.New( + targetPath = path.Clean(targetPath) + key := dynacache.CleanKey(targetPath) + helpers.MD5String(content) + r, err := c.rs.ResourceCache.GetOrCreate(key, func() (resource.Resource, error) { + return c.rs.NewResource( resources.ResourceSourceDescriptor{ - Fs: c.rs.FileCaches.AssetsCache().Fs, - LazyPublish: true, + LazyPublish: true, + GroupIdentity: identity.Anonymous, // All usage of this resource are tracked via its string content. OpenReadSeekCloser: func() (hugio.ReadSeekCloser, error) { return hugio.NewReadSeekerNoOpCloserFromString(content), nil }, - RelTargetFilename: filepath.Clean(targetPath), + TargetPath: targetPath, }) }) + + return r, err } diff --git a/resources/resource_factories/create/integration_test.go b/resources/resource_factories/create/integration_test.go index 140c5d091..61bc17adb 100644 --- a/resources/resource_factories/create/integration_test.go +++ b/resources/resource_factories/create/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -25,7 +25,6 @@ import ( ) func TestGetRemoteHead(t *testing.T) { - files := ` -- config.toml -- [security] @@ -60,7 +59,6 @@ func TestGetRemoteHead(t *testing.T) { "Head Content: .", "Head Data: map[ContentLength:18210 ContentType:image/png Status:200 OK StatusCode:200 TransferEncoding:[]]", ) - } func TestGetRemoteRetry(t *testing.T) { @@ -133,14 +131,11 @@ mediaTypes = ['text/plain'] TxtarString: files, }, ).BuildE() - // This is hard to get stable on GitHub Actions, it sometimes succeeds due to timing issues. if err != nil { b.AssertLogContains("Got Err") b.AssertLogContains("Retry timeout") b.AssertLogContains("ContentLength:0") } - }) - } diff --git a/resources/resource_factories/create/remote.go b/resources/resource_factories/create/remote.go index d1fd2481d..c2d17e7a5 100644 --- a/resources/resource_factories/create/remote.go +++ b/resources/resource_factories/create/remote.go @@ -24,7 +24,6 @@ import ( "net/http/httputil" "net/url" "path" - "path/filepath" "strings" "time" @@ -253,15 +252,16 @@ func (c *Client) FromRemote(uri string, optionsm map[string]any) (resource.Resou resourceID = filename[:len(filename)-len(path.Ext(filename))] + "_" + resourceID + mediaType.FirstSuffix.FullSuffix data := responseToData(res, false) - return c.rs.New( + return c.rs.NewResource( resources.ResourceSourceDescriptor{ - MediaType: mediaType, - Data: data, - LazyPublish: true, + MediaType: mediaType, + Data: data, + GroupIdentity: identity.StringIdentity(resourceID), + LazyPublish: true, OpenReadSeekCloser: func() (hugio.ReadSeekCloser, error) { return hugio.NewReadSeekerNoOpCloser(bytes.NewReader(body)), nil }, - RelTargetFilename: filepath.Clean(resourceID), + TargetPath: resourceID, }) } diff --git a/resources/resource_metadata.go b/resources/resource_metadata.go index 8954a5109..869fc11bf 100644 --- a/resources/resource_metadata.go +++ b/resources/resource_metadata.go @@ -28,111 +28,161 @@ import ( ) var ( - _ metaAssigner = (*genericResource)(nil) - _ metaAssigner = (*imageResource)(nil) - _ metaAssignerProvider = (*resourceAdapter)(nil) + _ mediaTypeAssigner = (*genericResource)(nil) + _ mediaTypeAssigner = (*imageResource)(nil) + _ resource.Staler = (*genericResource)(nil) + _ resource.NameOriginalProvider = (*genericResource)(nil) ) -type metaAssignerProvider interface { - getMetaAssigner() metaAssigner -} - // metaAssigner allows updating metadata in resources that supports it. type metaAssigner interface { setTitle(title string) setName(name string) - setMediaType(mediaType media.Type) updateParams(params map[string]any) } +// metaAssigner allows updating the media type in resources that supports it. +type mediaTypeAssigner interface { + setMediaType(mediaType media.Type) +} + const counterPlaceHolder = ":counter" +var _ metaAssigner = (*metaResource)(nil) + +// metaResource is a resource with metadata that can be updated. 
+type metaResource struct { + changed bool + title string + name string + params maps.Params +} + +func (r *metaResource) Name() string { + return r.name +} + +func (r *metaResource) Title() string { + return r.title +} + +func (r *metaResource) Params() maps.Params { + return r.params +} + +func (r *metaResource) setTitle(title string) { + r.title = title + r.changed = true +} + +func (r *metaResource) setName(name string) { + r.name = name + r.changed = true +} + +func (r *metaResource) updateParams(params map[string]any) { + if r.params == nil { + r.params = make(map[string]interface{}) + } + for k, v := range params { + r.params[k] = v + } + r.changed = true +} + +func CloneWithMetadataIfNeeded(m []map[string]any, r resource.Resource) resource.Resource { + wmp, ok := r.(resource.WithResourceMetaProvider) + if !ok { + return r + } + + wrapped := &metaResource{ + name: r.Name(), + title: r.Title(), + params: r.Params(), + } + + assignMetadata(m, wrapped) + if !wrapped.changed { + return r + } + + return wmp.WithResourceMeta(wrapped) +} + // AssignMetadata assigns the given metadata to those resources that supports updates // and matching by wildcard given in `src` using `filepath.Match` with lower cased values. // This assignment is additive, but the most specific match needs to be first. // The `name` and `title` metadata field support shell-matched collection it got a match in. // See https://golang.org/pkg/path/#Match -func AssignMetadata(metadata []map[string]any, resources ...resource.Resource) error { +func assignMetadata(metadata []map[string]any, ma *metaResource) error { counters := make(map[string]int) - for _, r := range resources { - var ma metaAssigner - mp, ok := r.(metaAssignerProvider) - if ok { - ma = mp.getMetaAssigner() - } else { - ma, ok = r.(metaAssigner) - if !ok { - continue - } + var ( + nameSet, titleSet bool + nameCounter, titleCounter = 0, 0 + nameCounterFound, titleCounterFound bool + resourceSrcKey = strings.ToLower(ma.Name()) + ) + + for _, meta := range metadata { + src, found := meta["src"] + if !found { + return fmt.Errorf("missing 'src' in metadata for resource") } - var ( - nameSet, titleSet bool - nameCounter, titleCounter = 0, 0 - nameCounterFound, titleCounterFound bool - resourceSrcKey = strings.ToLower(r.Name()) - ) + srcKey := strings.ToLower(cast.ToString(src)) - for _, meta := range metadata { - src, found := meta["src"] - if !found { - return fmt.Errorf("missing 'src' in metadata for resource") - } + glob, err := glob.GetGlob(srcKey) + if err != nil { + return fmt.Errorf("failed to match resource with metadata: %w", err) + } - srcKey := strings.ToLower(cast.ToString(src)) + match := glob.Match(resourceSrcKey) - glob, err := glob.GetGlob(srcKey) - if err != nil { - return fmt.Errorf("failed to match resource with metadata: %w", err) - } - - match := glob.Match(resourceSrcKey) - - if match { - if !nameSet { - name, found := meta["name"] - if found { - name := cast.ToString(name) - if !nameCounterFound { - nameCounterFound = strings.Contains(name, counterPlaceHolder) - } - if nameCounterFound && nameCounter == 0 { - counterKey := "name_" + srcKey - nameCounter = counters[counterKey] + 1 - counters[counterKey] = nameCounter - } - - ma.setName(replaceResourcePlaceholders(name, nameCounter)) - nameSet = true - } - } - - if !titleSet { - title, found := meta["title"] - if found { - title := cast.ToString(title) - if !titleCounterFound { - titleCounterFound = strings.Contains(title, counterPlaceHolder) - } - if titleCounterFound && titleCounter == 0 { 
- counterKey := "title_" + srcKey - titleCounter = counters[counterKey] + 1 - counters[counterKey] = titleCounter - } - ma.setTitle((replaceResourcePlaceholders(title, titleCounter))) - titleSet = true - } - } - - params, found := meta["params"] + if match { + if !nameSet { + name, found := meta["name"] if found { - m := maps.ToStringMap(params) - // Needed for case insensitive fetching of params values - maps.PrepareParams(m) - ma.updateParams(m) + name := cast.ToString(name) + if !nameCounterFound { + nameCounterFound = strings.Contains(name, counterPlaceHolder) + } + if nameCounterFound && nameCounter == 0 { + counterKey := "name_" + srcKey + nameCounter = counters[counterKey] + 1 + counters[counterKey] = nameCounter + } + + ma.setName(replaceResourcePlaceholders(name, nameCounter)) + nameSet = true } } + + if !titleSet { + title, found := meta["title"] + if found { + title := cast.ToString(title) + if !titleCounterFound { + titleCounterFound = strings.Contains(title, counterPlaceHolder) + } + if titleCounterFound && titleCounter == 0 { + counterKey := "title_" + srcKey + titleCounter = counters[counterKey] + 1 + counters[counterKey] = titleCounter + } + ma.setTitle((replaceResourcePlaceholders(title, titleCounter))) + titleSet = true + } + } + + params, found := meta["params"] + if found { + m := maps.ToStringMap(params) + // Needed for case insensitive fetching of params values + maps.PrepareParams(m) + ma.updateParams(m) + } } } diff --git a/resources/resource_spec.go b/resources/resource_spec.go index 3e1b53205..66f56d147 100644 --- a/resources/resource_spec.go +++ b/resources/resource_spec.go @@ -14,54 +14,44 @@ package resources import ( - "errors" - "fmt" - "mime" - "os" "path" - "path/filepath" - "strings" "sync" - "github.com/BurntSushi/locker" "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/config/allconfig" "github.com/gohugoio/hugo/output" + "github.com/gohugoio/hugo/resources/internal" "github.com/gohugoio/hugo/resources/jsconfig" "github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/common/hexec" "github.com/gohugoio/hugo/common/loggers" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/helpers" - "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/resources/postpub" + "github.com/gohugoio/hugo/cache/dynacache" "github.com/gohugoio/hugo/cache/filecache" "github.com/gohugoio/hugo/media" "github.com/gohugoio/hugo/resources/images" "github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/resources/resource" "github.com/gohugoio/hugo/tpl" - "github.com/spf13/afero" ) func NewSpec( s *helpers.PathSpec, common *SpecCommon, // may be nil - imageCache *ImageCache, // may be nil + fileCaches filecache.Caches, + memCache *dynacache.Cache, incr identity.Incrementer, logger loggers.Logger, errorHandler herrors.ErrorSender, - execHelper *hexec.Exec) (*Spec, error) { - - fileCaches, err := filecache.NewCaches(s) - if err != nil { - return nil, fmt.Errorf("failed to create file caches from configuration: %w", err) - } - + execHelper *hexec.Exec, +) (*Spec, error) { conf := s.Cfg.GetConfig().(*allconfig.Config) imgConfig := conf.Imaging @@ -91,37 +81,28 @@ func NewSpec( PostProcessResources: make(map[string]postpub.PostPublishedResource), JSConfigBuilder: jsconfig.NewBuilder(), }, - ResourceCache: &ResourceCache{ - fileCache: fileCaches.AssetsCache(), - cache: make(map[string]any), - nlocker: locker.NewLocker(), - }, } } - if imageCache == nil { - imageCache = newImageCache( - 
fileCaches.ImageCache(), - s, - ) - } else { - imageCache = imageCache.WithPathSpec(s) - - } - rs := &Spec{ PathSpec: s, Logger: logger, ErrorSender: errorHandler, imaging: imaging, - ImageCache: imageCache, - ExecHelper: execHelper, + ImageCache: newImageCache( + fileCaches.ImageCache(), + memCache, + s, + ), + ExecHelper: execHelper, Permalinks: permalinks, SpecCommon: common, } + rs.ResourceCache = newResourceCache(rs, memCache) + return rs, nil } @@ -162,8 +143,51 @@ type PostBuildAssets struct { JSConfigBuilder *jsconfig.Builder } -func (r *Spec) New(fd ResourceSourceDescriptor) (resource.Resource, error) { - return r.newResourceFor(fd) +// NewResource creates a new Resource from the given ResourceSourceDescriptor. +func (r *Spec) NewResource(rd ResourceSourceDescriptor) (resource.Resource, error) { + if err := rd.init(r); err != nil { + return nil, err + } + + dir, name := path.Split(rd.TargetPath) + dir = paths.ToSlashPreserveLeading(dir) + if dir == "/" { + dir = "" + } + rp := internal.ResourcePaths{ + File: name, + Dir: dir, + BaseDirTarget: rd.BasePathTargetPath, + BaseDirLink: rd.BasePathRelPermalink, + TargetBasePaths: rd.TargetBasePaths, + } + + gr := &genericResource{ + Staler: &AtomicStaler{}, + h: &resourceHash{}, + paths: rp, + spec: r, + sd: rd, + params: make(map[string]any), + name: rd.Name, + title: rd.Name, + resourceContent: &resourceContent{}, + } + + if rd.MediaType.MainType == "image" { + imgFormat, ok := images.ImageFormatFromMediaSubType(rd.MediaType.SubType) + if ok { + ir := &imageResource{ + Image: images.NewImage(imgFormat, r.imaging, nil, gr), + baseResource: gr, + } + ir.root = ir + return newResourceAdapter(gr.spec, rd.LazyPublish, ir), nil + } + + } + + return newResourceAdapter(gr.spec, rd.LazyPublish, gr), nil } func (r *Spec) MediaTypes() media.Types { @@ -178,205 +202,6 @@ func (r *Spec) BuildConfig() config.BuildConfig { return r.Cfg.GetConfigSection("build").(config.BuildConfig) } -func (r *Spec) CacheStats() string { - r.ImageCache.mu.RLock() - defer r.ImageCache.mu.RUnlock() - - s := fmt.Sprintf("Cache entries: %d", len(r.ImageCache.store)) - - count := 0 - for k := range r.ImageCache.store { - if count > 5 { - break - } - s += "\n" + k - count++ - } - - return s -} - -func (r *Spec) ClearCaches() { - r.ImageCache.clear() - r.ResourceCache.clear() -} - -func (r *Spec) DeleteBySubstring(s string) { - r.ImageCache.deleteIfContains(s) -} - func (s *Spec) String() string { return "spec" } - -// TODO(bep) clean up below -func (r *Spec) newGenericResource(sourceFs afero.Fs, - targetPathBuilder func() page.TargetPaths, - osFileInfo os.FileInfo, - sourceFilename, - baseFilename string, - mediaType media.Type) *genericResource { - return r.newGenericResourceWithBase( - sourceFs, - nil, - nil, - targetPathBuilder, - osFileInfo, - sourceFilename, - baseFilename, - mediaType, - nil, - ) -} - -func (r *Spec) newGenericResourceWithBase( - sourceFs afero.Fs, - openReadSeekerCloser resource.OpenReadSeekCloser, - targetPathBaseDirs []string, - targetPathBuilder func() page.TargetPaths, - osFileInfo os.FileInfo, - sourceFilename, - baseFilename string, - mediaType media.Type, - data map[string]any, -) *genericResource { - if osFileInfo != nil && osFileInfo.IsDir() { - panic(fmt.Sprintf("dirs not supported resource types: %v", osFileInfo)) - } - - // This value is used both to construct URLs and file paths, but start - // with a Unix-styled path. 
- baseFilename = helpers.ToSlashTrimLeading(baseFilename) - fpath, fname := path.Split(baseFilename) - - resourceType := mediaType.MainType - - pathDescriptor := &resourcePathDescriptor{ - baseTargetPathDirs: helpers.UniqueStringsReuse(targetPathBaseDirs), - targetPathBuilder: targetPathBuilder, - relTargetDirFile: dirFile{dir: fpath, file: fname}, - } - - var fim hugofs.FileMetaInfo - if osFileInfo != nil { - fim = osFileInfo.(hugofs.FileMetaInfo) - } - - gfi := &resourceFileInfo{ - fi: fim, - openReadSeekerCloser: openReadSeekerCloser, - sourceFs: sourceFs, - sourceFilename: sourceFilename, - h: &resourceHash{}, - } - - g := &genericResource{ - resourceFileInfo: gfi, - resourcePathDescriptor: pathDescriptor, - mediaType: mediaType, - resourceType: resourceType, - spec: r, - params: make(map[string]any), - name: baseFilename, - title: baseFilename, - resourceContent: &resourceContent{}, - data: data, - } - - return g -} - -func (r *Spec) newResource(sourceFs afero.Fs, fd ResourceSourceDescriptor) (resource.Resource, error) { - fi := fd.FileInfo - var sourceFilename string - - if fd.OpenReadSeekCloser != nil { - } else if fd.SourceFilename != "" { - var err error - fi, err = sourceFs.Stat(fd.SourceFilename) - if err != nil { - if herrors.IsNotExist(err) { - return nil, nil - } - return nil, err - } - sourceFilename = fd.SourceFilename - } else { - sourceFilename = fd.SourceFile.Filename() - } - - if fd.RelTargetFilename == "" { - fd.RelTargetFilename = sourceFilename - } - - mimeType := fd.MediaType - if mimeType.IsZero() { - ext := strings.ToLower(filepath.Ext(fd.RelTargetFilename)) - var ( - found bool - suffixInfo media.SuffixInfo - ) - mimeType, suffixInfo, found = r.MediaTypes().GetFirstBySuffix(strings.TrimPrefix(ext, ".")) - // TODO(bep) we need to handle these ambiguous types better, but in this context - // we most likely want the application/xml type. - if suffixInfo.Suffix == "xml" && mimeType.SubType == "rss" { - mimeType, found = r.MediaTypes().GetByType("application/xml") - } - - if !found { - // A fallback. Note that mime.TypeByExtension is slow by Hugo standards, - // so we should configure media types to avoid this lookup for most - // situations. - mimeStr := mime.TypeByExtension(ext) - if mimeStr != "" { - mimeType, _ = media.FromStringAndExt(mimeStr, ext) - } - } - } - - gr := r.newGenericResourceWithBase( - sourceFs, - fd.OpenReadSeekCloser, - fd.TargetBasePaths, - fd.TargetPaths, - fi, - sourceFilename, - fd.RelTargetFilename, - mimeType, - fd.Data) - - if mimeType.MainType == "image" { - imgFormat, ok := images.ImageFormatFromMediaSubType(mimeType.SubType) - if ok { - ir := &imageResource{ - Image: images.NewImage(imgFormat, r.imaging, nil, gr), - baseResource: gr, - } - ir.root = ir - return newResourceAdapter(gr.spec, fd.LazyPublish, ir), nil - } - } - - return newResourceAdapter(gr.spec, fd.LazyPublish, gr), nil -} - -func (r *Spec) newResourceFor(fd ResourceSourceDescriptor) (resource.Resource, error) { - if fd.OpenReadSeekCloser == nil { - if fd.SourceFile != nil && fd.SourceFilename != "" { - return nil, errors.New("both SourceFile and AbsSourceFilename provided") - } else if fd.SourceFile == nil && fd.SourceFilename == "" { - return nil, errors.New("either SourceFile or AbsSourceFilename must be provided") - } - } - - if fd.RelTargetFilename == "" { - fd.RelTargetFilename = fd.Filename() - } - - if len(fd.TargetBasePaths) == 0 { - // If not set, we publish the same resource to all hosts. 
- fd.TargetBasePaths = r.MultihostTargetBasePaths - } - - return r.newResource(fd.Fs, fd) -} diff --git a/resources/resource_spec_test.go b/resources/resource_spec_test.go new file mode 100644 index 000000000..67fe09992 --- /dev/null +++ b/resources/resource_spec_test.go @@ -0,0 +1,48 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package resources_test + +import ( + "testing" + + qt "github.com/frankban/quicktest" + "github.com/gohugoio/hugo/common/hugio" + "github.com/gohugoio/hugo/identity" + "github.com/gohugoio/hugo/resources" +) + +func TestNewResource(t *testing.T) { + c := qt.New(t) + + spec := newTestResourceSpec(specDescriptor{c: c}) + + open := hugio.NewOpenReadSeekCloser(hugio.NewReadSeekerNoOpCloserFromString("content")) + + rd := resources.ResourceSourceDescriptor{ + OpenReadSeekCloser: open, + TargetPath: "a/b.txt", + BasePathRelPermalink: "c/d", + BasePathTargetPath: "e/f", + GroupIdentity: identity.Anonymous, + } + + r, err := spec.NewResource(rd) + c.Assert(err, qt.IsNil) + c.Assert(r, qt.Not(qt.IsNil)) + c.Assert(r.RelPermalink(), qt.Equals, "/c/d/a/b.txt") + + info := resources.GetTestInfoForResource(r) + c.Assert(info.Paths.TargetLink(), qt.Equals, "/c/d/a/b.txt") + c.Assert(info.Paths.TargetPath(), qt.Equals, "/e/f/a/b.txt") +} diff --git a/resources/resource_test.go b/resources/resource_test.go deleted file mode 100644 index d6065c248..000000000 --- a/resources/resource_test.go +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package resources_test - -import ( - "testing" - - "github.com/gohugoio/hugo/resources" - - "github.com/gohugoio/hugo/media" - - qt "github.com/frankban/quicktest" -) - -func TestNewResourceFromFilename(t *testing.T) { - c := qt.New(t) - spec := newTestResourceSpec(specDescriptor{c: c}) - - writeSource(t, spec.Fs, "assets/a/b/logo.png", "image") - writeSource(t, spec.Fs, "assets/a/b/data.json", "json") - - r, err := spec.New(resources.ResourceSourceDescriptor{Fs: spec.BaseFs.Assets.Fs, SourceFilename: "a/b/logo.png"}) - - c.Assert(err, qt.IsNil) - c.Assert(r, qt.Not(qt.IsNil)) - c.Assert(r.ResourceType(), qt.Equals, "image") - c.Assert(r.RelPermalink(), qt.Equals, "/a/b/logo.png") - c.Assert(r.Permalink(), qt.Equals, "https://example.com/a/b/logo.png") - - r, err = spec.New(resources.ResourceSourceDescriptor{Fs: spec.BaseFs.Assets.Fs, SourceFilename: "a/b/data.json"}) - - c.Assert(err, qt.IsNil) - c.Assert(r, qt.Not(qt.IsNil)) - c.Assert(r.ResourceType(), qt.Equals, "application") -} - -var pngType, _ = media.FromStringAndExt("image/png", "png") diff --git a/resources/resource_transformers/babel/babel.go b/resources/resource_transformers/babel/babel.go index 2999d73cb..212331d8e 100644 --- a/resources/resource_transformers/babel/babel.go +++ b/resources/resource_transformers/babel/babel.go @@ -140,7 +140,7 @@ func (t *babelTransformation) Transform(ctx *resources.ResourceTransformationCtx configFile = t.rs.BaseFs.ResolveJSConfigFile(configFile) if configFile == "" && t.options.Config != "" { // Only fail if the user specified config file is not found. - return fmt.Errorf("babel config %q not found:", configFile) + return fmt.Errorf("babel config %q not found", configFile) } } @@ -177,7 +177,6 @@ func (t *babelTransformation) Transform(ctx *resources.ResourceTransformationCtx // ARGA [--no-install babel --config-file /private/var/folders/_g/j3j21hts4fn7__h04w2x8gb40000gn/T/hugo-test-babel812882892/babel.config.js --source-maps --filename=js/main2.js --out-file=/var/folders/_g/j3j21hts4fn7__h04w2x8gb40000gn/T/compileOut-2237820197.js] // [--no-install babel --config-file /private/var/folders/_g/j3j21hts4fn7__h04w2x8gb40000gn/T/hugo-test-babel332846848/babel.config.js --filename=js/main.js --out-file=/var/folders/_g/j3j21hts4fn7__h04w2x8gb40000gn/T/compileOut-1451390834.js 0x10304ee60 0x10304ed60 0x10304f060] cmd, err := ex.Npx(binaryName, cmdArgs...) - if err != nil { if hexec.IsNotFound(err) { // This may be on a CI server etc. Will fall back to pre-built assets. 
@@ -187,7 +186,6 @@ func (t *babelTransformation) Transform(ctx *resources.ResourceTransformationCtx } stdin, err := cmd.StdinPipe() - if err != nil { return err } diff --git a/resources/resource_transformers/htesting/testhelpers.go b/resources/resource_transformers/htesting/testhelpers.go index b1feccc5f..c9382b828 100644 --- a/resources/resource_transformers/htesting/testhelpers.go +++ b/resources/resource_transformers/htesting/testhelpers.go @@ -16,54 +16,25 @@ package htesting import ( "path/filepath" - "github.com/gohugoio/hugo/config" - "github.com/gohugoio/hugo/config/testconfig" - "github.com/gohugoio/hugo/helpers" - "github.com/gohugoio/hugo/hugofs" + "github.com/gohugoio/hugo/common/hugio" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/resources" "github.com/spf13/afero" ) -func NewTestResourceSpec() (*resources.Spec, error) { - cfg := config.New() - - imagingCfg := map[string]any{ - "resampleFilter": "linear", - "quality": 68, - "anchor": "left", - } - - cfg.Set("imaging", imagingCfg) - afs := afero.NewMemMapFs() - - conf := testconfig.GetTestConfig(afs, cfg) - fs := hugofs.NewFrom(hugofs.NewBaseFileDecorator(afs), conf.BaseConfig()) - s, err := helpers.NewPathSpec(fs, conf, nil) - if err != nil { - return nil, err - } - - spec, err := resources.NewSpec(s, nil, nil, nil, nil, nil, nil) - return spec, err -} - -func NewResourceTransformer(filename, content string) (resources.ResourceTransformer, error) { - spec, err := NewTestResourceSpec() - if err != nil { - return nil, err - } - return NewResourceTransformerForSpec(spec, filename, content) -} - func NewResourceTransformerForSpec(spec *resources.Spec, filename, content string) (resources.ResourceTransformer, error) { filename = filepath.FromSlash(filename) fs := spec.Fs.Source - if err := afero.WriteFile(fs, filename, []byte(content), 0777); err != nil { + if err := afero.WriteFile(fs, filename, []byte(content), 0o777); err != nil { return nil, err } - r, err := spec.New(resources.ResourceSourceDescriptor{Fs: fs, SourceFilename: filename}) + var open hugio.OpenReadSeekCloser = func() (hugio.ReadSeekCloser, error) { + return fs.Open(filename) + } + + r, err := spec.NewResource(resources.ResourceSourceDescriptor{TargetPath: filepath.FromSlash(filename), OpenReadSeekCloser: open, GroupIdentity: identity.Anonymous}) if err != nil { return nil, err } diff --git a/resources/resource_transformers/integrity/integrity.go b/resources/resource_transformers/integrity/integrity.go index 63f4f4c76..aef744443 100644 --- a/resources/resource_transformers/integrity/integrity.go +++ b/resources/resource_transformers/integrity/integrity.go @@ -23,6 +23,7 @@ import ( "hash" "io" + "github.com/gohugoio/hugo/common/constants" "github.com/gohugoio/hugo/resources/internal" "github.com/gohugoio/hugo/resources" @@ -47,7 +48,7 @@ type fingerprintTransformation struct { } func (t *fingerprintTransformation) Key() internal.ResourceTransformationKey { - return internal.NewResourceTransformationKey("fingerprint", t.algo) + return internal.NewResourceTransformationKey(constants.ResourceTransformationFingerprint, t.algo) } // Transform creates a MD5 hash of the Resource content and inserts that hash before diff --git a/resources/resource_transformers/integrity/integrity_test.go b/resources/resource_transformers/integrity/integrity_test.go index 27e193618..e0af68ae9 100644 --- a/resources/resource_transformers/integrity/integrity_test.go +++ b/resources/resource_transformers/integrity/integrity_test.go @@ -17,6 +17,7 @@ import ( "context" 
"testing" + "github.com/gohugoio/hugo/config/testconfig" "github.com/gohugoio/hugo/resources/resource" qt "github.com/frankban/quicktest" @@ -51,11 +52,12 @@ func TestHashFromAlgo(t *testing.T) { func TestTransform(t *testing.T) { c := qt.New(t) - spec, err := htesting.NewTestResourceSpec() - c.Assert(err, qt.IsNil) - client := New(spec) + d := testconfig.GetTestDeps(nil, nil) + t.Cleanup(func() { c.Assert(d.Close(), qt.IsNil) }) - r, err := htesting.NewResourceTransformerForSpec(spec, "hugo.txt", "Hugo Rocks!") + client := New(d.ResourceSpec) + + r, err := htesting.NewResourceTransformerForSpec(d.ResourceSpec, "hugo.txt", "Hugo Rocks!") c.Assert(err, qt.IsNil) transformed, err := client.Fingerprint(r, "") diff --git a/resources/resource_transformers/js/build.go b/resources/resource_transformers/js/build.go index aa802d81e..cc68d2253 100644 --- a/resources/resource_transformers/js/build.go +++ b/resources/resource_transformers/js/build.go @@ -14,6 +14,7 @@ package js import ( + "errors" "fmt" "io" "os" @@ -22,8 +23,6 @@ import ( "regexp" "strings" - "errors" - "github.com/spf13/afero" "github.com/gohugoio/hugo/hugofs" @@ -93,7 +92,7 @@ func (t *buildTransformation) Transform(ctx *resources.ResourceTransformationCtx return err } - buildOptions.Plugins, err = createBuildPlugins(t.c, opts) + buildOptions.Plugins, err = createBuildPlugins(ctx.DependencyManager, t.c, opts) if err != nil { return err } diff --git a/resources/resource_transformers/js/integration_test.go b/resources/resource_transformers/js/integration_test.go index 0e311107b..304c51d33 100644 --- a/resources/resource_transformers/js/integration_test.go +++ b/resources/resource_transformers/js/integration_test.go @@ -29,6 +29,7 @@ func TestBuildVariants(t *testing.T) { mainWithImport := ` -- config.toml -- disableKinds=["page", "section", "taxonomy", "term", "sitemap", "robotsTXT"] +disableLiveReload = true -- assets/js/main.js -- import { hello1, hello2 } from './util1'; hello1(); @@ -61,7 +62,7 @@ JS Content:{{ $js.Content }}:End: b := hugolib.NewIntegrationTestBuilder(hugolib.IntegrationTestConfig{T: c, Running: true, NeedsOsFS: true, TxtarString: mainWithImport}).Build() b.AssertFileContent("public/index.html", `abcd`) - b.EditFileReplace("assets/js/util1.js", func(s string) string { return strings.ReplaceAll(s, "abcd", "1234") }).Build() + b.EditFileReplaceFunc("assets/js/util1.js", func(s string) string { return strings.ReplaceAll(s, "abcd", "1234") }).Build() b.AssertFileContent("public/index.html", `1234`) }) @@ -69,7 +70,7 @@ JS Content:{{ $js.Content }}:End: b := hugolib.NewIntegrationTestBuilder(hugolib.IntegrationTestConfig{T: c, Running: true, NeedsOsFS: true, TxtarString: mainWithImport}).Build() b.AssertFileContent("public/index.html", `efgh`) - b.EditFileReplace("assets/js/util2.js", func(s string) string { return strings.ReplaceAll(s, "efgh", "1234") }).Build() + b.EditFileReplaceFunc("assets/js/util2.js", func(s string) string { return strings.ReplaceAll(s, "efgh", "1234") }).Build() b.AssertFileContent("public/index.html", `1234`) }) } @@ -257,7 +258,6 @@ JS Content:{{ $js.Content }}:End: b.Assert(err, qt.IsNotNil) b.Assert(err.Error(), qt.Contains, `util1.js:4:17": No matching export in`) }) - } // See issue 10527. 
@@ -301,7 +301,6 @@ IMPORT_SRC_DIR:imp3/foo.ts b.AssertFileContent("public/js/main.js", expected) }) } - } // See https://github.com/evanw/esbuild/issues/2745 @@ -342,7 +341,6 @@ License util2 Main license `) - } // Issue #11232 diff --git a/resources/resource_transformers/js/options.go b/resources/resource_transformers/js/options.go index e9ffbabe4..df32e7012 100644 --- a/resources/resource_transformers/js/options.go +++ b/resources/resource_transformers/js/options.go @@ -21,11 +21,12 @@ import ( "strings" "github.com/gohugoio/hugo/common/maps" + "github.com/gohugoio/hugo/common/paths" + "github.com/gohugoio/hugo/identity" "github.com/spf13/afero" "github.com/evanw/esbuild/pkg/api" - "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/media" "github.com/mitchellh/mapstructure" @@ -113,7 +114,7 @@ func decodeOptions(m map[string]any) (Options, error) { } if opts.TargetPath != "" { - opts.TargetPath = helpers.ToSlashTrimLeading(opts.TargetPath) + opts.TargetPath = paths.ToSlashTrimLeading(opts.TargetPath) } opts.Target = strings.ToLower(opts.Target) @@ -203,7 +204,7 @@ func resolveComponentInAssets(fs afero.Fs, impPath string) *hugofs.FileMeta { return m } -func createBuildPlugins(c *Client, opts Options) ([]api.Plugin, error) { +func createBuildPlugins(depsManager identity.Manager, c *Client, opts Options) ([]api.Plugin, error) { fs := c.rs.Assets resolveImport := func(args api.OnResolveArgs) (api.OnResolveResult, error) { @@ -224,6 +225,7 @@ func createBuildPlugins(c *Client, opts Options) ([]api.Plugin, error) { // ESBuild resolve this. return api.OnResolveResult{}, nil } + relDir = filepath.Dir(rel) } else { relDir = opts.sourceDir @@ -238,6 +240,8 @@ func createBuildPlugins(c *Client, opts Options) ([]api.Plugin, error) { m := resolveComponentInAssets(fs.Fs, impPath) if m != nil { + depsManager.AddIdentity(m.PathInfo) + // Store the source root so we can create a jsconfig.json // to help IntelliSense when the build is done. 
// This should be a small number of elements, and when diff --git a/resources/resource_transformers/js/options_test.go b/resources/resource_transformers/js/options_test.go index a76a24caa..b8b031b81 100644 --- a/resources/resource_transformers/js/options_test.go +++ b/resources/resource_transformers/js/options_test.go @@ -14,10 +14,15 @@ package js import ( + "path" "path/filepath" "testing" + "github.com/gohugoio/hugo/config" + "github.com/gohugoio/hugo/config/testconfig" "github.com/gohugoio/hugo/hugofs" + "github.com/gohugoio/hugo/hugolib/filesystems" + "github.com/gohugoio/hugo/hugolib/paths" "github.com/gohugoio/hugo/media" "github.com/spf13/afero" @@ -164,20 +169,27 @@ func TestResolveComponentInAssets(t *testing.T) { mfs := afero.NewMemMapFs() for _, filename := range test.files { - c.Assert(afero.WriteFile(mfs, filepath.Join(baseDir, filename), []byte("let foo='bar';"), 0777), qt.IsNil) + c.Assert(afero.WriteFile(mfs, filepath.Join(baseDir, filename), []byte("let foo='bar';"), 0o777), qt.IsNil) } - bfs := hugofs.DecorateBasePathFs(afero.NewBasePathFs(mfs, baseDir).(*afero.BasePathFs)) + conf := testconfig.GetTestConfig(mfs, config.New()) + fs := hugofs.NewFrom(mfs, conf.BaseConfig()) - got := resolveComponentInAssets(bfs, test.impPath) + p, err := paths.New(fs, conf) + c.Assert(err, qt.IsNil) + bfs, err := filesystems.NewBase(p, nil) + c.Assert(err, qt.IsNil) + + got := resolveComponentInAssets(bfs.Assets.Fs, test.impPath) gotPath := "" + expect := test.expect if got != nil { - gotPath = filepath.ToSlash(got.Path) + gotPath = filepath.ToSlash(got.Filename) + expect = path.Join(baseDir, test.expect) } - c.Assert(gotPath, qt.Equals, test.expect) + c.Assert(gotPath, qt.Equals, expect) }) - } } diff --git a/resources/resource_transformers/minifier/minify_test.go b/resources/resource_transformers/minifier/minify_test.go index b2d8ed734..030abf426 100644 --- a/resources/resource_transformers/minifier/minify_test.go +++ b/resources/resource_transformers/minifier/minify_test.go @@ -17,6 +17,7 @@ import ( "context" "testing" + "github.com/gohugoio/hugo/config/testconfig" "github.com/gohugoio/hugo/resources/resource" qt "github.com/frankban/quicktest" @@ -26,11 +27,11 @@ import ( func TestTransform(t *testing.T) { c := qt.New(t) - spec, err := htesting.NewTestResourceSpec() - c.Assert(err, qt.IsNil) - client, _ := New(spec) + d := testconfig.GetTestDeps(nil, nil) + t.Cleanup(func() { c.Assert(d.Close(), qt.IsNil) }) - r, err := htesting.NewResourceTransformerForSpec(spec, "hugo.html", "
<h1>   Hugo Rocks!   </h1>")
+	client, _ := New(d.ResourceSpec)
+	r, err := htesting.NewResourceTransformerForSpec(d.ResourceSpec, "hugo.html", "<h1>   Hugo Rocks!   </h1>
    ") c.Assert(err, qt.IsNil) transformed, err := client.Minify(r) diff --git a/resources/resource_transformers/postcss/integration_test.go b/resources/resource_transformers/postcss/integration_test.go index 74aaa2661..957e69403 100644 --- a/resources/resource_transformers/postcss/integration_test.go +++ b/resources/resource_transformers/postcss/integration_test.go @@ -139,7 +139,6 @@ Styles Content: Len: 770917| b.AssertLogContains("Hugo PublishDir: " + filepath.Join(tempDir, "public")) } } - } // 9880 @@ -149,7 +148,7 @@ func TestTransformPostCSSError(t *testing.T) { } if runtime.GOOS == "windows" { - //TODO(bep) This has started to fail on Windows with Go 1.19 on GitHub Actions for some mysterious reason. + // TODO(bep) This has started to fail on Windows with Go 1.19 on GitHub Actions for some mysterious reason. t.Skip("Skip on Windows") } @@ -165,7 +164,6 @@ func TestTransformPostCSSError(t *testing.T) { s.AssertIsFileError(err) c.Assert(err.Error(), qt.Contains, "a.css:4:2") - } func TestTransformPostCSSNotInstalledError(t *testing.T) { @@ -184,7 +182,6 @@ func TestTransformPostCSSNotInstalledError(t *testing.T) { s.AssertIsFileError(err) c.Assert(err.Error(), qt.Contains, `binary with name "npx" not found`) - } // #9895 @@ -206,8 +203,7 @@ func TestTransformPostCSSImportError(t *testing.T) { s.AssertIsFileError(err) c.Assert(err.Error(), qt.Contains, "styles.css:4:3") - c.Assert(err.Error(), qt.Contains, filepath.FromSlash(`failed to resolve CSS @import "css/components/doesnotexist.css"`)) - + c.Assert(err.Error(), qt.Contains, filepath.FromSlash(`failed to resolve CSS @import "/css/components/doesnotexist.css"`)) } func TestTransformPostCSSImporSkipInlineImportsNotFound(t *testing.T) { @@ -230,7 +226,6 @@ func TestTransformPostCSSImporSkipInlineImportsNotFound(t *testing.T) { }).Build() s.AssertFileContent("public/css/styles.css", `@import "components/doesnotexist.css";`) - } // Issue 9787 @@ -267,5 +262,4 @@ Styles Content: Len: 770917 `) } - } diff --git a/resources/resource_transformers/postcss/postcss.go b/resources/resource_transformers/postcss/postcss.go index a65fa3783..9015e120d 100644 --- a/resources/resource_transformers/postcss/postcss.go +++ b/resources/resource_transformers/postcss/postcss.go @@ -1,4 +1,4 @@ -// Copyright 2018 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -17,6 +17,7 @@ import ( "bytes" "crypto/sha256" "encoding/hex" + "errors" "fmt" "io" "path" @@ -30,6 +31,7 @@ import ( "github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/common/text" "github.com/gohugoio/hugo/hugofs" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/common/hugo" @@ -37,8 +39,6 @@ import ( "github.com/spf13/afero" "github.com/spf13/cast" - "errors" - "github.com/mitchellh/mapstructure" "github.com/gohugoio/hugo/common/herrors" @@ -86,7 +86,6 @@ func (c *Client) Process(res resources.ResourceTransformer, options map[string]a // Some of the options from https://github.com/postcss/postcss-cli type Options struct { - // Set a custom path to look for a config file. 
Config string @@ -151,7 +150,7 @@ func (t *postcssTransformation) Transform(ctx *resources.ResourceTransformationC const binaryName = "postcss" infol := t.rs.Logger.InfoCommand(binaryName) - infoW := loggers.LevelLoggerToWriter(infol) + infow := loggers.LevelLoggerToWriter(infol) ex := t.rs.ExecHelper @@ -179,7 +178,7 @@ func (t *postcssTransformation) Transform(ctx *resources.ResourceTransformationC configFile = t.rs.BaseFs.ResolveJSConfigFile(configFile) if configFile == "" && options.Config != "" { // Only fail if the user specified config file is not found. - return fmt.Errorf("postcss config %q not found:", options.Config) + return fmt.Errorf("postcss config %q not found", options.Config) } } @@ -196,7 +195,7 @@ func (t *postcssTransformation) Transform(ctx *resources.ResourceTransformationC var errBuf bytes.Buffer - stderr := io.MultiWriter(infoW, &errBuf) + stderr := io.MultiWriter(infow, &errBuf) cmdArgs = append(cmdArgs, hexec.WithStderr(stderr)) cmdArgs = append(cmdArgs, hexec.WithStdout(ctx.To)) cmdArgs = append(cmdArgs, hexec.WithEnviron(hugo.GetExecEnviron(t.rs.Cfg.BaseConfig().WorkingDir, t.rs.Cfg, t.rs.BaseFs.Assets.Fs))) @@ -221,7 +220,7 @@ func (t *postcssTransformation) Transform(ctx *resources.ResourceTransformationC ctx.From, ctx.InPath, options, - t.rs.Assets.Fs, t.rs.Logger, + t.rs.Assets.Fs, t.rs.Logger, ctx.DependencyManager, ) if options.InlineImports { @@ -260,17 +259,19 @@ type importResolver struct { inPath string opts Options - contentSeen map[string]bool - linemap map[int]fileOffset - fs afero.Fs - logger loggers.Logger + contentSeen map[string]bool + dependencyManager identity.Manager + linemap map[int]fileOffset + fs afero.Fs + logger loggers.Logger } -func newImportResolver(r io.Reader, inPath string, opts Options, fs afero.Fs, logger loggers.Logger) *importResolver { +func newImportResolver(r io.Reader, inPath string, opts Options, fs afero.Fs, logger loggers.Logger, dependencyManager identity.Manager) *importResolver { return &importResolver{ - r: r, - inPath: inPath, - fs: fs, logger: logger, + r: r, + dependencyManager: dependencyManager, + inPath: inPath, + fs: fs, logger: logger, linemap: make(map[int]fileOffset), contentSeen: make(map[string]bool), opts: opts, } @@ -289,7 +290,8 @@ func (imp *importResolver) contentHash(filename string) ([]byte, string) { func (imp *importResolver) importRecursive( lineNum int, content string, - inPath string) (int, string, error) { + inPath string, +) (int, string, error) { basePath := path.Dir(inPath) var replacements []string @@ -312,6 +314,7 @@ func (imp *importResolver) importRecursive( } else { path := strings.Trim(strings.TrimPrefix(line, importIdentifier), " \"';") filename := filepath.Join(basePath, path) + imp.dependencyManager.AddIdentity(identity.CleanStringIdentity(filename)) importContent, hash := imp.contentHash(filename) if importContent == nil { @@ -364,8 +367,6 @@ func (imp *importResolver) importRecursive( } func (imp *importResolver) resolve() (io.Reader, error) { - const importIdentifier = "@import" - content, err := io.ReadAll(imp.r) if err != nil { return nil, err @@ -438,6 +439,5 @@ func (imp *importResolver) toFileError(output string) error { pos.LineNumber = file.Offset + 1 return ferr.UpdatePosition(pos).UpdateContent(f, nil) - //return herrors.NewFileErrorFromFile(inErr, file.Filename, realFilename, hugofs.Os, herrors.SimpleLineMatcher) - + // return herrors.NewFileErrorFromFile(inErr, file.Filename, realFilename, hugofs.Os, herrors.SimpleLineMatcher) } diff --git 
a/resources/resource_transformers/postcss/postcss_test.go b/resources/resource_transformers/postcss/postcss_test.go index dd0695cd1..1edaaaaf5 100644 --- a/resources/resource_transformers/postcss/postcss_test.go +++ b/resources/resource_transformers/postcss/postcss_test.go @@ -20,6 +20,7 @@ import ( "github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/htesting/hqt" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/helpers" @@ -71,7 +72,7 @@ func TestImportResolver(t *testing.T) { fs := afero.NewMemMapFs() writeFile := func(name, content string) { - c.Assert(afero.WriteFile(fs, name, []byte(content), 0777), qt.IsNil) + c.Assert(afero.WriteFile(fs, name, []byte(content), 0o777), qt.IsNil) } writeFile("a.css", `@import "b.css"; @@ -96,6 +97,7 @@ LOCAL_STYLE "styles.css", Options{}, fs, loggers.NewDefault(), + identity.NopManager, ) r, err := imp.resolve() @@ -123,7 +125,7 @@ func BenchmarkImportResolver(b *testing.B) { fs := afero.NewMemMapFs() writeFile := func(name, content string) { - c.Assert(afero.WriteFile(fs, name, []byte(content), 0777), qt.IsNil) + c.Assert(afero.WriteFile(fs, name, []byte(content), 0o777), qt.IsNil) } writeFile("a.css", `@import "b.css"; @@ -153,6 +155,7 @@ LOCAL_STYLE "styles.css", Options{}, fs, logger, + identity.NopManager, ) b.StartTimer() diff --git a/resources/resource_transformers/templates/execute_as_template.go b/resources/resource_transformers/templates/execute_as_template.go index efe3e4c57..79d249bd6 100644 --- a/resources/resource_transformers/templates/execute_as_template.go +++ b/resources/resource_transformers/templates/execute_as_template.go @@ -18,6 +18,7 @@ import ( "context" "fmt" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/resources" "github.com/gohugoio/hugo/resources/internal" @@ -68,7 +69,7 @@ func (t *executeAsTemplateTransform) Transform(ctx *resources.ResourceTransforma func (c *Client) ExecuteAsTemplate(ctx context.Context, res resources.ResourceTransformer, targetPath string, data any) (resource.Resource, error) { return res.TransformWithContext(ctx, &executeAsTemplateTransform{ rs: c.rs, - targetPath: helpers.ToSlashTrimLeading(targetPath), + targetPath: paths.ToSlashTrimLeading(targetPath), t: c.t, data: data, }) diff --git a/resources/resource_transformers/tocss/dartsass/client.go b/resources/resource_transformers/tocss/dartsass/client.go index 929900ca8..4b8ca97eb 100644 --- a/resources/resource_transformers/tocss/dartsass/client.go +++ b/resources/resource_transformers/tocss/dartsass/client.go @@ -25,6 +25,7 @@ import ( "github.com/bep/logg" "github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/common/hugo" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/hugolib/filesystems" @@ -78,7 +79,6 @@ func New(fs *filesystems.SourceFilesystem, rs *resources.Spec) (*Client, error) } }, }) - } else { transpilerv1, err = godartsassv1.Start(godartsassv1.Options{ DartSassEmbeddedFilename: hugo.DartSassBinaryName, @@ -153,11 +153,11 @@ func (c *Client) toCSS(args godartsass.Args, src io.Reader) (godartsass.Result, } } else { res, err = c.transpiler.Execute(args) - } if err != nil { if err.Error() == "unexpected EOF" { + //lint:ignore ST1005 end user message. return res, fmt.Errorf("got unexpected EOF when executing %q. The user running hugo must have read and execute permissions on this program. 
With execute permissions only, this error is thrown.", hugo.DartSassBinaryName) } return res, herrors.NewFileErrorFromFileInErr(err, hugofs.Os, herrors.OffsetMatcher) @@ -167,7 +167,6 @@ func (c *Client) toCSS(args godartsass.Args, src io.Reader) (godartsass.Result, } type Options struct { - // Hugo, will by default, just replace the extension of the source // to .css, e.g. "scss/main.scss" becomes "scss/main.css". You can // control this by setting this, e.g. "styles/main.css" will create @@ -204,7 +203,7 @@ func decodeOptions(m map[string]any) (opts Options, err error) { err = mapstructure.WeakDecode(m, &opts) if opts.TargetPath != "" { - opts.TargetPath = helpers.ToSlashTrimLeading(opts.TargetPath) + opts.TargetPath = paths.ToSlashTrimLeading(opts.TargetPath) } return diff --git a/resources/resource_transformers/tocss/dartsass/transform.go b/resources/resource_transformers/tocss/dartsass/transform.go index 32855e1c5..73eca6a53 100644 --- a/resources/resource_transformers/tocss/dartsass/transform.go +++ b/resources/resource_transformers/tocss/dartsass/transform.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -23,6 +23,7 @@ import ( "github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/htesting" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/media" "github.com/gohugoio/hugo/resources" @@ -80,8 +81,9 @@ func (t *transform) Transform(ctx *resources.ResourceTransformationCtx) error { URL: filename, IncludePaths: t.c.sfs.RealDirs(baseDir), ImportResolver: importResolver{ - baseDir: baseDir, - c: t.c, + baseDir: baseDir, + c: t.c, + dependencyManager: ctx.DependencyManager, varsStylesheet: godartsass.Import{Content: sass.CreateVarsStyleSheet(opts.Vars)}, }, @@ -126,10 +128,10 @@ func (t *transform) Transform(ctx *resources.ResourceTransformationCtx) error { } type importResolver struct { - baseDir string - c *Client - - varsStylesheet godartsass.Import + baseDir string + c *Client + dependencyManager identity.Manager + varsStylesheet godartsass.Import } func (t importResolver) CanonicalizeURL(url string) (string, error) { @@ -172,6 +174,7 @@ func (t importResolver) CanonicalizeURL(url string) (string, error) { fi, err := t.c.sfs.Fs.Stat(filenameToCheck) if err == nil { if fim, ok := fi.(hugofs.FileMetaInfo); ok { + t.dependencyManager.AddIdentity(identity.CleanStringIdentity(filenameToCheck)) return "file://" + filepath.ToSlash(fim.Meta().Filename), nil } } @@ -196,7 +199,6 @@ func (t importResolver) Load(url string) (godartsass.Import, error) { } return godartsass.Import{Content: string(b), SourceSyntax: sourceSyntax}, err - } type importResolverV1 struct { diff --git a/resources/resource_transformers/tocss/internal/sass/helpers.go b/resources/resource_transformers/tocss/internal/sass/helpers.go index acd6d86d5..c1cef141e 100644 --- a/resources/resource_transformers/tocss/internal/sass/helpers.go +++ b/resources/resource_transformers/tocss/internal/sass/helpers.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -56,7 +56,6 @@ func CreateVarsStyleSheet(vars map[string]any) string { sort.Strings(varsSlice) varsStylesheet = strings.Join(varsSlice, "\n") return varsStylesheet - } var ( diff --git a/resources/resource_transformers/tocss/internal/sass/helpers_test.go b/resources/resource_transformers/tocss/internal/sass/helpers_test.go index 56e73736e..ef31fdd8f 100644 --- a/resources/resource_transformers/tocss/internal/sass/helpers_test.go +++ b/resources/resource_transformers/tocss/internal/sass/helpers_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -40,5 +40,4 @@ func TestIsUnquotedCSSValue(t *testing.T) { } { c.Assert(isTypedCSSValue(test.in), qt.Equals, test.out) } - } diff --git a/resources/resource_transformers/tocss/scss/client.go b/resources/resource_transformers/tocss/scss/client.go index 2028163ff..aead6279b 100644 --- a/resources/resource_transformers/tocss/scss/client.go +++ b/resources/resource_transformers/tocss/scss/client.go @@ -16,7 +16,7 @@ package scss import ( "regexp" - "github.com/gohugoio/hugo/helpers" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/hugolib/filesystems" "github.com/gohugoio/hugo/resources" "github.com/spf13/afero" @@ -37,7 +37,6 @@ func New(fs *filesystems.SourceFilesystem, rs *resources.Spec) (*Client, error) } type Options struct { - // Hugo, will by default, just replace the extension of the source // to .css, e.g. "scss/main.scss" becomes "scss/main.css". You can // control this by setting this, e.g. "styles/main.css" will create @@ -73,7 +72,7 @@ func DecodeOptions(m map[string]any) (opts Options, err error) { err = mapstructure.WeakDecode(m, &opts) if opts.TargetPath != "" { - opts.TargetPath = helpers.ToSlashTrimLeading(opts.TargetPath) + opts.TargetPath = paths.ToSlashTrimLeading(opts.TargetPath) } return diff --git a/resources/resource_transformers/tocss/scss/tocss.go b/resources/resource_transformers/tocss/scss/tocss.go index 1018ea02e..a4c4e6d8e 100644 --- a/resources/resource_transformers/tocss/scss/tocss.go +++ b/resources/resource_transformers/tocss/scss/tocss.go @@ -20,7 +20,6 @@ import ( "fmt" "io" "path" - "path/filepath" "strings" @@ -29,6 +28,7 @@ import ( "github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/hugofs" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/media" "github.com/gohugoio/hugo/resources" "github.com/gohugoio/hugo/resources/resource_transformers/tocss/internal/sass" @@ -115,6 +115,7 @@ func (t *toCSSTransformation) Transform(ctx *resources.ResourceTransformationCtx fi, err := t.c.sfs.Fs.Stat(filenameToCheck) if err == nil { if fim, ok := fi.(hugofs.FileMetaInfo); ok { + ctx.DependencyManager.AddIdentity(identity.CleanStringIdentity(filenameToCheck)) return fim.Meta().Filename, "", true } } diff --git a/resources/testhelpers_test.go b/resources/testhelpers_test.go index 1de2f54f6..028524619 100644 --- a/resources/testhelpers_test.go +++ b/resources/testhelpers_test.go @@ -2,23 +2,21 @@ package resources_test import ( "image" - "io" "os" "path/filepath" "runtime" "strings" - "testing" + "github.com/gohugoio/hugo/common/hugio" "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/config/testconfig" "github.com/gohugoio/hugo/deps" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/resources" 
qt "github.com/frankban/quicktest" - "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/resources/images" - "github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/resources/resource" "github.com/spf13/afero" ) @@ -44,7 +42,7 @@ func newTestResourceSpec(desc specDescriptor) *resources.Spec { panic("osFs not supported for this test") } - if err := afs.MkdirAll("assets", 0755); err != nil { + if err := afs.MkdirAll("assets", 0o755); err != nil { panic(err) } @@ -64,16 +62,13 @@ func newTestResourceSpec(desc specDescriptor) *resources.Spec { func(d *deps.Deps) { d.Fs.PublishDir = hugofs.NewCreateCountingFs(d.Fs.PublishDir) }, ) - return d.ResourceSpec -} - -func newTargetPaths(link string) func() page.TargetPaths { - return func() page.TargetPaths { - return page.TargetPaths{ - SubResourceBaseTarget: filepath.FromSlash(link), - SubResourceBaseLink: link, + desc.c.Cleanup(func() { + if err := d.Close(); err != nil { + panic(err) } - } + }) + + return d.ResourceSpec } func newTestResourceOsFs(c *qt.C) (*resources.Spec, string) { @@ -92,7 +87,7 @@ func newTestResourceOsFs(c *qt.C) (*resources.Spec, string) { cfg.Set("workingDir", workDir) - os.MkdirAll(filepath.Join(workDir, "assets"), 0755) + os.MkdirAll(filepath.Join(workDir, "assets"), 0o755) d := testconfig.GetTestDeps(hugofs.Os, cfg) @@ -116,22 +111,16 @@ func fetchImageForSpec(spec *resources.Spec, c *qt.C, name string) images.ImageR } func fetchResourceForSpec(spec *resources.Spec, c *qt.C, name string, targetPathAddends ...string) resource.ContentResource { - src, err := os.Open(filepath.FromSlash("testdata/" + name)) + b, err := os.ReadFile(filepath.FromSlash("testdata/" + name)) c.Assert(err, qt.IsNil) - if len(targetPathAddends) > 0 { - addends := strings.Join(targetPathAddends, "_") - name = addends + "_" + name - } - out, err := helpers.OpenFileForWriting(spec.Fs.WorkingDirWritable, filepath.Join(filepath.Join("assets", name))) - c.Assert(err, qt.IsNil) - _, err = io.Copy(out, src) - out.Close() - src.Close() - c.Assert(err, qt.IsNil) - - factory := newTargetPaths("/a") - - r, err := spec.New(resources.ResourceSourceDescriptor{Fs: spec.BaseFs.Assets.Fs, TargetPaths: factory, LazyPublish: true, RelTargetFilename: name, SourceFilename: name}) + open := hugio.NewOpenReadSeekCloser(hugio.NewReadSeekerNoOpCloserFromBytes(b)) + targetPath := name + base := "/a/" + r, err := spec.NewResource(resources.ResourceSourceDescriptor{ + LazyPublish: true, + Name: name, TargetPath: targetPath, BasePathRelPermalink: base, BasePathTargetPath: base, OpenReadSeekCloser: open, + GroupIdentity: identity.Anonymous, + }) c.Assert(err, qt.IsNil) c.Assert(r, qt.Not(qt.IsNil)) @@ -150,17 +139,3 @@ func assertImageFile(c *qt.C, fs afero.Fs, filename string, width, height int) { c.Assert(config.Width, qt.Equals, width) c.Assert(config.Height, qt.Equals, height) } - -func assertFileCache(c *qt.C, fs afero.Fs, filename string, width, height int) { - assertImageFile(c, fs, filepath.Clean(filename), width, height) -} - -func writeSource(t testing.TB, fs *hugofs.Fs, filename, content string) { - writeToFs(t, fs.Source, filename, content) -} - -func writeToFs(t testing.TB, fs afero.Fs, filename, content string) { - if err := afero.WriteFile(fs, filepath.FromSlash(filename), []byte(content), 0755); err != nil { - t.Fatalf("Failed to write file: %s", err) - } -} diff --git a/resources/transform.go b/resources/transform.go index 0c38345ad..408decbb8 100644 --- a/resources/transform.go +++ b/resources/transform.go 
@@ -23,7 +23,9 @@ import ( "strings" "sync" + "github.com/gohugoio/hugo/common/constants" "github.com/gohugoio/hugo/common/paths" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/resources/images" "github.com/gohugoio/hugo/resources/images/exif" @@ -42,13 +44,18 @@ import ( ) var ( - _ resource.ContentResource = (*resourceAdapter)(nil) - _ resourceCopier = (*resourceAdapter)(nil) - _ resource.ReadSeekCloserResource = (*resourceAdapter)(nil) - _ resource.Resource = (*resourceAdapter)(nil) - _ resource.Source = (*resourceAdapter)(nil) - _ resource.Identifier = (*resourceAdapter)(nil) - _ resource.ResourceMetaProvider = (*resourceAdapter)(nil) + _ resource.ContentResource = (*resourceAdapter)(nil) + _ resourceCopier = (*resourceAdapter)(nil) + _ resource.ReadSeekCloserResource = (*resourceAdapter)(nil) + _ resource.Resource = (*resourceAdapter)(nil) + _ resource.Staler = (*resourceAdapterInner)(nil) + _ resource.Source = (*resourceAdapter)(nil) + _ resource.Identifier = (*resourceAdapter)(nil) + _ resource.ResourceNameTitleProvider = (*resourceAdapter)(nil) + _ resource.WithResourceMetaProvider = (*resourceAdapter)(nil) + _ identity.DependencyManagerProvider = (*resourceAdapter)(nil) + _ identity.IdentityGroupProvider = (*resourceAdapter)(nil) + _ resource.NameOriginalProvider = (*resourceAdapter)(nil) ) // These are transformations that need special support in Hugo that may not @@ -68,11 +75,13 @@ func newResourceAdapter(spec *Spec, lazyPublish bool, target transformableResour } return &resourceAdapter{ resourceTransformations: &resourceTransformations{}, + metaProvider: target, resourceAdapterInner: &resourceAdapterInner{ - ctx: context.TODO(), + ctx: context.Background(), spec: spec, publishOnce: po, target: target, + Staler: &AtomicStaler{}, }, } } @@ -88,6 +97,9 @@ type ResourceTransformationCtx struct { // The context that started the transformation. Ctx context.Context + // The dependency manager to use for dependency tracking. + DependencyManager identity.Manager + // The content to transform. From io.Reader @@ -162,8 +174,11 @@ type resourceAdapter struct { commonResource *resourceTransformations *resourceAdapterInner + metaProvider resource.ResourceMetaProvider } +var _ identity.ForEeachIdentityByNameProvider = (*resourceAdapter)(nil) + func (r *resourceAdapter) Content(ctx context.Context) (any, error) { r.init(false, true) if r.transformationsErr != nil { @@ -176,16 +191,41 @@ func (r *resourceAdapter) Err() resource.ResourceError { return nil } +func (r *resourceAdapter) GetIdentity() identity.Identity { + return identity.FirstIdentity(r.target) +} + func (r *resourceAdapter) Data() any { r.init(false, false) return r.target.Data() } +func (r *resourceAdapter) ForEeachIdentityByName(name string, f func(identity.Identity) bool) { + if constants.IsFieldRelOrPermalink(name) && !r.resourceTransformations.hasTransformationPermalinkHash() { + // Special case for links without any content hash in the URL. + // We don't need to rebuild all pages that use this resource, + // but we want to make sure that the resource is accessed at least once. 
+ f(identity.NewFindFirstManagerIdentityProvider(r.target.GetDependencyManager(), r.target.GetIdentityGroup())) + return + } + f(r.target.GetIdentityGroup()) + f(r.target.GetDependencyManager()) +} + +func (r *resourceAdapter) GetIdentityGroup() identity.Identity { + return r.target.GetIdentityGroup() +} + +func (r *resourceAdapter) GetDependencyManager() identity.Manager { + return r.target.GetDependencyManager() +} + func (r resourceAdapter) cloneTo(targetPath string) resource.Resource { newtTarget := r.target.cloneTo(targetPath) newInner := &resourceAdapterInner{ ctx: r.ctx, spec: r.spec, + Staler: r.Staler, target: newtTarget.(transformableResource), } if r.resourceAdapterInner.publishOnce != nil { @@ -239,12 +279,17 @@ func (r *resourceAdapter) MediaType() media.Type { func (r *resourceAdapter) Name() string { r.init(false, false) - return r.target.Name() + return r.metaProvider.Name() +} + +func (r *resourceAdapter) NameOriginal() string { + r.init(false, false) + return r.target.(resource.NameOriginalProvider).NameOriginal() } func (r *resourceAdapter) Params() maps.Params { r.init(false, false) - return r.target.Params() + return r.metaProvider.Params() } func (r *resourceAdapter) Permalink() string { @@ -283,7 +328,7 @@ func (r *resourceAdapter) String() string { func (r *resourceAdapter) Title() string { r.init(false, false) - return r.target.Title() + return r.metaProvider.Title() } func (r resourceAdapter) Transform(t ...ResourceTransformation) (ResourceTransformer, error) { @@ -298,6 +343,7 @@ func (r resourceAdapter) TransformWithContext(ctx context.Context, t ...Resource r.resourceAdapterInner = &resourceAdapterInner{ ctx: ctx, spec: r.spec, + Staler: r.Staler, publishOnce: &publishOnce{}, target: r.target, } @@ -313,6 +359,11 @@ func (r *resourceAdapter) DecodeImage() (image.Image, error) { return r.getImageOps().DecodeImage() } +func (r resourceAdapter) WithResourceMeta(mp resource.ResourceMetaProvider) resource.Resource { + r.metaProvider = mp + return &r +} + func (r *resourceAdapter) getImageOps() images.ImageResourceOps { img, ok := r.target.(images.ImageResourceOps) if !ok { @@ -326,14 +377,6 @@ func (r *resourceAdapter) getImageOps() images.ImageResourceOps { return img } -func (r *resourceAdapter) getMetaAssigner() metaAssigner { - return r.target -} - -func (r *resourceAdapter) getSpec() *Spec { - return r.spec -} - func (r *resourceAdapter) publish() { if r.publishOnce == nil { return @@ -349,41 +392,28 @@ func (r *resourceAdapter) publish() { } func (r *resourceAdapter) TransformationKey() string { - // Files with a suffix will be stored in cache (both on disk and in memory) - // partitioned by their suffix. 
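The widened `var _ ... = (*resourceAdapter)(nil)` list added to transform.go above relies on Go's compile-time interface check idiom: the build fails as soon as the adapter stops satisfying one of the listed interfaces. A minimal sketch of the pattern with hypothetical types:

```go
package main

import "fmt"

// Identifier is a hypothetical interface standing in for the resource
// interfaces asserted in transform.go.
type Identifier interface{ Key() string }

type adapter struct{ key string }

func (a *adapter) Key() string { return a.key }

// Compile-time check: compilation breaks if *adapter loses the Key method.
var _ Identifier = (*adapter)(nil)

func main() {
	var id Identifier = &adapter{key: "css/main.css"}
	fmt.Println(id.Key())
}
```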
var key string for _, tr := range r.transformations { key = key + "_" + tr.Key().Value() } - - base := ResourceCacheKey(r.target.Key()) - return r.spec.ResourceCache.cleanKey(base) + "_" + helpers.MD5String(key) + return r.spec.ResourceCache.cleanKey(r.target.Key()) + "_" + helpers.MD5String(key) } -func (r *resourceAdapter) transform(publish, setContent bool) error { - cache := r.spec.ResourceCache - +func (r *resourceAdapter) getOrTransform(publish, setContent bool) error { key := r.TransformationKey() - - cached, found := cache.get(key) - - if found { - r.resourceAdapterInner = cached.(*resourceAdapterInner) - return nil + res, err := r.spec.ResourceCache.cacheResourceTransformation.GetOrCreate(key, func(string) (*resourceAdapterInner, error) { + return r.transform(key, publish, setContent) + }) + if err != nil { + return err } - // Acquire a write lock for the named transformation. - cache.nlocker.Lock(key) - // Check the cache again. - cached, found = cache.get(key) - if found { - r.resourceAdapterInner = cached.(*resourceAdapterInner) - cache.nlocker.Unlock(key) - return nil - } + r.resourceAdapterInner = res + return nil +} - defer cache.nlocker.Unlock(key) - defer cache.set(key, r.resourceAdapterInner) +func (r *resourceAdapter) transform(key string, publish, setContent bool) (*resourceAdapterInner, error) { + cache := r.spec.ResourceCache b1 := bp.GetBuffer() b2 := bp.GetBuffer() @@ -394,6 +424,7 @@ func (r *resourceAdapter) transform(publish, setContent bool) error { Ctx: r.ctx, Data: make(map[string]any), OpenResourcePublisher: r.target.openPublishFileForWriting, + DependencyManager: r.target.GetDependencyManager(), } tctx.InMediaType = r.target.MediaType() @@ -406,7 +437,7 @@ func (r *resourceAdapter) transform(publish, setContent bool) error { contentrc, err := contentReadSeekerCloser(r.target) if err != nil { - return err + return nil, err } defer contentrc.Close() @@ -479,14 +510,14 @@ func (r *resourceAdapter) transform(publish, setContent bool) error { } else { err = tr.Transform(tctx) if err != nil && err != herrors.ErrFeatureNotAvailable { - return newErr(err) + return nil, newErr(err) } if mayBeCachedOnDisk { tryFileCache = bcfg.UseResourceCache(err) } if err != nil && !tryFileCache { - return newErr(err) + return nil, newErr(err) } } @@ -494,9 +525,9 @@ func (r *resourceAdapter) transform(publish, setContent bool) error { f := r.target.tryTransformedFileCache(key, updates) if f == nil { if err != nil { - return newErr(err) + return nil, newErr(err) } - return newErr(fmt.Errorf("resource %q not found in file cache", key)) + return nil, newErr(fmt.Errorf("resource %q not found in file cache", key)) } transformedContentr = f updates.sourceFs = cache.fileCache.Fs @@ -521,7 +552,7 @@ func (r *resourceAdapter) transform(publish, setContent bool) error { if publish { publicw, err := r.target.openPublishFileForWriting(updates.targetPath) if err != nil { - return err + return nil, err } publishwriters = append(publishwriters, publicw) } @@ -531,7 +562,7 @@ func (r *resourceAdapter) transform(publish, setContent bool) error { // Also write it to the cache fi, metaw, err := cache.writeMeta(key, updates.toTransformedResourceMetadata()) if err != nil { - return err + return nil, err } updates.sourceFilename = &fi.Name updates.sourceFs = cache.fileCache.Fs @@ -562,7 +593,7 @@ func (r *resourceAdapter) transform(publish, setContent bool) error { publishw := hugio.NewMultiWriteCloser(publishwriters...) 
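getOrTransform above replaces the manual lock, double-check, and deferred cache-set dance with a single GetOrCreate call keyed by the transformation key. A minimal, generic sketch of that get-or-create memoization pattern, standard library only (the real resource cache adds eviction and error handling on top of this):

```go
package main

import (
	"fmt"
	"sync"
)

// onceCache memoizes the first construction per key and deduplicates
// concurrent callers, mirroring the GetOrCreate shape used in the diff.
type onceCache[K comparable, V any] struct {
	mu sync.Mutex
	m  map[K]*entry[V]
}

type entry[V any] struct {
	once sync.Once
	val  V
	err  error
}

func (c *onceCache[K, V]) GetOrCreate(key K, create func(K) (V, error)) (V, error) {
	c.mu.Lock()
	if c.m == nil {
		c.m = make(map[K]*entry[V])
	}
	e, ok := c.m[key]
	if !ok {
		e = &entry[V]{}
		c.m[key] = e
	}
	c.mu.Unlock()

	// Only the first caller for this key runs create; the rest wait and
	// share the result.
	e.once.Do(func() { e.val, e.err = create(key) })
	return e.val, e.err
}

func main() {
	var c onceCache[string, string]
	v, _ := c.GetOrCreate("css/main.css_minify", func(k string) (string, error) {
		return "transformed:" + k, nil
	})
	fmt.Println(v)
}
```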
_, err = io.Copy(publishw, transformedContentr) if err != nil { - return err + return nil, err } publishw.Close() @@ -573,11 +604,11 @@ func (r *resourceAdapter) transform(publish, setContent bool) error { newTarget, err := r.target.cloneWithUpdates(updates) if err != nil { - return err + return nil, err } r.target = newTarget - return nil + return r.resourceAdapterInner, nil } func (r *resourceAdapter) init(publish, setContent bool) { @@ -597,7 +628,7 @@ func (r *resourceAdapter) initTransform(publish, setContent bool) { r.publishOnce = nil } - r.transformationsErr = r.transform(publish, setContent) + r.transformationsErr = r.getOrTransform(publish, setContent) if r.transformationsErr != nil { if r.spec.ErrorSender != nil { r.spec.ErrorSender.SendError(r.transformationsErr) @@ -618,24 +649,42 @@ type resourceAdapterInner struct { target transformableResource + resource.Staler + spec *Spec // Handles publishing (to /public) if needed. *publishOnce } +func (r *resourceAdapterInner) IsStale() bool { + return r.Staler.IsStale() || r.target.IsStale() +} + type resourceTransformations struct { transformationsInit sync.Once transformationsErr error transformations []ResourceTransformation } +// hasTransformationPermalinkHash reports whether any of the transformations +// in the chain creates a permalink that's based on the content, e.g. fingerprint. +func (r *resourceTransformations) hasTransformationPermalinkHash() bool { + for _, t := range r.transformations { + if constants.IsResourceTransformationPermalinkHash(t.Key().Name) { + return true + } + } + return false +} + type transformableResource interface { baseResourceInternal resource.ContentProvider resource.Resource resource.Identifier + resource.Staler resourceCopier } diff --git a/resources/transform_integration_test.go b/resources/transform_integration_test.go new file mode 100644 index 000000000..4404f1642 --- /dev/null +++ b/resources/transform_integration_test.go @@ -0,0 +1,50 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package resources_test + +import ( + "testing" + + "github.com/gohugoio/hugo/hugolib" +) + +func TestTransformCached(t *testing.T) { + files := ` +-- hugo.toml -- +disableKinds = ["taxonomy", "term"] +-- assets/css/main.css -- +body { + background: #fff; +} +-- content/p1.md -- +--- +title: "P1" +--- +P1. +-- content/p2.md -- +--- +title: "P2" +--- +P2. +-- layouts/_default/list.html -- +List. +-- layouts/_default/single.html -- +{{ $css := resources.Get "css/main.css" | resources.Minify }} +CSS: {{ $css.Content }} +` + + b := hugolib.Test(t, files) + + b.AssertFileContent("public/p1/index.html", "CSS: body{background:#fff}") +} diff --git a/resources/transform_test.go b/resources/transform_test.go index d430bfb6c..fd152a47c 100644 --- a/resources/transform_test.go +++ b/resources/transform_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -25,10 +25,12 @@ import ( "testing" "github.com/gohugoio/hugo/htesting" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/media" "github.com/gohugoio/hugo/resources" "github.com/gohugoio/hugo/common/herrors" + "github.com/gohugoio/hugo/common/hugio" "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/resources/images" @@ -47,12 +49,13 @@ const gopher = `iVBORw0KGgoAAAANSUhEUgAAAEsAAAA8CAAAAAALAhhPAAAFfUlEQVRYw62XeWwU func gopherPNG() io.Reader { return base64.NewDecoder(base64.StdEncoding, strings.NewReader(gopher)) } func TestTransform(t *testing.T) { - createTransformer := func(c *qt.C, spec *resources.Spec, filename, content string) resources.Transformer { - filename = filepath.FromSlash(filename) - err := afero.WriteFile(spec.Fs.Source, filepath.Join("assets", filename), []byte(content), 0777) - c.Assert(err, qt.IsNil) - r, err := spec.New(resources.ResourceSourceDescriptor{Fs: spec.BaseFs.Assets.Fs, SourceFilename: filename}) + targetPath := identity.CleanString(filename) + r, err := spec.NewResource(resources.ResourceSourceDescriptor{ + TargetPath: targetPath, + OpenReadSeekCloser: hugio.NewOpenReadSeekCloser(hugio.NewReadSeekerNoOpCloserFromString(content)), + GroupIdentity: identity.StringIdentity(targetPath), + }) c.Assert(err, qt.IsNil) c.Assert(r, qt.Not(qt.IsNil), qt.Commentf(filename)) return r.(resources.Transformer) @@ -310,8 +313,10 @@ func TestTransform(t *testing.T) { r := createTransformer(c, spec, "f1.txt", "color is blue") - tr1, _ := r.Transform(t1) - tr2, _ := tr1.Transform(t2) + tr1, err := r.Transform(t1) + c.Assert(err, qt.IsNil) + tr2, err := tr1.Transform(t2) + c.Assert(err, qt.IsNil) content1, err := tr1.(resource.ContentProvider).Content(context.Background()) c.Assert(err, qt.IsNil) diff --git a/scripts/fork_go_templates/main.go b/scripts/fork_go_templates/main.go index 8e14813ec..5b9262c0a 100644 --- a/scripts/fork_go_templates/main.go +++ b/scripts/fork_go_templates/main.go @@ -168,6 +168,9 @@ func doWithGoFiles(dir string, return } must(filepath.Walk(filepath.Join(forkRoot, dir), func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } if info.IsDir() { return nil } diff --git a/source/content_directory_test.go b/source/content_directory_test.go index 7d1630529..96ee22bc7 100644 --- a/source/content_directory_test.go +++ b/source/content_directory_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/source/fileInfo.go b/source/fileInfo.go index 60c6e6ea8..5b24bbeb2 100644 --- a/source/fileInfo.go +++ b/source/fileInfo.go @@ -14,9 +14,7 @@ package source import ( - "fmt" "path/filepath" - "strings" "sync" "time" @@ -24,8 +22,6 @@ import ( "github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/common/paths" - "github.com/gohugoio/hugo/hugofs/files" - "github.com/gohugoio/hugo/common/hugio" "github.com/gohugoio/hugo/hugofs" @@ -33,269 +29,121 @@ import ( "github.com/gohugoio/hugo/helpers" ) -// fileInfo implements the File interface. -var ( - _ File = (*FileInfo)(nil) -) - -// File represents a source file. -// This is a temporary construct until we resolve page.Page conflicts. 
-// TODO(bep) remove this construct once we have resolved page deprecations -type File interface { - fileOverlap - FileWithoutOverlap -} - -// Temporary to solve duplicate/deprecated names in page.Page -type fileOverlap interface { - // Path gets the relative path including file name and extension. - // The directory is relative to the content root. - Path() string - - // Section is first directory below the content root. - // For page bundles in root, the Section will be empty. - Section() string - - // Lang is the language code for this page. It will be the - // same as the site's language code. - Lang() string - - IsZero() bool -} - -type FileWithoutOverlap interface { - // Filename gets the full path and filename to the file. - Filename() string - - // Dir gets the name of the directory that contains this file. - // The directory is relative to the content root. - Dir() string - - // Extension is an alias to Ext(). - // Deprecated: Use Ext instead. - Extension() string - - // Ext gets the file extension, i.e "myblogpost.md" will return "md". - Ext() string - - // LogicalName is filename and extension of the file. - LogicalName() string - - // BaseFileName is a filename without extension. - BaseFileName() string - - // TranslationBaseName is a filename with no extension, - // not even the optional language extension part. - TranslationBaseName() string - - // ContentBaseName is a either TranslationBaseName or name of containing folder - // if file is a leaf bundle. - ContentBaseName() string - - // UniqueID is the MD5 hash of the file's path and is for most practical applications, - // Hugo content files being one of them, considered to be unique. - UniqueID() string - - // For internal use only. - FileInfo() hugofs.FileMetaInfo -} - -// FileInfo describes a source file. -type FileInfo struct { - // Absolute filename to the file on disk. - filename string - - sp *SourceSpec - - fi hugofs.FileMetaInfo - - // Derived from filename - ext string // Extension without any "." - lang string - - name string - - dir string - relDir string - relPath string - baseName string - translationBaseName string - contentBaseName string - section string - classifier files.ContentClass +// File describes a source file. +type File struct { + fim hugofs.FileMetaInfo uniqueID string - lazyInit sync.Once } // Filename returns a file's absolute path and filename on disk. -func (fi *FileInfo) Filename() string { return fi.filename } +func (fi *File) Filename() string { return fi.fim.Meta().Filename } // Path gets the relative path including file name and extension. The directory // is relative to the content root. -func (fi *FileInfo) Path() string { return fi.relPath } +func (fi *File) Path() string { return filepath.Join(fi.p().Dir()[1:], fi.p().Name()) } // Dir gets the name of the directory that contains this file. The directory is // relative to the content root. -func (fi *FileInfo) Dir() string { return fi.relDir } +func (fi *File) Dir() string { + return fi.pathToDir(fi.p().Dir()) +} // Extension is an alias to Ext(). -func (fi *FileInfo) Extension() string { +func (fi *File) Extension() string { hugo.Deprecate(".File.Extension", "Use .File.Ext instead.", "v0.96.0") return fi.Ext() } -// Ext returns a file's extension without the leading period (ie. "md"). -func (fi *FileInfo) Ext() string { return fi.ext } +// Ext returns a file's extension without the leading period (e.g. "md"). +// Deprecated: Use Extension() instead. 
+func (fi *File) Ext() string { return fi.p().Ext() } -// Lang returns a file's language (ie. "sv"). -func (fi *FileInfo) Lang() string { return fi.lang } +// Lang returns a file's language (e.g. "sv"). +func (fi *File) Lang() string { + return fi.fim.Meta().Lang +} -// LogicalName returns a file's name and extension (ie. "page.sv.md"). -func (fi *FileInfo) LogicalName() string { return fi.name } +// LogicalName returns a file's name and extension (e.g. "page.sv.md"). +func (fi *File) LogicalName() string { + return fi.p().Name() +} -// BaseFileName returns a file's name without extension (ie. "page.sv"). -func (fi *FileInfo) BaseFileName() string { return fi.baseName } +// BaseFileName returns a file's name without extension (e.g. "page.sv"). +func (fi *File) BaseFileName() string { + return fi.p().NameNoExt() +} // TranslationBaseName returns a file's translation base name without the -// language segment (ie. "page"). -func (fi *FileInfo) TranslationBaseName() string { return fi.translationBaseName } +// language segment (e.g. "page"). +func (fi *File) TranslationBaseName() string { return fi.p().NameNoIdentifier() } // ContentBaseName is a either TranslationBaseName or name of containing folder -// if file is a leaf bundle. -func (fi *FileInfo) ContentBaseName() string { - fi.init() - return fi.contentBaseName +// if file is a bundle. +func (fi *File) ContentBaseName() string { + return fi.p().BaseNameNoIdentifier() } // Section returns a file's section. -func (fi *FileInfo) Section() string { - fi.init() - return fi.section +func (fi *File) Section() string { + return fi.p().Section() } // UniqueID returns a file's unique, MD5 hash identifier. -func (fi *FileInfo) UniqueID() string { +func (fi *File) UniqueID() string { fi.init() return fi.uniqueID } // FileInfo returns a file's underlying os.FileInfo. -// For internal use only. -func (fi *FileInfo) FileInfo() hugofs.FileMetaInfo { return fi.fi } +func (fi *File) FileInfo() hugofs.FileMetaInfo { return fi.fim } -func (fi *FileInfo) String() string { return fi.BaseFileName() } +func (fi *File) String() string { return fi.BaseFileName() } // Open implements ReadableFile. -func (fi *FileInfo) Open() (hugio.ReadSeekCloser, error) { - f, err := fi.fi.Meta().Open() +func (fi *File) Open() (hugio.ReadSeekCloser, error) { + f, err := fi.fim.Meta().Open() return f, err } -func (fi *FileInfo) IsZero() bool { +func (fi *File) IsZero() bool { return fi == nil } // We create a lot of these FileInfo objects, but there are parts of it used only // in some cases that is slightly expensive to construct. -func (fi *FileInfo) init() { +func (fi *File) init() { fi.lazyInit.Do(func() { - relDir := strings.Trim(fi.relDir, helpers.FilePathSeparator) - parts := strings.Split(relDir, helpers.FilePathSeparator) - var section string - if (fi.classifier != files.ContentClassLeaf && len(parts) == 1) || len(parts) > 1 { - section = parts[0] - } - fi.section = section - - if fi.classifier.IsBundle() && len(parts) > 0 { - fi.contentBaseName = parts[len(parts)-1] - } else { - fi.contentBaseName = fi.translationBaseName - } - - fi.uniqueID = helpers.MD5String(filepath.ToSlash(fi.relPath)) + fi.uniqueID = helpers.MD5String(filepath.ToSlash(fi.Path())) }) } -// NewTestFile creates a partially filled File used in unit tests. 
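File.UniqueID above derives the MD5 of the slash-normalized path lazily via sync.Once, so the hash is only computed for files that actually ask for it. A minimal sketch of that lazy-derived-field pattern with local types (not Hugo's implementation):

```go
package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
	"path/filepath"
	"sync"
)

// file computes its unique ID from the path only on first use.
type file struct {
	relPath string

	lazyInit sync.Once
	uniqueID string
}

func (f *file) UniqueID() string {
	f.lazyInit.Do(func() {
		// MD5 of the slash-normalized relative path, as in the diff above.
		sum := md5.Sum([]byte(filepath.ToSlash(f.relPath)))
		f.uniqueID = hex.EncodeToString(sum[:])
	})
	return f.uniqueID
}

func main() {
	f := &file{relPath: "blog/post.md"}
	fmt.Println(f.UniqueID())
}
```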
-// TODO(bep) improve this package -func NewTestFile(filename string) *FileInfo { - base := filepath.Base(filepath.Dir(filename)) - return &FileInfo{ - filename: filename, - translationBaseName: base, +func (fi *File) pathToDir(s string) string { + if s == "" { + return s } + return filepath.FromSlash(s[1:] + "/") } -func (sp *SourceSpec) NewFileInfoFrom(path, filename string) (*FileInfo, error) { +func (fi *File) p() *paths.Path { + return fi.fim.Meta().PathInfo +} + +func NewFileInfoFrom(path, filename string) *File { meta := &hugofs.FileMeta{ Filename: filename, - Path: path, + PathInfo: paths.Parse("", filepath.ToSlash(path)), } - return sp.NewFileInfo(hugofs.NewFileMetaInfo(nil, meta)) + return NewFileInfo(hugofs.NewFileMetaInfo(nil, meta)) } -func (sp *SourceSpec) NewFileInfo(fi hugofs.FileMetaInfo) (*FileInfo, error) { - m := fi.Meta() - - filename := m.Filename - relPath := m.Path - - if relPath == "" { - return nil, fmt.Errorf("no Path provided by %v (%T)", m, m.Fs) +func NewFileInfo(fi hugofs.FileMetaInfo) *File { + return &File{ + fim: fi, } - - if filename == "" { - return nil, fmt.Errorf("no Filename provided by %v (%T)", m, m.Fs) - } - - relDir := filepath.Dir(relPath) - if relDir == "." { - relDir = "" - } - if !strings.HasSuffix(relDir, helpers.FilePathSeparator) { - relDir = relDir + helpers.FilePathSeparator - } - - lang := m.Lang - translationBaseName := m.TranslationBaseName - - dir, name := filepath.Split(relPath) - if !strings.HasSuffix(dir, helpers.FilePathSeparator) { - dir = dir + helpers.FilePathSeparator - } - - ext := strings.ToLower(strings.TrimPrefix(filepath.Ext(name), ".")) - baseName := paths.Filename(name) - - if translationBaseName == "" { - // This is usually provided by the filesystem. But this FileInfo is also - // created in a standalone context when doing "hugo new". This is - // an approximate implementation, which is "good enough" in that case. - fileLangExt := filepath.Ext(baseName) - translationBaseName = strings.TrimSuffix(baseName, fileLangExt) - } - - f := &FileInfo{ - sp: sp, - filename: filename, - fi: fi, - lang: lang, - ext: ext, - dir: dir, - relDir: relDir, // Dir() - relPath: relPath, // Path() - name: name, - baseName: baseName, // BaseFileName() - translationBaseName: translationBaseName, - classifier: m.Classifier, - } - - return f, nil } func NewGitInfo(info gitmap.GitInfo) GitInfo { diff --git a/source/fileInfo_test.go b/source/fileInfo_test.go deleted file mode 100644 index e2a3edd30..000000000 --- a/source/fileInfo_test.go +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2017-present The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package source_test - -import ( - "path/filepath" - "strings" - "testing" - - qt "github.com/frankban/quicktest" - "github.com/gohugoio/hugo/source" -) - -func TestFileInfo(t *testing.T) { - c := qt.New(t) - - s := newTestSourceSpec() - - for _, this := range []struct { - base string - filename string - assert func(f *source.FileInfo) - }{ - {filepath.FromSlash("/a/"), filepath.FromSlash("/a/b/page.md"), func(f *source.FileInfo) { - c.Assert(f.Filename(), qt.Equals, filepath.FromSlash("/a/b/page.md")) - c.Assert(f.Dir(), qt.Equals, filepath.FromSlash("b/")) - c.Assert(f.Path(), qt.Equals, filepath.FromSlash("b/page.md")) - c.Assert(f.Section(), qt.Equals, "b") - c.Assert(f.TranslationBaseName(), qt.Equals, filepath.FromSlash("page")) - c.Assert(f.BaseFileName(), qt.Equals, filepath.FromSlash("page")) - }}, - {filepath.FromSlash("/a/"), filepath.FromSlash("/a/b/c/d/page.md"), func(f *source.FileInfo) { - c.Assert(f.Section(), qt.Equals, "b") - }}, - {filepath.FromSlash("/a/"), filepath.FromSlash("/a/b/page.en.MD"), func(f *source.FileInfo) { - c.Assert(f.Section(), qt.Equals, "b") - c.Assert(f.Path(), qt.Equals, filepath.FromSlash("b/page.en.MD")) - c.Assert(f.TranslationBaseName(), qt.Equals, filepath.FromSlash("page")) - c.Assert(f.BaseFileName(), qt.Equals, filepath.FromSlash("page.en")) - }}, - } { - path := strings.TrimPrefix(this.filename, this.base) - f, err := s.NewFileInfoFrom(path, this.filename) - c.Assert(err, qt.IsNil) - this.assert(f) - } -} diff --git a/source/filesystem.go b/source/filesystem.go index 283863dbf..208f5036f 100644 --- a/source/filesystem.go +++ b/source/filesystem.go @@ -14,66 +14,27 @@ package source import ( - "fmt" "path/filepath" - "sync" "github.com/gohugoio/hugo/hugofs" + "github.com/spf13/afero" ) // Filesystem represents a source filesystem. type Filesystem struct { - files []File - filesInit sync.Once - filesInitErr error - Base string - - fi hugofs.FileMetaInfo - + fs afero.Fs + fi hugofs.FileMetaInfo SourceSpec } -// NewFilesystem returns a new filesystem for a given source spec. +// NewFilesystem returns a new filesytem for a given source spec. func (sp SourceSpec) NewFilesystem(base string) *Filesystem { - return &Filesystem{SourceSpec: sp, Base: base} + return &Filesystem{SourceSpec: sp, Base: base, fs: sp.Fs.Source} } -func (sp SourceSpec) NewFilesystemFromFileMetaInfo(fi hugofs.FileMetaInfo) *Filesystem { - return &Filesystem{SourceSpec: sp, fi: fi} -} - -// Files returns a slice of readable files. 
-func (f *Filesystem) Files() ([]File, error) { - f.filesInit.Do(func() { - err := f.captureFiles() - if err != nil { - f.filesInitErr = fmt.Errorf("capture files: %w", err) - } - }) - return f.files, f.filesInitErr -} - -// add populates a file in the Filesystem.files -func (f *Filesystem) add(name string, fi hugofs.FileMetaInfo) (err error) { - var file File - - file, err = f.SourceSpec.NewFileInfo(fi) - if err != nil { - return err - } - - f.files = append(f.files, file) - - return err -} - -func (f *Filesystem) captureFiles() error { - walker := func(path string, fi hugofs.FileMetaInfo, err error) error { - if err != nil { - return err - } - +func (f *Filesystem) Walk(addFile func(*File) error) error { + walker := func(path string, fi hugofs.FileMetaInfo) error { if fi.IsDir() { return nil } @@ -87,14 +48,16 @@ func (f *Filesystem) captureFiles() error { } if b { - err = f.add(filename, fi) + if err = addFile(NewFileInfo(fi)); err != nil { + return err + } } return err } w := hugofs.NewWalkway(hugofs.WalkwayConfig{ - Fs: f.SourceFs, + Fs: f.fs, Info: f.fi, Root: f.Base, WalkFn: walker, diff --git a/source/filesystem_test.go b/source/filesystem_test.go index 1067d5839..9118285da 100644 --- a/source/filesystem_test.go +++ b/source/filesystem_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -14,71 +14,35 @@ package source_test import ( - "fmt" - "path/filepath" "runtime" "testing" - "github.com/spf13/afero" - qt "github.com/frankban/quicktest" - "github.com/gohugoio/hugo/config" - "github.com/gohugoio/hugo/config/testconfig" - "github.com/gohugoio/hugo/helpers" - "github.com/gohugoio/hugo/hugofs" - "github.com/gohugoio/hugo/source" + "github.com/gohugoio/hugo/hugolib" + "golang.org/x/text/unicode/norm" ) -func TestEmptySourceFilesystem(t *testing.T) { - c := qt.New(t) - ss := newTestSourceSpec() - src := ss.NewFilesystem("") - files, err := src.Files() - c.Assert(err, qt.IsNil) - if len(files) != 0 { - t.Errorf("new filesystem should contain 0 files.") - } -} - func TestUnicodeNorm(t *testing.T) { if runtime.GOOS != "darwin" { - // Normalization code is only for Mac OS, since it is not necessary for other OSes. 
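Filesystem.Walk above replaces the captured file slice with a per-file callback driven by Hugo's walkway. A minimal standard-library sketch of the same callback shape, using filepath.WalkDir instead of the walkway (names are illustrative):

```go
package main

import (
	"fmt"
	"io/fs"
	"path/filepath"
)

// walkFiles calls addFile for every regular file below root, skipping
// directories, mirroring the callback shape of Filesystem.Walk in the diff.
func walkFiles(root string, addFile func(path string) error) error {
	return filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if d.IsDir() {
			return nil
		}
		return addFile(path)
	})
}

func main() {
	_ = walkFiles(".", func(path string) error {
		fmt.Println(path)
		return nil
	})
}
```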
- return + t.Skip("Skipping test on non-Darwin OS") } + t.Parallel() + files := ` +-- hugo.toml -- +-- content/å.md -- +-- content/é.md -- +-- content/å/å.md -- +-- content/é/é.md -- +-- layouts/_default/single.html -- +Title: {{ .Title }}|File: {{ .File.Path}} +` + b := hugolib.Test(t, files, hugolib.TestOptWithNFDOnDarwin()) - c := qt.New(t) - - paths := []struct { - NFC string - NFD string - }{ - {NFC: "å", NFD: "\x61\xcc\x8a"}, - {NFC: "é", NFD: "\x65\xcc\x81"}, - } - - ss := newTestSourceSpec() - - for i, path := range paths { - base := fmt.Sprintf("base%d", i) - c.Assert(afero.WriteFile(ss.Fs.Source, filepath.Join(base, path.NFD), []byte("some data"), 0777), qt.IsNil) - src := ss.NewFilesystem(base) - files, err := src.Files() - c.Assert(err, qt.IsNil) - f := files[0] - if f.BaseFileName() != path.NFC { - t.Fatalf("file %q name in NFD form should be normalized (%s)", f.BaseFileName(), path.NFC) - } + for _, p := range b.H.Sites[0].RegularPages() { + f := p.File() + b.Assert(norm.NFC.IsNormalString(f.Path()), qt.IsTrue) + b.Assert(norm.NFC.IsNormalString(f.Dir()), qt.IsTrue) + b.Assert(norm.NFC.IsNormalString(f.Filename()), qt.IsTrue) + b.Assert(norm.NFC.IsNormalString(f.BaseFileName()), qt.IsTrue) } } - -func newTestSourceSpec() *source.SourceSpec { - v := config.New() - afs := hugofs.NewBaseFileDecorator(afero.NewMemMapFs()) - conf := testconfig.GetTestConfig(afs, v) - fs := hugofs.NewFrom(afs, conf.BaseConfig()) - ps, err := helpers.NewPathSpec(fs, conf, nil) - if err != nil { - panic(err) - } - return source.NewSourceSpec(ps, nil, fs.Source) -} diff --git a/source/sourceSpec.go b/source/sourceSpec.go index dc44994a8..ea1b977f3 100644 --- a/source/sourceSpec.go +++ b/source/sourceSpec.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -15,7 +15,6 @@ package source import ( - "os" "path/filepath" "runtime" @@ -38,7 +37,6 @@ type SourceSpec struct { // NewSourceSpec initializes SourceSpec using languages the given filesystem and PathSpec. func NewSourceSpec(ps *helpers.PathSpec, inclusionFilter *glob.FilenameFilter, fs afero.Fs) *SourceSpec { - shouldInclude := func(filename string) bool { if !inclusionFilter.Match(filename, false) { return false @@ -90,34 +88,3 @@ func (s *SourceSpec) IgnoreFile(filename string) bool { return false } - -// IsRegularSourceFile returns whether filename represents a regular file in the -// source filesystem. 
-func (s *SourceSpec) IsRegularSourceFile(filename string) (bool, error) { - fi, err := helpers.LstatIfPossible(s.SourceFs, filename) - if err != nil { - return false, err - } - - if fi.IsDir() { - return false, nil - } - - if fi.Mode()&os.ModeSymlink == os.ModeSymlink { - link, err := filepath.EvalSymlinks(filename) - if err != nil { - return false, err - } - - fi, err = helpers.LstatIfPossible(s.SourceFs, link) - if err != nil { - return false, err - } - - if fi.IsDir() { - return false, nil - } - } - - return true, nil -} diff --git a/testscripts/commands/hugo__processingstats.txt b/testscripts/commands/hugo__processingstats.txt index 0e700b607..3d30b8155 100644 --- a/testscripts/commands/hugo__processingstats.txt +++ b/testscripts/commands/hugo__processingstats.txt @@ -1,17 +1,32 @@ cp $SOURCE/resources/testdata/pix.gif content/en/bundle1/pix.gif cp $SOURCE/resources/testdata/pix.gif content/en/bundle2/pix.gif cp $SOURCE/resources/testdata/pix.gif content/fr/bundle1/pix.gif +mkdir static/images +cp $SOURCE/resources/testdata/pix.gif static/images/p1.gif +cp $SOURCE/resources/testdata/pix.gif static/images/p2.gif +cp $SOURCE/resources/testdata/pix.gif static/images/p3.gif +cp $SOURCE/resources/testdata/pix.gif static/images/p4.gif + hugo stdout 'Pages.*3.*2' stdout 'Processed images.*2.*1' +stdout 'Static files.*4 |' + +ls public/images +stdout 'p1.gif' +stdout 'p2.gif' +stdout 'p3.gif' +stdout 'p4.gif' -- content/en/bundle1/index.md -- -- content/en/bundle2/index.md -- -- content/fr/bundle1/index.md -- -- hugo.toml -- disableKinds = ["taxonomy", "term", "RSS", "sitemap", "robotsTXT", "404"] +defaultLanguage = "en" +defaultLanguageInSubdir = true baseURL = "https://example.com/" [languages] [languages.en] diff --git a/testscripts/commands/hugo__processingstats2.txt b/testscripts/commands/hugo__processingstats2.txt new file mode 100644 index 000000000..2f8226faa --- /dev/null +++ b/testscripts/commands/hugo__processingstats2.txt @@ -0,0 +1,16 @@ +cp $SOURCE/resources/testdata/pix.gif content/posts/post-1/p1.gif +cp $SOURCE/resources/testdata/pix.gif content/posts/post-1/p2.gif + +hugo + +stdout 'Pages.*/| 10\s' +stdout 'Non-page files.*/| 2\s' + +-- content/posts/post-1/index.md -- +-- hugo.toml -- +baseURL = "https://example.com/" +-- layouts/_default/list.html -- +List. +-- layouts/_default/single.html -- +Single. + diff --git a/testscripts/commands/mod_npm.txt b/testscripts/commands/mod_npm.txt index fb0aa38c8..32cc37f06 100644 --- a/testscripts/commands/mod_npm.txt +++ b/testscripts/commands/mod_npm.txt @@ -1,23 +1,43 @@ # Test mod npm. 
+dostounix golden/package.json + hugo mod npm pack -cmp package.hugo.json golden/package.hugo.json +cmp package.json golden/package.json -- hugo.toml -- baseURL = "https://example.org/" --- package.json -- +[module] +[[module.imports]] +path="github.com/gohugoio/hugoTestModule2" + + +-- golden/package.json -- { - "name": "test", - "version": "1.0.0", + "comments": { + "dependencies": { + "react-dom": "github.com/gohugoio/hugoTestModule2" + }, + "devDependencies": { + "@babel/cli": "github.com/gohugoio/hugoTestModule2", + "@babel/core": "github.com/gohugoio/hugoTestModule2", + "@babel/preset-env": "github.com/gohugoio/hugoTestModule2", + "postcss-cli": "github.com/gohugoio/hugoTestModule2", + "tailwindcss": "github.com/gohugoio/hugoTestModule2" + } + }, "dependencies": { - "mod": "foo-bar" - } -} --- golden/package.hugo.json -- -{ - "name": "test", - "version": "1.0.0", - "dependencies": { - "mod": "foo-bar" - } + "react-dom": "^16.13.1" + }, + "devDependencies": { + "@babel/cli": "7.8.4", + "@babel/core": "7.9.0", + "@babel/preset-env": "7.9.5", + "postcss-cli": "7.1.0", + "tailwindcss": "1.2.0" + }, + "name": "script-mod_npm", + "version": "0.1.0" } +-- go.mod -- +module github.com/gohugoio/hugoTestModule diff --git a/testscripts/commands/mod_npm_withexisting.txt b/testscripts/commands/mod_npm_withexisting.txt new file mode 100644 index 000000000..e92eba3fd --- /dev/null +++ b/testscripts/commands/mod_npm_withexisting.txt @@ -0,0 +1,57 @@ +# Test mod npm. + +dostounix golden/package.json + +hugo mod npm pack +cmp package.json golden/package.json + +-- hugo.toml -- +baseURL = "https://example.org/" +[module] +[[module.imports]] +path="github.com/gohugoio/hugoTestModule2" +-- package.json -- +{ + "comments": { + "foo": { + "a": "b" + } + }, + "devDependencies": { + "tailwindcss": "2.2.0" + }, + "name": "mypackage", + "version": "1.1.0" +} +-- golden/package.json -- +{ + "comments": { + "dependencies": { + "react-dom": "github.com/gohugoio/hugoTestModule2" + }, + "devDependencies": { + "@babel/cli": "github.com/gohugoio/hugoTestModule2", + "@babel/core": "github.com/gohugoio/hugoTestModule2", + "@babel/preset-env": "github.com/gohugoio/hugoTestModule2", + "postcss-cli": "github.com/gohugoio/hugoTestModule2", + "tailwindcss": "project" + }, + "foo": { + "a": "b" + } + }, + "dependencies": { + "react-dom": "^16.13.1" + }, + "devDependencies": { + "@babel/cli": "7.8.4", + "@babel/core": "7.9.0", + "@babel/preset-env": "7.9.5", + "postcss-cli": "7.1.0", + "tailwindcss": "2.2.0" + }, + "name": "mypackage", + "version": "1.1.0" +} +-- go.mod -- +module github.com/gohugoio/hugoTestModule diff --git a/testscripts/commands/new_content_archetypedir.txt b/testscripts/commands/new_content_archetypedir.txt new file mode 100644 index 000000000..ccd85c999 --- /dev/null +++ b/testscripts/commands/new_content_archetypedir.txt @@ -0,0 +1,40 @@ +mkdir content +hugo new content --kind mybundle post/first-post +grep 'First Post' content/post/first-post/index.md +grep 'Site Lang: en' content/post/first-post/index.md +grep 'Site Lang: no' content/post/first-post/index.no.md +grep 'A text file.' content/post/first-post/file.txt + +-- hugo.toml -- +baseURL = "http://example.org/" +[languages] +[languages.en] +languageName = "English" +weight = 1 +[languages.no] +languageName = "Norsk" +weight = 2 + +-- archetypes/mybundle/index.md -- +--- +title: "{{ replace .Name "-" " " | title }}" +date: {{ .Date }} +draft: true +--- + +Site Lang: {{ site.Language.Lang }}. 
+-- archetypes/mybundle/index.no.md -- +--- +title: "{{ replace .Name "-" " " | title }}" +date: {{ .Date }} +draft: true +--- + +Site Lang: {{ site.Language.Lang }}. + +-- archetypes/mybundle/file.txt -- +A text file. + + + + diff --git a/testscripts/commands/server.txt b/testscripts/commands/server.txt index fd6b200bc..777a91454 100644 --- a/testscripts/commands/server.txt +++ b/testscripts/commands/server.txt @@ -7,6 +7,7 @@ waitServer httpget $HUGOTEST_BASEURL_0 'Title: Hugo Server Test' $HUGOTEST_BASEURL_0 'ServerPort: \d{4,5}' 'myenv: thedevelopment' 'livereload\.js' 'Env: development' 'IsServer: true' httpget ${HUGOTEST_BASEURL_0}doesnotexist 'custom 404' +httpget ${HUGOTEST_BASEURL_0}livereload.js 'function' # By defauilt, the server renders to memory. ! exists public/index.html diff --git a/tpl/collections/apply.go b/tpl/collections/apply.go index 397ba0fdb..3d50395b9 100644 --- a/tpl/collections/apply.go +++ b/tpl/collections/apply.go @@ -67,7 +67,7 @@ func (ns *Namespace) Apply(ctx context.Context, c any, fname string, args ...any func applyFnToThis(ctx context.Context, fn, this reflect.Value, args ...any) (reflect.Value, error) { num := fn.Type().NumIn() - if num > 0 && fn.Type().In(0).Implements(hreflect.ContextInterface) { + if num > 0 && hreflect.IsContextType(fn.Type().In(0)) { args = append([]any{ctx}, args...) } diff --git a/tpl/collections/apply_test.go b/tpl/collections/apply_test.go index aa39923b7..0a5764264 100644 --- a/tpl/collections/apply_test.go +++ b/tpl/collections/apply_test.go @@ -22,6 +22,7 @@ import ( qt "github.com/frankban/quicktest" "github.com/gohugoio/hugo/config/testconfig" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/output" "github.com/gohugoio/hugo/output/layouts" "github.com/gohugoio/hugo/tpl" @@ -29,6 +30,10 @@ import ( type templateFinder int +func (templateFinder) GetIdentity(string) (identity.Identity, bool) { + return identity.StringIdentity("test"), true +} + func (templateFinder) Lookup(name string) (tpl.Template, bool) { return nil, false } diff --git a/tpl/collections/collections.go b/tpl/collections/collections.go index e34753f17..61fd138e9 100644 --- a/tpl/collections/collections.go +++ b/tpl/collections/collections.go @@ -35,11 +35,6 @@ import ( "github.com/spf13/cast" ) -func init() { - // htime.Now cannot be used here - rand.Seed(time.Now().UTC().UnixNano()) -} - // New returns a new instance of the collections-namespaced template functions. func New(deps *deps.Deps) *Namespace { language := deps.Conf.Language() @@ -149,7 +144,7 @@ func (ns *Namespace) Delimit(ctx context.Context, l, sep any, last ...any) (stri } default: - return "", fmt.Errorf("can't iterate over %v", l) + return "", fmt.Errorf("can't iterate over %T", l) } return str, nil diff --git a/tpl/collections/collections_test.go b/tpl/collections/collections_test.go index dcdd3bd5c..7dd518759 100644 --- a/tpl/collections/collections_test.go +++ b/tpl/collections/collections_test.go @@ -699,7 +699,6 @@ func TestShuffleRandomising(t *testing.T) { // of the sequence happens to be the same as the original sequence. However // the probability of the event is 10^-158 which is negligible. 
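applyFnToThis and evaluateSubElem above now ask hreflect.IsContextType whether a function's first parameter is a context before prepending ctx to the call arguments. A standard-library sketch of that reflection check (not hreflect's actual implementation, which may handle more cases):

```go
package main

import (
	"context"
	"fmt"
	"reflect"
)

var contextType = reflect.TypeOf((*context.Context)(nil)).Elem()

// isContextType reports whether t satisfies context.Context, the kind of
// check performed before injecting ctx into a template function call.
func isContextType(t reflect.Type) bool {
	return t.Implements(contextType)
}

func takesCtx(ctx context.Context, s string) string { return s }

func main() {
	fn := reflect.TypeOf(takesCtx)
	fmt.Println(fn.NumIn() > 0 && isContextType(fn.In(0))) // true
}
```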
seqLen := 100 - rand.Seed(time.Now().UTC().UnixNano()) for _, test := range []struct { seq []int @@ -895,6 +894,7 @@ func (x TstX) TstRv2() string { return "r" + x.B } +//lint:ignore U1000 reflect test func (x TstX) unexportedMethod() string { return x.unexported } @@ -923,7 +923,7 @@ func (x TstX) String() string { type TstX struct { A, B string - unexported string + unexported string //lint:ignore U1000 reflect test } type TstParams struct { diff --git a/tpl/collections/integration_test.go b/tpl/collections/integration_test.go index a443755f8..24727a12c 100644 --- a/tpl/collections/integration_test.go +++ b/tpl/collections/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -97,11 +97,9 @@ func TestAppendSliceToASliceOfSlices(t *testing.T) { ).Build() b.AssertFileContent("public/index.html", "[[a] [b] [c]]") - } func TestAppendNilToSlice(t *testing.T) { - t.Parallel() files := ` @@ -123,11 +121,9 @@ func TestAppendNilToSlice(t *testing.T) { ).Build() b.AssertFileContent("public/index.html", "[a <nil>]") - } func TestAppendNilsToSliceWithNils(t *testing.T) { - t.Parallel() files := ` @@ -153,7 +149,6 @@ func TestAppendNilsToSliceWithNils(t *testing.T) { b.AssertFileContent("public/index.html", "[a <nil> c <nil>]") } - } // Issue 11234. diff --git a/tpl/collections/where.go b/tpl/collections/where.go index 07c2d3deb..bf3f75044 100644 --- a/tpl/collections/where.go +++ b/tpl/collections/where.go @@ -51,7 +51,7 @@ func (ns *Namespace) Where(ctx context.Context, c, key any, args ...any) (any, e case reflect.Map: return ns.checkWhereMap(ctxv, seqv, kv, mv, path, op) default: - return nil, fmt.Errorf("can't iterate over %v", c) + return nil, fmt.Errorf("can't iterate over %T", c) } } @@ -320,7 +320,7 @@ func evaluateSubElem(ctx, obj reflect.Value, elemName string) (reflect.Value, er mt := objPtr.Type().Method(index) num := mt.Type.NumIn() maxNumIn := 1 - if num > 1 && mt.Type.In(1).Implements(hreflect.ContextInterface) { + if num > 1 && hreflect.IsContextType(mt.Type.In(1)) { args = []reflect.Value{ctx} maxNumIn = 2 } diff --git a/tpl/data/data.go b/tpl/data/data.go index 380c25685..b6b0515e8 100644 --- a/tpl/data/data.go +++ b/tpl/data/data.go @@ -24,6 +24,7 @@ import ( "net/http" "strings" + "github.com/gohugoio/hugo/cache/filecache" "github.com/gohugoio/hugo/common/maps" "github.com/gohugoio/hugo/config/security" @@ -33,7 +34,6 @@ import ( "github.com/spf13/cast" - "github.com/gohugoio/hugo/cache/filecache" "github.com/gohugoio/hugo/deps" ) @@ -108,7 +108,7 @@ func (ns *Namespace) GetJSON(args ...any) (any, error) { req, err := http.NewRequest("GET", url, nil) if err != nil { - return nil, fmt.Errorf("Failed to create request for getJSON resource %s: %w", url, err) + return nil, fmt.Errorf("failed to create request for getJSON resource %s: %w", url, err) } unmarshal := func(b []byte) (bool, error) { diff --git a/tpl/data/resources.go b/tpl/data/resources.go index 45764dae7..3a3701d60 100644 --- a/tpl/data/resources.go +++ b/tpl/data/resources.go @@ -23,7 +23,6 @@ import ( "time" "github.com/gohugoio/hugo/cache/filecache" - "github.com/gohugoio/hugo/helpers" "github.com/spf13/afero" ) @@ -68,7 +67,7 @@ func (ns *Namespace) getRemote(cache *filecache.Cache, unmarshal func([]byte) (b res.Body.Close() if isHTTPError(res) { - return nil, fmt.Errorf("Failed to 
retrieve remote file: %s, body: %q", http.StatusText(res.StatusCode), b) + return nil, fmt.Errorf("failed to retrieve remote file: %s, body: %q", http.StatusText(res.StatusCode), b) } retry, err = unmarshal(b) diff --git a/tpl/data/resources_test.go b/tpl/data/resources_test.go index d452a2a43..b8003bf43 100644 --- a/tpl/data/resources_test.go +++ b/tpl/data/resources_test.go @@ -15,9 +15,6 @@ package data import ( "bytes" - - "github.com/gohugoio/hugo/common/loggers" - "net/http" "net/http/httptest" "net/url" @@ -26,12 +23,14 @@ import ( "testing" "time" + "github.com/gohugoio/hugo/cache/filecache" + "github.com/gohugoio/hugo/common/loggers" + "github.com/gohugoio/hugo/config/testconfig" "github.com/gohugoio/hugo/helpers" qt "github.com/frankban/quicktest" - "github.com/gohugoio/hugo/cache/filecache" "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/deps" "github.com/gohugoio/hugo/hugofs" diff --git a/tpl/debug/integration_test.go b/tpl/debug/integration_test.go index 3d120580d..9a36e2d12 100644 --- a/tpl/debug/integration_test.go +++ b/tpl/debug/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -41,5 +41,5 @@ disableKinds = ["taxonomy", "term"] }, ).Build() - b.AssertLogContains("imer: name \"foo\" count '\\x05' duration") + b.AssertLogContains("timer: name foo count 5 duration") } diff --git a/tpl/diagrams/diagrams.go b/tpl/diagrams/diagrams.go index dfa29a978..6a58bcfe4 100644 --- a/tpl/diagrams/diagrams.go +++ b/tpl/diagrams/diagrams.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/tpl/diagrams/goat.go b/tpl/diagrams/goat.go index f3d4f4bfb..fe156f1e8 100644 --- a/tpl/diagrams/goat.go +++ b/tpl/diagrams/goat.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/tpl/diagrams/init.go b/tpl/diagrams/init.go index e6356ce9c..0cbec7e1b 100644 --- a/tpl/diagrams/init.go +++ b/tpl/diagrams/init.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/tpl/fmt/integration_test.go b/tpl/fmt/integration_test.go index 5010fa90e..40bfefcdc 100644 --- a/tpl/fmt/integration_test.go +++ b/tpl/fmt/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -41,5 +41,4 @@ ignoreErrors = ['error-b'] b.BuildE() b.AssertLogMatches(`^ERROR a\nYou can suppress this error by adding the following to your site configuration:\nignoreErrors = \['error-a'\]\n$`) - } diff --git a/tpl/images/integration_test.go b/tpl/images/integration_test.go index ad810ad92..81f35e39c 100644 --- a/tpl/images/integration_test.go +++ b/tpl/images/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/tpl/internal/go_templates/staticcheck.conf b/tpl/internal/go_templates/staticcheck.conf new file mode 100644 index 000000000..9cf5a78a4 --- /dev/null +++ b/tpl/internal/go_templates/staticcheck.conf @@ -0,0 +1 @@ +checks = ["none"] \ No newline at end of file diff --git a/tpl/internal/go_templates/texttemplate/hugo_template.go b/tpl/internal/go_templates/texttemplate/hugo_template.go index 78be55e18..4db40ce82 100644 --- a/tpl/internal/go_templates/texttemplate/hugo_template.go +++ b/tpl/internal/go_templates/texttemplate/hugo_template.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -59,23 +59,6 @@ func NewExecuter(helper ExecHelper) Executer { return &executer{helper: helper} } -type ( - pageContextKeyType string - hasLockContextKeyType string - stackContextKeyType string - callbackContextKeyType string -) - -const ( - // The data page passed to ExecuteWithContext gets stored with this key. - PageContextKey = pageContextKeyType("page") - // Used in partialCached to signal to nested templates that a lock is already taken. - HasLockContextKey = hasLockContextKeyType("hasLock") - - // Used to pass down a callback function to nested templates. - CallbackContextKey = callbackContextKeyType("callback") -) - // Note: The context is currently not fully implemented in Hugo. This is a work in progress. func (t *executer) ExecuteWithContext(ctx context.Context, p Preparer, wr io.Writer, data any) error { if ctx == nil { diff --git a/tpl/internal/go_templates/texttemplate/hugo_template_test.go b/tpl/internal/go_templates/texttemplate/hugo_template_test.go index cc88151e3..c68b747dd 100644 --- a/tpl/internal/go_templates/texttemplate/hugo_template_test.go +++ b/tpl/internal/go_templates/texttemplate/hugo_template_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -56,7 +56,7 @@ func (e *execHelper) GetMapValue(ctx context.Context, tmpl Preparer, m, key refl return m.MapIndex(key), true } -func (e *execHelper) GetMethod(ctx context.Context, tmpl Preparer, receiver reflect.Value, name string) (method reflect.Value, firstArg reflect.Value) { +func (e *execHelper) GetMethod(ctx context.Context, tmpl Preparer, receiver reflect.Value, name string) (reflect.Value, reflect.Value) { if name != "Hello1" { return zero, zero } diff --git a/tpl/internal/templatefuncsRegistry.go b/tpl/internal/templatefuncsRegistry.go index c1b01f5a5..fc02a6ef9 100644 --- a/tpl/internal/templatefuncsRegistry.go +++ b/tpl/internal/templatefuncsRegistry.go @@ -170,7 +170,7 @@ func (namespaces TemplateFuncsNamespaces) MarshalJSON() ([]byte, error) { for i, ns := range namespaces { - b, err := ns.toJSON(context.TODO()) + b, err := ns.toJSON(context.Background()) if err != nil { return nil, err } diff --git a/tpl/js/js.go b/tpl/js/js.go index bb8d20966..63a676532 100644 --- a/tpl/js/js.go +++ b/tpl/js/js.go @@ -34,7 +34,6 @@ func New(deps *deps.Deps) *Namespace { // Namespace provides template functions for the "js" namespace. type Namespace struct { - deps *deps.Deps client *js.Client } diff --git a/tpl/lang/lang_test.go b/tpl/lang/lang_test.go index 8d5430f6f..6ec40cab3 100644 --- a/tpl/lang/lang_test.go +++ b/tpl/lang/lang_test.go @@ -41,8 +41,8 @@ func TestNumFmt(t *testing.T) { {6, -12345.6789, "-|,| ", "|", "-12 345,678900"}, // Arabic, ar_AE - {6, -12345.6789, "‏- ٫ ٬", "", "‏-12٬345٫678900"}, - {6, -12345.6789, "‏-|٫| ", "|", "‏-12 345٫678900"}, + {6, -12345.6789, "\u200f- ٫ ٬", "", "\u200f-12٬345٫678900"}, + {6, -12345.6789, "\u200f-|٫| ", "|", "\u200f-12 345٫678900"}, } for _, cas := range cases { @@ -65,7 +65,6 @@ func TestNumFmt(t *testing.T) { } func TestFormatNumbers(t *testing.T) { - c := qt.New(t) nsNn := New(&deps.Deps{}, translators.GetTranslator("nn")) @@ -103,12 +102,10 @@ func TestFormatNumbers(t *testing.T) { c.Assert(err, qt.IsNil) c.Assert(got, qt.Equals, "$20,000.00") }) - } // Issue 9446 func TestLanguageKeyFormat(t *testing.T) { - c := qt.New(t) nsUnderscoreUpper := New(&deps.Deps{}, translators.GetTranslator("es_ES")) @@ -134,7 +131,5 @@ func TestLanguageKeyFormat(t *testing.T) { got, err = nsHyphenLower.FormatNumber(3, pi) c.Assert(err, qt.IsNil) c.Assert(got, qt.Equals, "3,142") - }) - } diff --git a/tpl/math/math_test.go b/tpl/math/math_test.go index 5b54b6ac8..4cde3fb85 100644 --- a/tpl/math/math_test.go +++ b/tpl/math/math_test.go @@ -335,7 +335,7 @@ func TestRound(t *testing.T) { {0.5, 1.0}, {1.1, 1.0}, {1.5, 2.0}, - {-0.1, -0.0}, + {-0.1, 0.0}, {-0.5, -1.0}, {-1.1, -1.0}, {-1.5, -2.0}, @@ -524,7 +524,6 @@ func TestSum(t *testing.T) { _, err := ns.Sum() c.Assert(err, qt.Not(qt.IsNil)) - } func TestProduct(t *testing.T) { @@ -547,5 +546,4 @@ func TestProduct(t *testing.T) { _, err := ns.Product() c.Assert(err, qt.Not(qt.IsNil)) - } diff --git a/tpl/openapi/openapi3/integration_test.go b/tpl/openapi/openapi3/integration_test.go index d3be0eda9..6914a60b3 100644 --- a/tpl/openapi/openapi3/integration_test.go +++ b/tpl/openapi/openapi3/integration_test.go @@ -67,7 +67,7 @@ API: {{ $api.Info.Title | safeHTML }} b.AssertFileContent("public/index.html", `API: Sample API`) b. - EditFileReplace("assets/api/myapi.yaml", func(s string) string { return strings.ReplaceAll(s, "Sample API", "Hugo API") }). + EditFileReplaceFunc("assets/api/myapi.yaml", func(s string) string { return strings.ReplaceAll(s, "Sample API", "Hugo API") }). 
Build() b.AssertFileContent("public/index.html", `API: Hugo API`) diff --git a/tpl/openapi/openapi3/openapi3.go b/tpl/openapi/openapi3/openapi3.go index 38857dd98..f929c7f62 100644 --- a/tpl/openapi/openapi3/openapi3.go +++ b/tpl/openapi/openapi3/openapi3.go @@ -15,44 +15,42 @@ package openapi3 import ( + "errors" "fmt" "io" gyaml "github.com/ghodss/yaml" - "errors" - kopenapi3 "github.com/getkin/kin-openapi/openapi3" - "github.com/gohugoio/hugo/cache/namedmemcache" + "github.com/gohugoio/hugo/cache/dynacache" "github.com/gohugoio/hugo/deps" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/parser/metadecoders" "github.com/gohugoio/hugo/resources/resource" ) // New returns a new instance of the openapi3-namespaced template functions. func New(deps *deps.Deps) *Namespace { - // TODO(bep) consolidate when merging that "other branch" -- but be aware of the keys. - cache := namedmemcache.New() - deps.BuildStartListeners.Add( - func() { - cache.Clear() - }) - return &Namespace{ - cache: cache, + cache: dynacache.GetOrCreatePartition[string, *OpenAPIDocument](deps.MemCache, "/tmpl/openapi3", dynacache.OptionsPartition{Weight: 30, ClearWhen: dynacache.ClearOnChange}), deps: deps, } } // Namespace provides template functions for the "openapi3". type Namespace struct { - cache *namedmemcache.Cache + cache *dynacache.Partition[string, *OpenAPIDocument] deps *deps.Deps } // OpenAPIDocument represents an OpenAPI 3 document. type OpenAPIDocument struct { *kopenapi3.T + identityGroup identity.Identity +} + +func (o *OpenAPIDocument) GetIdentityGroup() identity.Identity { + return o.identityGroup } // Unmarshal unmarshals the given resource into an OpenAPI 3 document. @@ -62,7 +60,7 @@ func (ns *Namespace) Unmarshal(r resource.UnmarshableResource) (*OpenAPIDocument return nil, errors.New("no Key set in Resource") } - v, err := ns.cache.GetOrCreate(key, func() (any, error) { + v, err := ns.cache.GetOrCreate(key, func(string) (*OpenAPIDocument, error) { f := metadecoders.FormatFromStrings(r.MediaType().Suffixes()...) if f == "" { return nil, fmt.Errorf("MIME %q not supported", r.MediaType()) @@ -92,11 +90,11 @@ func (ns *Namespace) Unmarshal(r resource.UnmarshableResource) (*OpenAPIDocument err = kopenapi3.NewLoader().ResolveRefsIn(s, nil) - return &OpenAPIDocument{T: s}, err + return &OpenAPIDocument{T: s, identityGroup: identity.FirstIdentity(r)}, err }) if err != nil { return nil, err } - return v.(*OpenAPIDocument), nil + return v, nil } diff --git a/tpl/os/integration_test.go b/tpl/os/integration_test.go index d08374f8f..58e0ef70a 100644 --- a/tpl/os/integration_test.go +++ b/tpl/os/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/tpl/page/init.go b/tpl/page/init.go index 52aeaafd6..826aa45d3 100644 --- a/tpl/page/init.go +++ b/tpl/page/init.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
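The openapi3 namespace above moves from the untyped namedmemcache to a typed dynacache partition, which is why the `v.(*OpenAPIDocument)` assertion disappears from Unmarshal. A sketch of that typed-partition-over-an-untyped-store idea with hypothetical local types (not the dynacache API):

```go
package main

import (
	"fmt"
	"sync"
)

// anyCache is a stand-in for an untyped memo cache (values stored as any).
type anyCache struct {
	mu sync.Mutex
	m  map[string]any
}

func (c *anyCache) getOrCreate(key string, create func() (any, error)) (any, error) {
	c.mu.Lock()
	defer c.mu.Unlock()
	if c.m == nil {
		c.m = map[string]any{}
	}
	if v, ok := c.m[key]; ok {
		return v, nil
	}
	v, err := create()
	if err != nil {
		return nil, err
	}
	c.m[key] = v
	return v, nil
}

// partition gives the untyped store a typed face, so callers no longer need
// type assertions on the values they get back.
type partition[V any] struct {
	cache *anyCache
	name  string
}

func (p partition[V]) GetOrCreate(key string, create func(string) (V, error)) (V, error) {
	v, err := p.cache.getOrCreate(p.name+"/"+key, func() (any, error) {
		return create(key)
	})
	if err != nil {
		var zero V
		return zero, err
	}
	return v.(V), nil
}

type document struct{ Title string }

func main() {
	p := partition[*document]{cache: &anyCache{}, name: "/tmpl/openapi3"}
	doc, _ := p.GetOrCreate("api.yaml", func(string) (*document, error) {
		return &document{Title: "Sample API"}, nil
	})
	fmt.Println(doc.Title)
}
```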
@@ -32,7 +32,7 @@ func init() { ns := &internal.TemplateFuncsNamespace{ Name: name, Context: func(ctx context.Context, args ...interface{}) (interface{}, error) { - v := tpl.GetPageFromContext(ctx) + v := tpl.Context.Page.Get(ctx) if v == nil { // The multilingual sitemap does not have a page as its context. return nil, nil diff --git a/tpl/page/integration_test.go b/tpl/page/integration_test.go index 74788377d..632c3b64e 100644 --- a/tpl/page/integration_test.go +++ b/tpl/page/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -112,11 +112,11 @@ Bundled page: {{ $p2_1.Content }} -- layouts/shortcodes/shortcode.html -- {{ if page.IsHome }}Shortcode {{ .Get 0 }} OK.{{ else }}Failed.{{ end }} -- layouts/sitemap.xml -- -HRE?{{ if eq page . }}Sitemap OK.{{ else }}Failed.{{ end }} +{{ if eq page . }}Sitemap OK.{{ else }}Failed.{{ end }} -- layouts/robots.txt -- {{ if eq page . }}Robots OK.{{ else }}Failed.{{ end }} -- layouts/sitemapindex.xml -- -{{ if not page }}SitemapIndex OK.{{ else }}Failed.{{ end }} +{{ with page }}SitemapIndex OK: {{ .Kind }}{{ else }}Failed.{{ end }} ` @@ -167,15 +167,12 @@ Shortcode in bundled page OK. b.AssertFileContent("public/page/1/index.html", `Alias OK.`) b.AssertFileContent("public/page/2/index.html", `Page OK.`) if multilingual { - b.AssertFileContent("public/sitemap.xml", `SitemapIndex OK.`) + b.AssertFileContent("public/sitemap.xml", `SitemapIndex OK: sitemapindex`) } else { b.AssertFileContent("public/sitemap.xml", `Sitemap OK.`) } - }) - } - } // Issue 10791. @@ -207,5 +204,23 @@ title: "P1" ).Build() b.AssertFileContent("public/p1/index.html", " \n

    Heading 1

    ") - +} + +func TestFromStringRunning(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +disableLiveReload = true +-- layouts/index.html -- +{{ with resources.FromString "foo" "{{ seq 3 }}" }} +{{ with resources.ExecuteAsTemplate "bar" $ . }} + {{ .Content | safeHTML }} +{{ end }} +{{ end }} + ` + + b := hugolib.TestRunning(t, files) + + b.AssertFileContent("public/index.html", "1\n2\n3") } diff --git a/tpl/partials/integration_test.go b/tpl/partials/integration_test.go index 3dbaf2ce4..e48f3bb20 100644 --- a/tpl/partials/integration_test.go +++ b/tpl/partials/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -297,7 +297,6 @@ timeout = '200ms' b.Assert(err, qt.Not(qt.IsNil)) b.Assert(err.Error(), qt.Contains, "timed out") - } func TestIncludeCachedTimeout(t *testing.T) { @@ -322,7 +321,6 @@ timeout = '200ms' b.Assert(err, qt.Not(qt.IsNil)) b.Assert(err.Error(), qt.Contains, "timed out") - } // See Issue #10789 @@ -350,5 +348,4 @@ BAR ).Build() b.AssertFileContent("public/index.html", "OO:BAR") - } diff --git a/tpl/partials/partials.go b/tpl/partials/partials.go index 3834529ce..8e36e21b9 100644 --- a/tpl/partials/partials.go +++ b/tpl/partials/partials.go @@ -40,9 +40,10 @@ type partialCacheKey struct { Variants []any } type includeResult struct { - name string - result any - err error + name string + result any + mangager identity.Manager + err error } func (k partialCacheKey) Key() string { @@ -65,7 +66,7 @@ type partialCache struct { } func (p *partialCache) clear() { - p.cache.DeleteFunc(func(string, includeResult) bool { + p.cache.DeleteFunc(func(s string, r includeResult) bool { return true }) } @@ -75,7 +76,7 @@ func New(deps *deps.Deps) *Namespace { // This lazycache was introduced in Hugo 0.111.0. // We're going to expand and consolidate all memory caches in Hugo using this, // so just set a high limit for now. - lru := lazycache.New[string, includeResult](lazycache.Options{MaxEntries: 1000}) + lru := lazycache.New(lazycache.Options[string, includeResult]{MaxEntries: 1000}) cache := &partialCache{cache: lru} deps.BuildStartListeners.Add( @@ -142,11 +143,11 @@ func (ns *Namespace) includWithTimeout(ctx context.Context, name string, dataLis case <-timeoutCtx.Done(): err := timeoutCtx.Err() if err == context.DeadlineExceeded { + //lint:ignore ST1005 end user message. err = fmt.Errorf("partial %q timed out after %s. This is most likely due to infinite recursion. If this is just a slow template, you can try to increase the 'timeout' config setting.", name, ns.deps.Conf.Timeout()) } return includeResult{err: err} } - } // include is a helper function that lookups and executes the named partial. @@ -215,7 +216,6 @@ func (ns *Namespace) include(ctx context.Context, name string, dataList ...any) name: templ.Name(), result: result, } - } // IncludeCached executes and caches partial templates. The cache is created with name+variants as the key. 
@@ -226,12 +226,22 @@ func (ns *Namespace) IncludeCached(ctx context.Context, name string, context any Name: name, Variants: variants, } + depsManagerIn := tpl.Context.GetDependencyManagerInCurrentScope(ctx) r, found, err := ns.cachedPartials.cache.GetOrCreate(key.Key(), func(string) (includeResult, error) { + var depsManagerShared identity.Manager + if ns.deps.Conf.Watching() { + // We need to create a shared dependency manager to pass downwards + // and add those same dependencies to any cached invocation of this partial. + depsManagerShared = identity.NewManager("partials") + ctx = tpl.Context.DependencyManagerScopedProvider.Set(ctx, depsManagerShared.(identity.DependencyManagerScopedProvider)) + } r := ns.includWithTimeout(ctx, key.Name, context) + if ns.deps.Conf.Watching() { + r.mangager = depsManagerShared + } return r, r.err }) - if err != nil { return nil, err } @@ -242,10 +252,13 @@ func (ns *Namespace) IncludeCached(ctx context.Context, name string, context any // We need to track the time spent in the cache to // get the totals correct. ns.deps.Metrics.MeasureSince(key.templateName(), start) - } ns.deps.Metrics.TrackValue(key.templateName(), r.result, found) } + if r.mangager != nil && depsManagerIn != nil { + depsManagerIn.AddIdentity(r.mangager) + } + return r.result, nil } diff --git a/tpl/reflect/reflect_test.go b/tpl/reflect/reflect_test.go index f85af87dd..84ffe813b 100644 --- a/tpl/reflect/reflect_test.go +++ b/tpl/reflect/reflect_test.go @@ -21,8 +21,6 @@ import ( var ns = New() -type tstNoStringer struct{} - func TestIsMap(t *testing.T) { c := qt.New(t) for _, test := range []struct { diff --git a/tpl/resources/integration_test.go b/tpl/resources/integration_test.go index 0e0a29a98..02aa5d29d 100644 --- a/tpl/resources/integration_test.go +++ b/tpl/resources/integration_test.go @@ -72,10 +72,9 @@ Copy3: /blog/js/copies/moo.a677329fc6c4ad947e0c7116d91f37a2.min.js|text/javascri `) - b.AssertDestinationExists("images/copy2.png", true) + b.AssertFileExists("public/images/copy2.png", true) // No permalink used. - b.AssertDestinationExists("images/copy3.png", false) - + b.AssertFileExists("public/images/copy3.png", false) } func TestCopyPageShouldFail(t *testing.T) { @@ -96,7 +95,6 @@ func TestCopyPageShouldFail(t *testing.T) { }).BuildE() b.Assert(err, qt.IsNotNil) - } func TestGet(t *testing.T) { @@ -125,5 +123,4 @@ Image OK Empty string not found `) - } diff --git a/tpl/resources/resources.go b/tpl/resources/resources.go index d18797ebc..04af756ef 100644 --- a/tpl/resources/resources.go +++ b/tpl/resources/resources.go @@ -16,16 +16,15 @@ package resources import ( "context" + "errors" "fmt" "sync" - "errors" - "github.com/gohugoio/hugo/common/maps" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/tpl/internal/resourcehelpers" - "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/resources/postpub" "github.com/gohugoio/hugo/deps" @@ -104,7 +103,6 @@ func (ns *Namespace) getscssClientDartSass() (*dartsass.Client, error) { return } ns.deps.BuildClosers.Add(ns.scssClientDartSass) - }) return ns.scssClientDartSass, err @@ -122,7 +120,6 @@ func (ns *Namespace) Copy(s any, r resource.Resource) (resource.Resource, error) // Get locates the filename given in Hugo's assets filesystem // and creates a Resource object that can be used for further transformations. 
func (ns *Namespace) Get(filename any) resource.Resource { - filenamestr, err := cast.ToStringE(filename) if err != nil { panic(err) @@ -172,7 +169,6 @@ func (ns *Namespace) GetRemote(args ...any) resource.Resource { } return ns.createClient.FromRemote(urlstr, options) - } r, err := get(args...) @@ -183,10 +179,8 @@ func (ns *Namespace) GetRemote(args ...any) resource.Resource { default: return resources.NewErrorResource(resource.NewResourceError(fmt.Errorf("error calling resources.GetRemote: %w", err), make(map[string]any))) } - } return r - } // GetMatch finds the first Resource matching the given pattern, or nil if none found. @@ -344,7 +338,6 @@ func (ns *Namespace) Minify(r resources.ResourceTransformer) (resource.Resource, // as second argument. As an option, you can e.g. specify e.g. the target path (string) // for the converted CSS resource. func (ns *Namespace) ToCSS(args ...any) (resource.Resource, error) { - if len(args) > 2 { return nil, errors.New("must not provide more arguments than resource object and options") } @@ -389,7 +382,7 @@ func (ns *Namespace) ToCSS(args ...any) (resource.Resource, error) { if transpiler == transpilerLibSass { var options scss.Options if targetPath != "" { - options.TargetPath = helpers.ToSlashTrimLeading(targetPath) + options.TargetPath = paths.ToSlashTrimLeading(targetPath) } else if m != nil { options, err = scss.DecodeOptions(m) if err != nil { @@ -413,12 +406,10 @@ func (ns *Namespace) ToCSS(args ...any) (resource.Resource, error) { } return client.ToCSS(r, m) - } // PostCSS processes the given Resource with PostCSS func (ns *Namespace) PostCSS(args ...any) (resource.Resource, error) { - if len(args) > 2 { return nil, errors.New("must not provide more arguments than resource object and options") } @@ -438,7 +429,6 @@ func (ns *Namespace) PostProcess(r resource.Resource) (postpub.PostPublishedReso // Babel processes the given Resource with Babel. func (ns *Namespace) Babel(args ...any) (resource.Resource, error) { - if len(args) > 2 { return nil, errors.New("must not provide more arguments than resource object and options") } diff --git a/tpl/safe/init.go b/tpl/safe/init.go index 8fc0e82ea..3b498e6df 100644 --- a/tpl/safe/init.go +++ b/tpl/safe/init.go @@ -70,11 +70,6 @@ func init() { }, ) - ns.AddMethodMapping(ctx.SanitizeURL, - []string{"sanitizeURL", "sanitizeurl"}, - [][2]string{}, - ) - return ns } diff --git a/tpl/safe/safe.go b/tpl/safe/safe.go index d1a2e8d4e..81b4e0480 100644 --- a/tpl/safe/safe.go +++ b/tpl/safe/safe.go @@ -18,7 +18,6 @@ package safe import ( "html/template" - "github.com/gohugoio/hugo/helpers" "github.com/spf13/cast" ) @@ -65,9 +64,3 @@ func (ns *Namespace) URL(s any) (template.URL, error) { ss, err := cast.ToStringE(s) return template.URL(ss), err } - -// SanitizeURL returns the string s as html/template URL content. 
-func (ns *Namespace) SanitizeURL(s any) (string, error) { - ss, err := cast.ToStringE(s) - return helpers.SanitizeURL(ss), err -} diff --git a/tpl/safe/safe_test.go b/tpl/safe/safe_test.go index 81fa40fd8..f2a54755d 100644 --- a/tpl/safe/safe_test.go +++ b/tpl/safe/safe_test.go @@ -182,30 +182,3 @@ func TestURL(t *testing.T) { c.Assert(result, qt.Equals, test.expect) } } - -func TestSanitizeURL(t *testing.T) { - t.Parallel() - c := qt.New(t) - - ns := New() - - for _, test := range []struct { - a any - expect any - }{ - {"http://foo/../../bar", "http://foo/bar"}, - // errors - {tstNoStringer{}, false}, - } { - - result, err := ns.SanitizeURL(test.a) - - if b, ok := test.expect.(bool); ok && !b { - c.Assert(err, qt.Not(qt.IsNil)) - continue - } - - c.Assert(err, qt.IsNil) - c.Assert(result, qt.Equals, test.expect) - } -} diff --git a/tpl/site/init.go b/tpl/site/init.go index 1c018e14e..1fcb309a0 100644 --- a/tpl/site/init.go +++ b/tpl/site/init.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/tpl/strings/strings.go b/tpl/strings/strings.go index 9f16f1581..cd233b0a4 100644 --- a/tpl/strings/strings.go +++ b/tpl/strings/strings.go @@ -47,7 +47,7 @@ type Namespace struct { func (ns *Namespace) CountRunes(s any) (int, error) { ss, err := cast.ToStringE(s) if err != nil { - return 0, fmt.Errorf("Failed to convert content to string: %w", err) + return 0, fmt.Errorf("failed to convert content to string: %w", err) } counter := 0 @@ -64,7 +64,7 @@ func (ns *Namespace) CountRunes(s any) (int, error) { func (ns *Namespace) RuneCount(s any) (int, error) { ss, err := cast.ToStringE(s) if err != nil { - return 0, fmt.Errorf("Failed to convert content to string: %w", err) + return 0, fmt.Errorf("failed to convert content to string: %w", err) } return utf8.RuneCountInString(ss), nil } @@ -73,12 +73,12 @@ func (ns *Namespace) RuneCount(s any) (int, error) { func (ns *Namespace) CountWords(s any) (int, error) { ss, err := cast.ToStringE(s) if err != nil { - return 0, fmt.Errorf("Failed to convert content to string: %w", err) + return 0, fmt.Errorf("failed to convert content to string: %w", err) } isCJKLanguage, err := regexp.MatchString(`\p{Han}|\p{Hangul}|\p{Hiragana}|\p{Katakana}`, ss) if err != nil { - return 0, fmt.Errorf("Failed to match regex pattern against string: %w", err) + return 0, fmt.Errorf("failed to match regex pattern against string: %w", err) } if !isCJKLanguage { @@ -103,11 +103,11 @@ func (ns *Namespace) CountWords(s any) (int, error) { func (ns *Namespace) Count(substr, s any) (int, error) { substrs, err := cast.ToStringE(substr) if err != nil { - return 0, fmt.Errorf("Failed to convert substr to string: %w", err) + return 0, fmt.Errorf("failed to convert substr to string: %w", err) } ss, err := cast.ToStringE(s) if err != nil { - return 0, fmt.Errorf("Failed to convert s to string: %w", err) + return 0, fmt.Errorf("failed to convert s to string: %w", err) } return strings.Count(ss, substrs), nil } diff --git a/tpl/template.go b/tpl/template.go index 1f0127c66..e9725bd74 100644 --- a/tpl/template.go +++ b/tpl/template.go @@ -23,6 +23,8 @@ import ( "unicode" bp "github.com/gohugoio/hugo/bufferpool" + "github.com/gohugoio/hugo/common/hcontext" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/output/layouts" "github.com/gohugoio/hugo/output" 
@@ -69,6 +71,7 @@ type TemplateHandler interface { ExecuteWithContext(ctx context.Context, t Template, wr io.Writer, data any) error LookupLayout(d layouts.LayoutDescriptor, f output.Format) (Template, bool, error) HasTemplate(name string) bool + GetIdentity(name string) (identity.Identity, bool) } type TemplateLookup interface { @@ -95,6 +98,27 @@ type Template interface { Prepare() (*texttemplate.Template, error) } +// AddIdentity checks if t is an identity.Identity and returns it if so. +// Else it wraps it in a templateIdentity using its name as the base. +func AddIdentity(t Template) Template { + if _, ok := t.(identity.IdentityProvider); ok { + return t + } + return templateIdentityProvider{ + Template: t, + id: identity.StringIdentity(t.Name()), + } +} + +type templateIdentityProvider struct { + Template + id identity.Identity +} + +func (t templateIdentityProvider) GetIdentity() identity.Identity { + return t.id +} + // TemplateParser is used to parse ad-hoc templates, e.g. in the Resource chain. type TemplateParser interface { Parse(name, tpl string) (Template, error) @@ -111,18 +135,6 @@ type TemplateDebugger interface { Debug() } -// templateInfo wraps a Template with some additional information. -type templateInfo struct { - Template - Info -} - -// templateInfo wraps a Template with some additional information. -type templateInfoManager struct { - Template - InfoManager -} - // TemplatesProvider as implemented by deps.Deps. type TemplatesProvider interface { Tmpl() TemplateHandler @@ -144,34 +156,38 @@ type TemplateFuncGetter interface { GetFunc(name string) (reflect.Value, bool) } -// GetPageFromContext returns the top level Page. -func GetPageFromContext(ctx context.Context) any { - return ctx.Value(texttemplate.PageContextKey) +type contextKey string + +// Context manages values passed in the context to templates. +var Context = struct { + DependencyManagerScopedProvider hcontext.ContextDispatcher[identity.DependencyManagerScopedProvider] + GetDependencyManagerInCurrentScope func(context.Context) identity.Manager + SetDependencyManagerInCurrentScope func(context.Context, identity.Manager) context.Context + DependencyScope hcontext.ContextDispatcher[int] + Page hcontext.ContextDispatcher[page] +}{ + DependencyManagerScopedProvider: hcontext.NewContextDispatcher[identity.DependencyManagerScopedProvider](contextKey("DependencyManagerScopedProvider")), + DependencyScope: hcontext.NewContextDispatcher[int](contextKey("DependencyScope")), + Page: hcontext.NewContextDispatcher[page](contextKey("Page")), } -// SetPageInContext sets the top level Page. -func SetPageInContext(ctx context.Context, p page) context.Context { - return context.WithValue(ctx, texttemplate.PageContextKey, p) +func init() { + Context.GetDependencyManagerInCurrentScope = func(ctx context.Context) identity.Manager { + idmsp := Context.DependencyManagerScopedProvider.Get(ctx) + if idmsp != nil { + return idmsp.GetDependencyManagerForScope(Context.DependencyScope.Get(ctx)) + } + return nil + } } type page interface { IsNode() bool } -func GetCallbackFunctionFromContext(ctx context.Context) any { - return ctx.Value(texttemplate.CallbackContextKey) -} - -func SetCallbackFunctionInContext(ctx context.Context, fn any) context.Context { - return context.WithValue(ctx, texttemplate.CallbackContextKey, fn) -} - const hugoNewLinePlaceholder = "___hugonl_" -var ( - stripHTMLReplacerPre = strings.NewReplacer("\n", " ", "
    ", hugoNewLinePlaceholder, "
    ", hugoNewLinePlaceholder, "
    ", hugoNewLinePlaceholder) - whitespaceRe = regexp.MustCompile(`\s+`) -) +var stripHTMLReplacerPre = strings.NewReplacer("\n", " ", "

    ", hugoNewLinePlaceholder, "
    ", hugoNewLinePlaceholder, "
    ", hugoNewLinePlaceholder) // StripHTML strips out all HTML tags in s. func StripHTML(s string) string { diff --git a/tpl/template_info.go b/tpl/template_info.go index 5f748d682..b27debf1f 100644 --- a/tpl/template_info.go +++ b/tpl/template_info.go @@ -13,18 +13,11 @@ package tpl -import ( - "github.com/gohugoio/hugo/identity" -) - // Increments on breaking changes. const TemplateVersion = 2 type Info interface { ParseInfo() ParseInfo - - // Identifies this template and its dependencies. - identity.Provider } type FileInfo interface { @@ -32,13 +25,6 @@ type FileInfo interface { Filename() string } -type InfoManager interface { - ParseInfo() ParseInfo - - // Identifies and manages this template and its dependencies. - identity.Manager -} - type ParseInfo struct { // Set for shortcode templates with any {{ .Inner }} IsInner bool diff --git a/tpl/template_test.go b/tpl/template_test.go index d989b7158..333513a3d 100644 --- a/tpl/template_test.go +++ b/tpl/template_test.go @@ -67,5 +67,3 @@ More text here.

    } } } - -const tstHTMLContent = "

    This is some text.
    And some more.

    " diff --git a/tpl/templates/integration_test.go b/tpl/templates/integration_test.go index 7935fa5e3..7e0bcc824 100644 --- a/tpl/templates/integration_test.go +++ b/tpl/templates/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/tpl/time/init.go b/tpl/time/init.go index 01783270f..5f9dd77bf 100644 --- a/tpl/time/init.go +++ b/tpl/time/init.go @@ -51,7 +51,7 @@ func init() { // 3 or more arguments. Currently not supported. default: - return nil, errors.New("Invalid arguments supplied to `time`. Refer to time documentation: https://gohugo.io/functions/time/") + return nil, errors.New("invalid arguments supplied to `time`") } }, } diff --git a/tpl/time/time.go b/tpl/time/time.go index cd78b83aa..57b115f35 100644 --- a/tpl/time/time.go +++ b/tpl/time/time.go @@ -17,7 +17,6 @@ package time import ( "fmt" "time" - _time "time" "github.com/gohugoio/hugo/common/htime" @@ -47,14 +46,13 @@ func (ns *Namespace) AsTime(v any, args ...any) (any, error) { if err != nil { return nil, err } - loc, err = _time.LoadLocation(locStr) + loc, err = time.LoadLocation(locStr) if err != nil { return nil, err } } return htime.ToTimeInDefaultLocationE(v, loc) - } // Format converts the textual representation of the datetime string in v into @@ -69,7 +67,7 @@ func (ns *Namespace) Format(layout string, v any) (string, error) { } // Now returns the current local time or `clock` time -func (ns *Namespace) Now() _time.Time { +func (ns *Namespace) Now() time.Time { return htime.Now() } @@ -79,34 +77,34 @@ func (ns *Namespace) Now() _time.Time { // such as "300ms", "-1.5h" or "2h45m". // Valid time units are "ns", "us" (or "µs"), "ms", "s", "m", "h". // See https://golang.org/pkg/time/#ParseDuration -func (ns *Namespace) ParseDuration(s any) (_time.Duration, error) { +func (ns *Namespace) ParseDuration(s any) (time.Duration, error) { ss, err := cast.ToStringE(s) if err != nil { return 0, err } - return _time.ParseDuration(ss) + return time.ParseDuration(ss) } -var durationUnits = map[string]_time.Duration{ - "nanosecond": _time.Nanosecond, - "ns": _time.Nanosecond, - "microsecond": _time.Microsecond, - "us": _time.Microsecond, - "µs": _time.Microsecond, - "millisecond": _time.Millisecond, - "ms": _time.Millisecond, - "second": _time.Second, - "s": _time.Second, - "minute": _time.Minute, - "m": _time.Minute, - "hour": _time.Hour, - "h": _time.Hour, +var durationUnits = map[string]time.Duration{ + "nanosecond": time.Nanosecond, + "ns": time.Nanosecond, + "microsecond": time.Microsecond, + "us": time.Microsecond, + "µs": time.Microsecond, + "millisecond": time.Millisecond, + "ms": time.Millisecond, + "second": time.Second, + "s": time.Second, + "minute": time.Minute, + "m": time.Minute, + "hour": time.Hour, + "h": time.Hour, } // Duration converts the given number to a time.Duration. // Unit is one of nanosecond/ns, microsecond/us/µs, millisecond/ms, second/s, minute/m or hour/h. 
-func (ns *Namespace) Duration(unit any, number any) (_time.Duration, error) { +func (ns *Namespace) Duration(unit any, number any) (time.Duration, error) { unitStr, err := cast.ToStringE(unit) if err != nil { return 0, err @@ -119,5 +117,5 @@ func (ns *Namespace) Duration(unit any, number any) (_time.Duration, error) { if err != nil { return 0, err } - return _time.Duration(n) * unitDuration, nil + return time.Duration(n) * unitDuration, nil } diff --git a/tpl/tplimpl/template.go b/tpl/tplimpl/template.go index 053b53b53..a8ba6815d 100644 --- a/tpl/tplimpl/template.go +++ b/tpl/tplimpl/template.go @@ -42,7 +42,6 @@ import ( "github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/hugofs" - "github.com/gohugoio/hugo/hugofs/files" htmltemplate "github.com/gohugoio/hugo/tpl/internal/go_templates/htmltemplate" texttemplate "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate" @@ -121,10 +120,6 @@ func needsBaseTemplate(templ string) bool { return baseTemplateDefineRe.MatchString(templ[idx:]) } -func newIdentity(name string) identity.Manager { - return identity.NewManager(identity.NewPathIdentity(files.ComponentFolderLayouts, name)) -} - func newStandaloneTextTemplate(funcs map[string]any) tpl.TemplateParseFinder { return &textTemplateWrapperWithLock{ RWMutex: &sync.RWMutex{}, @@ -147,7 +142,6 @@ func newTemplateHandlers(d *deps.Deps) (*tpl.TemplateHandlers, error) { h := &templateHandler{ nameBaseTemplateName: make(map[string]string), transformNotFound: make(map[string]*templateState), - identityNotFound: make(map[string][]identity.Manager), shortcodes: make(map[string]*shortcodeTemplates), templateInfo: make(map[string]tpl.Info), @@ -187,7 +181,6 @@ func newTemplateHandlers(d *deps.Deps) (*tpl.TemplateHandlers, error) { Tmpl: e, TxtTmpl: newStandaloneTextTemplate(funcMap), }, nil - } func newTemplateNamespace(funcs map[string]any) *templateNamespace { @@ -200,13 +193,16 @@ func newTemplateNamespace(funcs map[string]any) *templateNamespace { } } -func newTemplateState(templ tpl.Template, info templateInfo) *templateState { +func newTemplateState(templ tpl.Template, info templateInfo, id identity.Identity) *templateState { + if id == nil { + id = info + } return &templateState{ info: info, typ: info.resolveType(), Template: templ, - Manager: newIdentity(info.name), parseInfo: tpl.DefaultParseInfo, + id: id, } } @@ -288,7 +284,7 @@ func (t *templateExec) UnusedTemplates() []tpl.FileInfo { for _, ts := range t.main.templates { ti := ts.info - if strings.HasPrefix(ti.name, "_internal/") || ti.realFilename == "" { + if strings.HasPrefix(ti.name, "_internal/") || ti.meta == nil { continue } @@ -346,9 +342,6 @@ type templateHandler struct { // AST transformation pass. transformNotFound map[string]*templateState - // Holds identities of templates not found during first pass. - identityNotFound map[string][]identity.Manager - // shortcodes maps shortcode name to template variants // (language, output format etc.) of that shortcode. 
shortcodes map[string]*shortcodeTemplates @@ -405,7 +398,6 @@ func (t *templateHandler) LookupLayout(d layouts.LayoutDescriptor, f output.Form cacheVal := layoutCacheEntry{found: found, templ: templ, err: err} t.layoutTemplateCache[key] = cacheVal return cacheVal.templ, cacheVal.found, cacheVal.err - } // This currently only applies to shortcodes and what we get here is the @@ -456,6 +448,22 @@ func (t *templateHandler) HasTemplate(name string) bool { return found } +func (t *templateHandler) GetIdentity(name string) (identity.Identity, bool) { + if _, found := t.needsBaseof[name]; found { + return identity.StringIdentity(name), true + } + + if _, found := t.baseof[name]; found { + return identity.StringIdentity(name), true + } + + tt, found := t.Lookup(name) + if !found { + return nil, false + } + return tt.(identity.IdentityProvider).GetIdentity(), found +} + func (t *templateHandler) findLayout(d layouts.LayoutDescriptor, f output.Format) (tpl.Template, bool, error) { d.OutputFormatName = f.Name d.Suffix = f.MediaType.FirstSuffix.Suffix @@ -488,13 +496,10 @@ func (t *templateHandler) findLayout(d layouts.LayoutDescriptor, f output.Format return nil, false, err } - ts := newTemplateState(templ, overlay) + ts := newTemplateState(templ, overlay, identity.Or(base, overlay)) if found { ts.baseInfo = base - - // Add the base identity to detect changes - ts.Add(identity.NewPathIdentity(files.ComponentFolderLayouts, base.name)) } t.applyTemplateTransformers(t.main, ts) @@ -510,13 +515,6 @@ func (t *templateHandler) findLayout(d layouts.LayoutDescriptor, f output.Format return nil, false, nil } -func (t *templateHandler) findTemplate(name string) *templateState { - if templ, found := t.Lookup(name); found { - return templ.(*templateState) - } - return nil -} - func (t *templateHandler) newTemplateInfo(name, tpl string) templateInfo { var isText bool name, isText = t.nameIsText(name) @@ -539,9 +537,8 @@ func (t *templateHandler) addFileContext(templ tpl.Template, inerr error) error identifiers := t.extractIdentifiers(inerr.Error()) - //lint:ignore ST1008 the error is the main result checkFilename := func(info templateInfo, inErr error) (error, bool) { - if info.filename == "" { + if info.meta == nil { return inErr, false } @@ -560,13 +557,13 @@ func (t *templateHandler) addFileContext(templ tpl.Template, inerr error) error return -1 } - f, err := t.layoutsFs.Open(info.filename) + f, err := info.meta.Open() if err != nil { return inErr, false } defer f.Close() - fe := herrors.NewFileErrorFromName(inErr, info.realFilename) + fe := herrors.NewFileErrorFromName(inErr, info.meta.Filename) fe.UpdateContent(f, lineMatcher) if !fe.ErrorContext().Position.IsValid() { @@ -621,37 +618,33 @@ func (t *templateHandler) addShortcodeVariant(ts *templateState) { } } -func (t *templateHandler) addTemplateFile(name, path string) error { - getTemplate := func(filename string) (templateInfo, error) { - fs := t.Layouts.Fs - b, err := afero.ReadFile(fs, filename) +func (t *templateHandler) addTemplateFile(name string, fim hugofs.FileMetaInfo) error { + getTemplate := func(fim hugofs.FileMetaInfo) (templateInfo, error) { + meta := fim.Meta() + f, err := meta.Open() if err != nil { - return templateInfo{filename: filename, fs: fs}, err + return templateInfo{meta: meta}, err + } + defer f.Close() + b, err := io.ReadAll(f) + if err != nil { + return templateInfo{meta: meta}, err } s := removeLeadingBOM(string(b)) - realFilename := filename - if fi, err := fs.Stat(filename); err == nil { - if fim, ok := 
fi.(hugofs.FileMetaInfo); ok { - realFilename = fim.Meta().Filename - } - } - var isText bool name, isText = t.nameIsText(name) return templateInfo{ - name: name, - isText: isText, - template: s, - filename: filename, - realFilename: realFilename, - fs: fs, + name: name, + isText: isText, + template: s, + meta: meta, }, nil } - tinfo, err := getTemplate(path) + tinfo, err := getTemplate(fim) if err != nil { return err } @@ -741,11 +734,6 @@ func (t *templateHandler) applyTemplateTransformers(ns *templateNamespace, ts *t for k := range c.templateNotFound { t.transformNotFound[k] = ts - t.identityNotFound[k] = append(t.identityNotFound[k], c.t) - } - - for k := range c.identityNotFound { - t.identityNotFound[k] = append(t.identityNotFound[k], c.t) } return c, err @@ -804,9 +792,9 @@ func (t *templateHandler) loadEmbedded() error { } func (t *templateHandler) loadTemplates() error { - walker := func(path string, fi hugofs.FileMetaInfo, err error) error { - if err != nil || fi.IsDir() { - return err + walker := func(path string, fi hugofs.FileMetaInfo) error { + if fi.IsDir() { + return nil } if isDotFile(path) || isBackupFile(path) { @@ -822,14 +810,14 @@ func (t *templateHandler) loadTemplates() error { name = textTmplNamePrefix + name } - if err := t.addTemplateFile(name, path); err != nil { + if err := t.addTemplateFile(name, fi); err != nil { return err } return nil } - if err := helpers.SymbolicWalk(t.Layouts.Fs, "", walker); err != nil { + if err := helpers.Walk(t.Layouts.Fs, "", walker); err != nil { if !herrors.IsNotExist(err) { return err } @@ -861,7 +849,7 @@ func (t *templateHandler) extractPartials(templ tpl.Template) error { continue } - ts := newTemplateState(templ, templateInfo{name: templ.Name()}) + ts := newTemplateState(templ, templateInfo{name: templ.Name()}, nil) ts.typ = templatePartial t.main.mu.RLock() @@ -927,15 +915,6 @@ func (t *templateHandler) postTransform() error { } } - for k, v := range t.identityNotFound { - ts := t.findTemplate(k) - if ts != nil { - for _, im := range v { - im.Add(ts) - } - } - } - for _, v := range t.shortcodes { sort.Slice(v.variants, func(i, j int) bool { v1, v2 := v.variants[i], v.variants[j] @@ -1008,7 +987,7 @@ func (t *templateNamespace) newTemplateLookup(in *templateState) func(name strin return templ } if templ, found := findTemplateIn(name, in); found { - return newTemplateState(templ, templateInfo{name: templ.Name()}) + return newTemplateState(templ, templateInfo{name: templ.Name()}, nil) } return nil } @@ -1026,7 +1005,7 @@ func (t *templateNamespace) parse(info templateInfo) (*templateState, error) { return nil, err } - ts := newTemplateState(templ, info) + ts := newTemplateState(templ, info, nil) t.templates[info.name] = ts @@ -1040,7 +1019,7 @@ func (t *templateNamespace) parse(info templateInfo) (*templateState, error) { return nil, err } - ts := newTemplateState(templ, info) + ts := newTemplateState(templ, info, nil) t.templates[info.name] = ts @@ -1052,12 +1031,16 @@ type templateState struct { typ templateType parseInfo tpl.ParseInfo - identity.Manager + id identity.Identity info templateInfo baseInfo templateInfo // Set when a base template is used. 
} +func (t *templateState) GetIdentity() identity.Identity { + return t.id +} + func (t *templateState) ParseInfo() tpl.ParseInfo { return t.parseInfo } @@ -1066,6 +1049,10 @@ func (t *templateState) isText() bool { return isText(t.Template) } +func (t *templateState) String() string { + return t.Name() +} + func isText(templ tpl.Template) bool { _, isText := templ.(*texttemplate.Template) return isText @@ -1076,11 +1063,6 @@ type templateStateMap struct { templates map[string]*templateState } -type templateWrapperWithLock struct { - *sync.RWMutex - tpl.Template -} - type textTemplateWrapperWithLock struct { *sync.RWMutex *texttemplate.Template diff --git a/tpl/tplimpl/template_ast_transformers.go b/tpl/tplimpl/template_ast_transformers.go index 8d5d8d1b3..92558a903 100644 --- a/tpl/tplimpl/template_ast_transformers.go +++ b/tpl/tplimpl/template_ast_transformers.go @@ -14,17 +14,14 @@ package tplimpl import ( + "errors" "fmt" - "regexp" - "strings" htmltemplate "github.com/gohugoio/hugo/tpl/internal/go_templates/htmltemplate" texttemplate "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate" "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate/parse" - "errors" - "github.com/gohugoio/hugo/common/maps" "github.com/gohugoio/hugo/tpl" "github.com/mitchellh/mapstructure" @@ -41,7 +38,6 @@ const ( type templateContext struct { visited map[string]bool templateNotFound map[string]bool - identityNotFound map[string]bool lookupFn func(name string) *templateState // The last error encountered. @@ -74,19 +70,20 @@ func (c templateContext) getIfNotVisited(name string) *templateState { func newTemplateContext( t *templateState, - lookupFn func(name string) *templateState) *templateContext { + lookupFn func(name string) *templateState, +) *templateContext { return &templateContext{ t: t, lookupFn: lookupFn, visited: make(map[string]bool), templateNotFound: make(map[string]bool), - identityNotFound: make(map[string]bool), } } func applyTemplateTransformers( t *templateState, - lookupFn func(name string) *templateState) (*templateContext, error) { + lookupFn func(name string) *templateState, +) (*templateContext, error) { if t == nil { return nil, errors.New("expected template, but none provided") } @@ -179,7 +176,6 @@ func (c *templateContext) applyTransformations(n parse.Node) (bool, error) { } case *parse.CommandNode: - c.collectPartialInfo(x) c.collectInner(x) keep := c.collectReturnNode(x) @@ -280,39 +276,6 @@ func (c *templateContext) collectInner(n *parse.CommandNode) { } } -var partialRe = regexp.MustCompile(`^partial(Cached)?$|^partials\.Include(Cached)?$`) - -func (c *templateContext) collectPartialInfo(x *parse.CommandNode) { - if len(x.Args) < 2 { - return - } - - first := x.Args[0] - var id string - switch v := first.(type) { - case *parse.IdentifierNode: - id = v.Ident - case *parse.ChainNode: - id = v.String() - } - - if partialRe.MatchString(id) { - partialName := strings.Trim(x.Args[1].String(), "\"") - if !strings.Contains(partialName, ".") { - partialName += ".html" - } - partialName = "partials/" + partialName - info := c.lookupFn(partialName) - - if info != nil { - c.t.Add(info) - } else { - // Delay for later - c.identityNotFound[partialName] = true - } - } -} - func (c *templateContext) collectReturnNode(n *parse.CommandNode) bool { if c.t.typ != templatePartial || c.returnNode != nil { return true diff --git a/tpl/tplimpl/template_ast_transformers_test.go b/tpl/tplimpl/template_ast_transformers_test.go index 90ca325ab..bd889b832 100644 --- 
a/tpl/tplimpl/template_ast_transformers_test.go +++ b/tpl/tplimpl/template_ast_transformers_test.go @@ -52,6 +52,7 @@ func newTestTemplate(templ tpl.Template) *templateState { templateInfo{ name: templ.Name(), }, + nil, ) } diff --git a/tpl/tplimpl/template_errors.go b/tpl/tplimpl/template_errors.go index ac8a72df5..34e73a07a 100644 --- a/tpl/tplimpl/template_errors.go +++ b/tpl/tplimpl/template_errors.go @@ -17,22 +17,22 @@ import ( "fmt" "github.com/gohugoio/hugo/common/herrors" - "github.com/spf13/afero" + "github.com/gohugoio/hugo/hugofs" + "github.com/gohugoio/hugo/identity" ) +var _ identity.Identity = (*templateInfo)(nil) + type templateInfo struct { name string template string isText bool // HTML or plain text template. - // Used to create some error context in error situations - fs afero.Fs + meta *hugofs.FileMeta +} - // The filename relative to the fs above. - filename string - - // The real filename (if possible). Used for logging. - realFilename string +func (t templateInfo) IdentifierBase() string { + return t.name } func (t templateInfo) Name() string { @@ -40,7 +40,7 @@ func (t templateInfo) Name() string { } func (t templateInfo) Filename() string { - return t.realFilename + return t.meta.Filename } func (t templateInfo) IsZero() bool { @@ -53,12 +53,11 @@ func (t templateInfo) resolveType() templateType { func (info templateInfo) errWithFileContext(what string, err error) error { err = fmt.Errorf(what+": %w", err) - fe := herrors.NewFileErrorFromName(err, info.realFilename) - f, err := info.fs.Open(info.filename) + fe := herrors.NewFileErrorFromName(err, info.meta.Filename) + f, err := info.meta.Open() if err != nil { return err } defer f.Close() return fe.UpdateContent(f, nil) - } diff --git a/tpl/tplimpl/template_funcs.go b/tpl/tplimpl/template_funcs.go index 97d1b40dd..8997c83d6 100644 --- a/tpl/tplimpl/template_funcs.go +++ b/tpl/tplimpl/template_funcs.go @@ -22,6 +22,7 @@ import ( "github.com/gohugoio/hugo/common/hreflect" "github.com/gohugoio/hugo/common/maps" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/tpl" template "github.com/gohugoio/hugo/tpl/internal/go_templates/htmltemplate" @@ -65,9 +66,8 @@ import ( ) var ( - _ texttemplate.ExecHelper = (*templateExecHelper)(nil) - zero reflect.Value - contextInterface = reflect.TypeOf((*context.Context)(nil)).Elem() + _ texttemplate.ExecHelper = (*templateExecHelper)(nil) + zero reflect.Value ) type templateExecHelper struct { @@ -81,7 +81,7 @@ func (t *templateExecHelper) GetFunc(ctx context.Context, tmpl texttemplate.Prep if fn, found := t.funcs[name]; found { if fn.Type().NumIn() > 0 { first := fn.Type().In(0) - if first.Implements(contextInterface) { + if hreflect.IsContextType(first) { // TODO(bep) check if we can void this conversion every time -- and if that matters. // The first argument may be context.Context. This is never provided by the end user, but it's used to pass down // contextual information, e.g. the top level data context (e.g. Page). 
@@ -95,6 +95,13 @@ func (t *templateExecHelper) GetFunc(ctx context.Context, tmpl texttemplate.Prep } func (t *templateExecHelper) Init(ctx context.Context, tmpl texttemplate.Preparer) { + if t.running { + _, ok := tmpl.(identity.IdentityProvider) + if ok { + t.trackDependencies(ctx, tmpl, "", reflect.Value{}) + } + + } } func (t *templateExecHelper) GetMapValue(ctx context.Context, tmpl texttemplate.Preparer, receiver, key reflect.Value) (reflect.Value, bool) { @@ -116,22 +123,14 @@ func (t *templateExecHelper) GetMapValue(ctx context.Context, tmpl texttemplate. var typeParams = reflect.TypeOf(maps.Params{}) func (t *templateExecHelper) GetMethod(ctx context.Context, tmpl texttemplate.Preparer, receiver reflect.Value, name string) (method reflect.Value, firstArg reflect.Value) { - if t.running { - switch name { - case "GetPage", "Render": - if info, ok := tmpl.(tpl.Info); ok { - if m := receiver.MethodByName(name + "WithTemplateInfo"); m.IsValid() { - return m, reflect.ValueOf(info) - } - } - } - } - if strings.EqualFold(name, "mainsections") && receiver.Type() == typeParams && receiver.Pointer() == t.siteParams.Pointer() { - // MOved to site.MainSections in Hugo 0.112.0. + // Moved to site.MainSections in Hugo 0.112.0. receiver = t.site name = "MainSections" + } + if t.running { + ctx = t.trackDependencies(ctx, tmpl, name, receiver) } fn := hreflect.GetMethodByName(receiver, name) @@ -141,7 +140,7 @@ func (t *templateExecHelper) GetMethod(ctx context.Context, tmpl texttemplate.Pr if fn.Type().NumIn() > 0 { first := fn.Type().In(0) - if first.Implements(contextInterface) { + if hreflect.IsContextType(first) { // The first argument may be context.Context. This is never provided by the end user, but it's used to pass down // contextual information, e.g. the top level data context (e.g. Page). return fn, reflect.ValueOf(ctx) @@ -151,6 +150,43 @@ func (t *templateExecHelper) GetMethod(ctx context.Context, tmpl texttemplate.Pr return fn, zero } +func (t *templateExecHelper) trackDependencies(ctx context.Context, tmpl texttemplate.Preparer, name string, receiver reflect.Value) context.Context { + if tmpl == nil { + panic("must provide a template") + } + + idm := tpl.Context.GetDependencyManagerInCurrentScope(ctx) + if idm == nil { + return ctx + } + + if info, ok := tmpl.(identity.IdentityProvider); ok { + idm.AddIdentity(info.GetIdentity()) + } + + // The receive is the "." in the method execution or map lookup, e.g. the Page in .Resources. + if hreflect.IsValid(receiver) { + in := receiver.Interface() + + if idlp, ok := in.(identity.ForEeachIdentityByNameProvider); ok { + // This will skip repeated .RelPermalink usage on transformed resources + // which is not fingerprinted, e.g. to + // prevent all HTML pages to be re-rendered on a small CSS change. + idlp.ForEeachIdentityByName(name, func(id identity.Identity) bool { + idm.AddIdentity(id) + return false + }) + } else { + identity.WalkIdentitiesShallow(in, func(level int, id identity.Identity) bool { + idm.AddIdentity(id) + return false + }) + } + } + + return ctx +} + func newTemplateExecuter(d *deps.Deps) (texttemplate.Executer, map[string]reflect.Value) { funcs := createFuncMap(d) funcsv := make(map[string]reflect.Value) diff --git a/tpl/transform/integration_test.go b/tpl/transform/integration_test.go index f035ec719..351420a67 100644 --- a/tpl/transform/integration_test.go +++ b/tpl/transform/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. 
All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -77,7 +77,7 @@ disableKinds = ['section','sitemap','taxonomy','term'] --- title: p1 --- -a **b** c +a **b** ` + "\v" + ` c ` b := hugolib.Test(t, files) diff --git a/tpl/transform/transform.go b/tpl/transform/transform.go index 8078bc0ce..7054c6988 100644 --- a/tpl/transform/transform.go +++ b/tpl/transform/transform.go @@ -22,10 +22,11 @@ import ( "html/template" "strings" - "github.com/gohugoio/hugo/cache/namedmemcache" + "github.com/gohugoio/hugo/cache/dynacache" "github.com/gohugoio/hugo/markup/converter/hooks" "github.com/gohugoio/hugo/markup/highlight" "github.com/gohugoio/hugo/markup/highlight/chromalexers" + "github.com/gohugoio/hugo/resources" "github.com/gohugoio/hugo/tpl" "github.com/gohugoio/hugo/deps" @@ -35,21 +36,23 @@ import ( // New returns a new instance of the transform-namespaced template functions. func New(deps *deps.Deps) *Namespace { - cache := namedmemcache.New() - deps.BuildStartListeners.Add( - func() { - cache.Clear() - }) + if deps.MemCache == nil { + panic("must provide MemCache") + } return &Namespace{ - cache: cache, - deps: deps, + deps: deps, + cache: dynacache.GetOrCreatePartition[string, *resources.StaleValue[any]]( + deps.MemCache, + "/tmpl/transform", + dynacache.OptionsPartition{Weight: 30, ClearWhen: dynacache.ClearOnChange}, + ), } } // Namespace provides template functions for the "transform" namespace. type Namespace struct { - cache *namedmemcache.Cache + cache *dynacache.Partition[string, *resources.StaleValue[any]] deps *deps.Deps } @@ -154,7 +157,6 @@ func (ns *Namespace) XMLEscape(s any) (string, error) { // Markdownify renders s from Markdown to HTML. func (ns *Namespace) Markdownify(ctx context.Context, s any) (template.HTML, error) { - home := ns.deps.Site.Home() if home == nil { panic("home must not be nil") diff --git a/tpl/transform/unmarshal.go b/tpl/transform/unmarshal.go index 3936126ca..d876c88d7 100644 --- a/tpl/transform/unmarshal.go +++ b/tpl/transform/unmarshal.go @@ -14,18 +14,18 @@ package transform import ( + "errors" "fmt" "io" "strings" + "github.com/gohugoio/hugo/resources" "github.com/gohugoio/hugo/resources/resource" "github.com/gohugoio/hugo/common/types" "github.com/mitchellh/mapstructure" - "errors" - "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/parser/metadecoders" @@ -71,7 +71,7 @@ func (ns *Namespace) Unmarshal(args ...any) (any, error) { key += decoder.OptionsKey() } - return ns.cache.GetOrCreate(key, func() (any, error) { + v, err := ns.cache.GetOrCreate(key, func(string) (*resources.StaleValue[any], error) { f := metadecoders.FormatFromStrings(r.MediaType().Suffixes()...) 
if f == "" { return nil, fmt.Errorf("MIME %q not supported", r.MediaType()) @@ -88,8 +88,24 @@ func (ns *Namespace) Unmarshal(args ...any) (any, error) { return nil, err } - return decoder.Unmarshal(b, f) + v, err := decoder.Unmarshal(b, f) + if err != nil { + return nil, err + } + + return &resources.StaleValue[any]{ + Value: v, + IsStaleFunc: func() bool { + return resource.IsStaleAny(r) + }, + }, nil }) + if err != nil { + return nil, err + } + + return v.Value, nil + } dataStr, err := types.ToStringE(data) @@ -103,14 +119,29 @@ func (ns *Namespace) Unmarshal(args ...any) (any, error) { key := helpers.MD5String(dataStr) - return ns.cache.GetOrCreate(key, func() (any, error) { + v, err := ns.cache.GetOrCreate(key, func(string) (*resources.StaleValue[any], error) { f := decoder.FormatFromContentString(dataStr) if f == "" { return nil, errors.New("unknown format") } - return decoder.Unmarshal([]byte(dataStr), f) + v, err := decoder.Unmarshal([]byte(dataStr), f) + if err != nil { + return nil, err + } + + return &resources.StaleValue[any]{ + Value: v, + IsStaleFunc: func() bool { + return false + }, + }, nil }) + if err != nil { + return nil, err + } + + return v.Value, nil } func decodeDecoder(m map[string]any) (metadecoders.Decoder, error) { diff --git a/tpl/transform/unmarshal_test.go b/tpl/transform/unmarshal_test.go index 12774298a..1b976c449 100644 --- a/tpl/transform/unmarshal_test.go +++ b/tpl/transform/unmarshal_test.go @@ -14,6 +14,7 @@ package transform_test import ( + "context" "fmt" "math/rand" "strings" @@ -193,9 +194,11 @@ func BenchmarkUnmarshalString(b *testing.B) { jsons[i] = strings.Replace(testJSON, "ROOT_KEY", fmt.Sprintf("root%d", i), 1) } + ctx := context.Background() + b.ResetTimer() for i := 0; i < b.N; i++ { - result, err := ns.Unmarshal(jsons[rand.Intn(numJsons)]) + result, err := ns.Unmarshal(ctx, jsons[rand.Intn(numJsons)]) if err != nil { b.Fatal(err) } @@ -220,9 +223,11 @@ func BenchmarkUnmarshalResource(b *testing.B) { jsons[i] = testContentResource{key: key, content: strings.Replace(testJSON, "ROOT_KEY", key, 1), mime: media.Builtin.JSONType} } + ctx := context.Background() + b.ResetTimer() for i := 0; i < b.N; i++ { - result, err := ns.Unmarshal(jsons[rand.Intn(numJsons)]) + result, err := ns.Unmarshal(ctx, jsons[rand.Intn(numJsons)]) if err != nil { b.Fatal(err) } diff --git a/transform/livereloadinject/livereloadinject.go b/transform/livereloadinject/livereloadinject.go index 1e21a92e6..e88e3895b 100644 --- a/transform/livereloadinject/livereloadinject.go +++ b/transform/livereloadinject/livereloadinject.go @@ -36,7 +36,7 @@ var ( // New creates a function that can be used to inject a script tag for // the livereload JavaScript at the start of an HTML document's head. 
-func New(baseURL url.URL) transform.Transformer { +func New(baseURL *url.URL) transform.Transformer { return func(ft transform.FromTo) error { b := ft.From().Bytes() diff --git a/transform/livereloadinject/livereloadinject_test.go b/transform/livereloadinject/livereloadinject_test.go index dc8740208..d406b9c4d 100644 --- a/transform/livereloadinject/livereloadinject_test.go +++ b/transform/livereloadinject/livereloadinject_test.go @@ -37,7 +37,7 @@ func TestLiveReloadInject(t *testing.T) { out := new(bytes.Buffer) in := strings.NewReader(s) - tr := transform.New(New(*lrurl)) + tr := transform.New(New(lrurl)) tr.Apply(out, in) return out.String() @@ -134,7 +134,7 @@ func BenchmarkLiveReloadInject(b *testing.B) { if err != nil { b.Fatalf("Parsing test URL failed") } - tr := transform.New(New(*lrurl)) + tr := transform.New(New(lrurl)) b.ResetTimer() for i := 0; i < b.N; i++ { diff --git a/unused.sh b/unused.sh deleted file mode 100755 index 351892368..000000000 --- a/unused.sh +++ /dev/null @@ -1 +0,0 @@ -deadcode -test ./... | grep -v go_templ \ No newline at end of file diff --git a/watcher/filenotify/poller_test.go b/watcher/filenotify/poller_test.go index 8dadec1af..9b52b9780 100644 --- a/watcher/filenotify/poller_test.go +++ b/watcher/filenotify/poller_test.go @@ -20,9 +20,8 @@ const ( ) var ( - isMacOs = runtime.GOOS == "darwin" - isWindows = runtime.GOOS == "windows" - isCI = htesting.IsCI() + isMacOs = runtime.GOOS == "darwin" + isCI = htesting.IsCI() ) func TestPollerAddRemove(t *testing.T) {