Avoid importing deploy from config when nodeploy tag is set

Test:

```
go list -tags nodeploy ./... | grep deploy
```

Fixes #12009
Bjørn Erik Pedersen 2024-02-07 18:24:02 +01:00
parent a65622a13e
commit 0257eb50a4
8 changed files with 65 additions and 60 deletions
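
The change moves the deployment configuration types into the dependency-light `deploy/deployconfig` package, so the config packages no longer import `deploy` (and, transitively, the cloud SDKs). The `nodeploy` build-constraint side is unchanged by this commit; the sketch below only illustrates that pattern with a hypothetical stub name, not Hugo's actual wiring:

```go
//go:build nodeploy

// Illustrative stub, not part of this commit: when building with
// -tags nodeploy, a file like this can be compiled in place of the real
// deploy command, so github.com/gohugoio/hugo/deploy never enters the
// import graph, while deploy/deployconfig stays importable everywhere.
package commands

import (
	"context"
	"errors"
)

// newDeployCommandStub is a hypothetical placeholder for the disabled command.
func newDeployCommandStub() func(context.Context) error {
	return func(context.Context) error {
		return errors.New("deploy: this binary was built with the nodeploy tag")
	}
}
```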

```diff
@@ -32,8 +32,10 @@ package commands
 import (
 	"context"
 
-	"github.com/bep/simplecobra"
-	"github.com/gohugoio/hugo/deploy"
+	"github.com/gohugoio/hugo/deploy"
+	"github.com/gohugoio/hugo/deploy/deployconfig"
+
+	"github.com/bep/simplecobra"
 	"github.com/spf13/cobra"
 )
 
@@ -62,9 +64,9 @@ documentation.
 			cmd.Flags().Bool("confirm", false, "ask for confirmation before making changes to the target")
 			cmd.Flags().Bool("dryRun", false, "dry run")
 			cmd.Flags().Bool("force", false, "force upload of all files")
-			cmd.Flags().Bool("invalidateCDN", deploy.DefaultConfig.InvalidateCDN, "invalidate the CDN cache listed in the deployment target")
-			cmd.Flags().Int("maxDeletes", deploy.DefaultConfig.MaxDeletes, "maximum # of files to delete, or -1 to disable")
-			cmd.Flags().Int("workers", deploy.DefaultConfig.Workers, "number of workers to transfer files. defaults to 10")
+			cmd.Flags().Bool("invalidateCDN", deployconfig.DefaultConfig.InvalidateCDN, "invalidate the CDN cache listed in the deployment target")
+			cmd.Flags().Int("maxDeletes", deployconfig.DefaultConfig.MaxDeletes, "maximum # of files to delete, or -1 to disable")
+			cmd.Flags().Int("workers", deployconfig.DefaultConfig.Workers, "number of workers to transfer files. defaults to 10")
 		},
 	}
 }
```

```diff
@@ -37,7 +37,7 @@ import (
 	"github.com/gohugoio/hugo/config/privacy"
 	"github.com/gohugoio/hugo/config/security"
 	"github.com/gohugoio/hugo/config/services"
-	"github.com/gohugoio/hugo/deploy"
+	"github.com/gohugoio/hugo/deploy/deployconfig"
 	"github.com/gohugoio/hugo/helpers"
 	"github.com/gohugoio/hugo/langs"
 	"github.com/gohugoio/hugo/markup/markup_config"
@@ -141,8 +141,8 @@ type Config struct {
 	// <docsmeta>{"refs": ["config:languages:menus"] }</docsmeta>
 	Menus *config.ConfigNamespace[map[string]navigation.MenuConfig, navigation.Menus] `mapstructure:"-"`
 
-	// The deployment configuration section contains for hugo deploy.
-	Deployment deploy.DeployConfig `mapstructure:"-"`
+	// The deployment configuration section contains for hugo deployconfig.
+	Deployment deployconfig.DeployConfig `mapstructure:"-"`
 
 	// Module configuration.
 	Module modules.Config `mapstructure:"-"`
```

```diff
@@ -25,7 +25,7 @@ import (
 	"github.com/gohugoio/hugo/config/privacy"
 	"github.com/gohugoio/hugo/config/security"
 	"github.com/gohugoio/hugo/config/services"
-	"github.com/gohugoio/hugo/deploy"
+	"github.com/gohugoio/hugo/deploy/deployconfig"
 	"github.com/gohugoio/hugo/langs"
 	"github.com/gohugoio/hugo/markup/markup_config"
 	"github.com/gohugoio/hugo/media"
@@ -333,7 +333,7 @@ var allDecoderSetups = map[string]decodeWeight{
 		key: "deployment",
 		decode: func(d decodeWeight, p decodeConfig) error {
 			var err error
-			p.c.Deployment, err = deploy.DecodeConfig(p.p)
+			p.c.Deployment, err = deployconfig.DecodeConfig(p.p)
 			return err
 		},
 	},
```

```diff
@@ -24,6 +24,7 @@ import (
 	"github.com/aws/aws-sdk-go-v2/aws"
 	"github.com/aws/aws-sdk-go-v2/service/cloudfront"
 	"github.com/aws/aws-sdk-go-v2/service/cloudfront/types"
+	"github.com/gohugoio/hugo/deploy/deployconfig"
 
 	gcaws "gocloud.dev/aws"
 )
@@ -38,7 +39,7 @@ var v2ConfigValidParams = map[string]bool{
 
 // InvalidateCloudFront invalidates the CloudFront cache for distributionID.
 // Uses AWS credentials config from the bucket URL.
-func InvalidateCloudFront(ctx context.Context, target *Target) error {
+func InvalidateCloudFront(ctx context.Context, target *deployconfig.Target) error {
 	u, err := url.Parse(target.URL)
 	if err != nil {
 		return err
```

```diff
@@ -38,6 +38,7 @@ import (
 	"github.com/gobwas/glob"
 	"github.com/gohugoio/hugo/common/loggers"
 	"github.com/gohugoio/hugo/config"
+	"github.com/gohugoio/hugo/deploy/deployconfig"
 	"github.com/gohugoio/hugo/media"
 	"github.com/spf13/afero"
 	"golang.org/x/text/unicode/norm"
@@ -57,10 +58,10 @@ type Deployer struct {
 	mediaTypes media.Types // Hugo's MediaType to guess ContentType
 	quiet      bool        // true reduces STDOUT // TODO(bep) remove, this is a global feature.
 
-	cfg    DeployConfig
+	cfg    deployconfig.DeployConfig
 	logger loggers.Logger
 
-	target *Target // the target to deploy to
+	target *deployconfig.Target // the target to deploy to
 
 	// For tests...
 	summary deploySummary // summary of latest Deploy results
@@ -74,7 +75,7 @@ const metaMD5Hash = "md5chksum" // the meta key to store md5hash in
 
 // New constructs a new *Deployer.
 func New(cfg config.AllProvider, logger loggers.Logger, localFs afero.Fs) (*Deployer, error) {
-	dcfg := cfg.GetConfigSection(deploymentConfigKey).(DeployConfig)
+	dcfg := cfg.GetConfigSection(deployconfig.DeploymentConfigKey).(deployconfig.DeployConfig)
 	targetName := dcfg.Target
 
 	if len(dcfg.Targets) == 0 {
@@ -83,7 +84,7 @@ func New(cfg config.AllProvider, logger loggers.Logger, localFs afero.Fs) (*Depl
 	mediaTypes := cfg.GetConfigSection("mediaTypes").(media.Types)
 
 	// Find the target to deploy to.
-	var tgt *Target
+	var tgt *deployconfig.Target
 	if targetName == "" {
 		// Default to the first target.
 		tgt = dcfg.Targets[0]
@@ -133,7 +134,7 @@ func (d *Deployer) Deploy(ctx context.Context) error {
 	// Load local files from the source directory.
 	var include, exclude glob.Glob
 	if d.target != nil {
-		include, exclude = d.target.includeGlob, d.target.excludeGlob
+		include, exclude = d.target.IncludeGlob, d.target.ExcludeGlob
 	}
 	local, err := d.walkLocal(d.localFs, d.cfg.Matchers, include, exclude, d.mediaTypes)
 	if err != nil {
@@ -178,7 +179,7 @@ func (d *Deployer) Deploy(ctx context.Context) error {
 
 	// Order the uploads. They are organized in groups; all uploads in a group
 	// must be complete before moving on to the next group.
-	uploadGroups := applyOrdering(d.cfg.ordering, uploads)
+	uploadGroups := applyOrdering(d.cfg.Ordering, uploads)
 
 	nParallel := d.cfg.Workers
 	var errs []error
@@ -343,14 +344,14 @@ type localFile struct {
 	UploadSize int64
 
 	fs         afero.Fs
-	matcher    *Matcher
+	matcher    *deployconfig.Matcher
 	md5        []byte       // cache
 	gzipped    bytes.Buffer // cached of gzipped contents if gzipping
 	mediaTypes media.Types
 }
 
 // newLocalFile initializes a *localFile.
-func newLocalFile(fs afero.Fs, nativePath, slashpath string, m *Matcher, mt media.Types) (*localFile, error) {
+func newLocalFile(fs afero.Fs, nativePath, slashpath string, m *deployconfig.Matcher, mt media.Types) (*localFile, error) {
 	f, err := fs.Open(nativePath)
 	if err != nil {
 		return nil, err
@@ -482,7 +483,7 @@ func knownHiddenDirectory(name string) bool {
 
 // walkLocal walks the source directory and returns a flat list of files,
 // using localFile.SlashPath as the map keys.
-func (d *Deployer) walkLocal(fs afero.Fs, matchers []*Matcher, include, exclude glob.Glob, mediaTypes media.Types) (map[string]*localFile, error) {
+func (d *Deployer) walkLocal(fs afero.Fs, matchers []*deployconfig.Matcher, include, exclude glob.Glob, mediaTypes media.Types) (map[string]*localFile, error) {
 	retval := map[string]*localFile{}
 	err := afero.Walk(fs, "", func(path string, info os.FileInfo, err error) error {
 		if err != nil {
@@ -521,7 +522,7 @@ func (d *Deployer) walkLocal(fs afero.Fs, matchers []*Matcher, include, exclude
 		}
 
 		// Find the first matching matcher (if any).
-		var m *Matcher
+		var m *deployconfig.Matcher
 		for _, cur := range matchers {
 			if cur.Matches(slashpath) {
 				m = cur
```
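
For readers unfamiliar with the `Ordering` field that is now exported: it holds the compiled `order` patterns, and the deployer uploads files in groups, one group per pattern plus a trailing catch-all. A small self-contained sketch of that grouping idea (the real `applyOrdering` is unexported and may differ in detail):

```go
package main

import (
	"fmt"
	"regexp"
)

// groupByOrdering mirrors the idea behind the deployer's applyOrdering:
// paths are assigned to the group of the first Ordering regexp they match;
// anything that matches nothing lands in a final group that uploads last.
func groupByOrdering(ordering []*regexp.Regexp, paths []string) [][]string {
	groups := make([][]string, len(ordering)+1)
	for _, p := range paths {
		placed := false
		for i, re := range ordering {
			if re.MatchString(p) {
				groups[i] = append(groups[i], p)
				placed = true
				break
			}
		}
		if !placed {
			groups[len(ordering)] = append(groups[len(ordering)], p)
		}
	}
	return groups
}

func main() {
	ordering := []*regexp.Regexp{regexp.MustCompile(`^static/`)}
	fmt.Println(groupByOrdering(ordering, []string{"index.html", "static/app.js"}))
	// Output: [[static/app.js] [index.html]]
}
```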

```diff
@@ -31,6 +31,7 @@ import (
 	"testing"
 
 	"github.com/gohugoio/hugo/common/loggers"
+	"github.com/gohugoio/hugo/deploy/deployconfig"
 	"github.com/gohugoio/hugo/hugofs"
 	"github.com/gohugoio/hugo/media"
 	"github.com/google/go-cmp/cmp"
@@ -110,7 +111,7 @@ func TestFindDiffs(t *testing.T) {
 		{
 			Description: "local == remote with route.Force true -> diffs",
 			Local: []*localFile{
-				{NativePath: "aaa", SlashPath: "aaa", UploadSize: 1, matcher: &Matcher{Force: true}, md5: hash1},
+				{NativePath: "aaa", SlashPath: "aaa", UploadSize: 1, matcher: &deployconfig.Matcher{Force: true}, md5: hash1},
 				makeLocal("bbb", 2, hash1),
 			},
 			Remote: []*blob.ListObject{
@@ -293,7 +294,7 @@ func TestLocalFile(t *testing.T) {
 	tests := []struct {
 		Description      string
 		Path             string
-		Matcher          *Matcher
+		Matcher          *deployconfig.Matcher
 		MediaTypesConfig map[string]any
 		WantContent      []byte
 		WantSize         int64
@@ -319,7 +320,7 @@ func TestLocalFile(t *testing.T) {
 		{
 			Description: "CacheControl from matcher",
 			Path:        "foo.txt",
-			Matcher:     &Matcher{CacheControl: "max-age=630720000"},
+			Matcher:     &deployconfig.Matcher{CacheControl: "max-age=630720000"},
 			WantContent: contentBytes,
 			WantSize:    contentLen,
 			WantMD5:     contentMD5[:],
@@ -328,7 +329,7 @@ func TestLocalFile(t *testing.T) {
 		{
 			Description: "ContentEncoding from matcher",
 			Path:        "foo.txt",
-			Matcher:     &Matcher{ContentEncoding: "foobar"},
+			Matcher:     &deployconfig.Matcher{ContentEncoding: "foobar"},
 			WantContent: contentBytes,
 			WantSize:    contentLen,
 			WantMD5:     contentMD5[:],
@@ -337,7 +338,7 @@ func TestLocalFile(t *testing.T) {
 		{
 			Description: "ContentType from matcher",
 			Path:        "foo.txt",
-			Matcher:     &Matcher{ContentType: "foo/bar"},
+			Matcher:     &deployconfig.Matcher{ContentType: "foo/bar"},
 			WantContent: contentBytes,
 			WantSize:    contentLen,
 			WantMD5:     contentMD5[:],
@@ -346,7 +347,7 @@ func TestLocalFile(t *testing.T) {
 		{
 			Description: "gzipped content",
 			Path:        "foo.txt",
-			Matcher:     &Matcher{Gzip: true},
+			Matcher:     &deployconfig.Matcher{Gzip: true},
 			WantContent: gzBytes,
 			WantSize:    gzLen,
 			WantMD5:     gzMD5[:],
@@ -560,7 +561,7 @@ func TestEndToEndSync(t *testing.T) {
 			localFs:    test.fs,
 			bucket:     test.bucket,
 			mediaTypes: media.DefaultTypes,
-			cfg:        DeployConfig{MaxDeletes: -1},
+			cfg:        deployconfig.DeployConfig{MaxDeletes: -1},
 		}
 
 		// Initial deployment should sync remote with local.
@@ -643,7 +644,7 @@ func TestMaxDeletes(t *testing.T) {
 			localFs:    test.fs,
 			bucket:     test.bucket,
 			mediaTypes: media.DefaultTypes,
-			cfg:        DeployConfig{MaxDeletes: -1},
+			cfg:        deployconfig.DeployConfig{MaxDeletes: -1},
 		}
 
 		// Sync remote with local.
@@ -764,16 +765,16 @@ func TestIncludeExclude(t *testing.T) {
 		if err != nil {
 			t.Fatal(err)
 		}
-		tgt := &Target{
+		tgt := &deployconfig.Target{
 			Include: test.Include,
 			Exclude: test.Exclude,
 		}
-		if err := tgt.parseIncludeExclude(); err != nil {
+		if err := tgt.ParseIncludeExclude(); err != nil {
 			t.Error(err)
 		}
 		deployer := &Deployer{
 			localFs:    fsTest.fs,
-			cfg:        DeployConfig{MaxDeletes: -1}, bucket: fsTest.bucket,
+			cfg:        deployconfig.DeployConfig{MaxDeletes: -1}, bucket: fsTest.bucket,
 			target:     tgt,
 			mediaTypes: media.DefaultTypes,
 		}
@@ -830,7 +831,7 @@ func TestIncludeExcludeRemoteDelete(t *testing.T) {
 		}
 
 		deployer := &Deployer{
 			localFs:    fsTest.fs,
-			cfg:        DeployConfig{MaxDeletes: -1}, bucket: fsTest.bucket,
+			cfg:        deployconfig.DeployConfig{MaxDeletes: -1}, bucket: fsTest.bucket,
 			mediaTypes: media.DefaultTypes,
 		}
@@ -848,11 +849,11 @@ func TestIncludeExcludeRemoteDelete(t *testing.T) {
 		}
 
 		// Second sync
-		tgt := &Target{
+		tgt := &deployconfig.Target{
 			Include: test.Include,
 			Exclude: test.Exclude,
 		}
-		if err := tgt.parseIncludeExclude(); err != nil {
+		if err := tgt.ParseIncludeExclude(); err != nil {
 			t.Error(err)
 		}
 		deployer.target = tgt
@@ -882,7 +883,7 @@ func TestCompression(t *testing.T) {
 
 		deployer := &Deployer{
 			localFs:    test.fs,
 			bucket:     test.bucket,
-			cfg:        DeployConfig{MaxDeletes: -1, Matchers: []*Matcher{{Pattern: ".*", Gzip: true, re: regexp.MustCompile(".*")}}},
+			cfg:        deployconfig.DeployConfig{MaxDeletes: -1, Matchers: []*deployconfig.Matcher{{Pattern: ".*", Gzip: true, Re: regexp.MustCompile(".*")}}},
 			mediaTypes: media.DefaultTypes,
 		}
@@ -937,7 +938,7 @@ func TestMatching(t *testing.T) {
 
 		deployer := &Deployer{
 			localFs:    test.fs,
 			bucket:     test.bucket,
-			cfg:        DeployConfig{MaxDeletes: -1, Matchers: []*Matcher{{Pattern: "^subdir/aaa$", Force: true, re: regexp.MustCompile("^subdir/aaa$")}}},
+			cfg:        deployconfig.DeployConfig{MaxDeletes: -1, Matchers: []*deployconfig.Matcher{{Pattern: "^subdir/aaa$", Force: true, Re: regexp.MustCompile("^subdir/aaa$")}}},
 			mediaTypes: media.DefaultTypes,
 		}
@@ -962,7 +963,7 @@ func TestMatching(t *testing.T) {
 		}
 
 		// Repeat with a matcher that should now match 3 files.
-		deployer.cfg.Matchers = []*Matcher{{Pattern: "aaa", Force: true, re: regexp.MustCompile("aaa")}}
+		deployer.cfg.Matchers = []*deployconfig.Matcher{{Pattern: "aaa", Force: true, Re: regexp.MustCompile("aaa")}}
 		if err := deployer.Deploy(ctx); err != nil {
 			t.Errorf("no-op deploy with triple force matcher: %v", err)
 		}
```

```diff
@@ -1,4 +1,4 @@
-// Copyright 2019 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package deploy
+package deployconfig
 
 import (
 	"errors"
@@ -24,7 +24,7 @@ import (
 	"github.com/mitchellh/mapstructure"
 )
 
-const deploymentConfigKey = "deployment"
+const DeploymentConfigKey = "deployment"
 
 // DeployConfig is the complete configuration for deployment.
 type DeployConfig struct {
@@ -48,7 +48,7 @@ type DeployConfig struct {
 	// Number of concurrent workers to use when uploading files.
 	Workers int
 
-	ordering []*regexp.Regexp // compiled Order
+	Ordering []*regexp.Regexp `json:"-"` // compiled Order
 }
 
 type Target struct {
@@ -67,20 +67,20 @@ type Target struct {
 	Exclude string
 
 	// Parsed versions of Include/Exclude.
-	includeGlob glob.Glob
-	excludeGlob glob.Glob
+	IncludeGlob glob.Glob `json:"-"`
+	ExcludeGlob glob.Glob `json:"-"`
 }
 
-func (tgt *Target) parseIncludeExclude() error {
+func (tgt *Target) ParseIncludeExclude() error {
 	var err error
 	if tgt.Include != "" {
-		tgt.includeGlob, err = hglob.GetGlob(tgt.Include)
+		tgt.IncludeGlob, err = hglob.GetGlob(tgt.Include)
 		if err != nil {
 			return fmt.Errorf("invalid deployment.target.include %q: %v", tgt.Include, err)
 		}
 	}
 	if tgt.Exclude != "" {
-		tgt.excludeGlob, err = hglob.GetGlob(tgt.Exclude)
+		tgt.ExcludeGlob, err = hglob.GetGlob(tgt.Exclude)
 		if err != nil {
 			return fmt.Errorf("invalid deployment.target.exclude %q: %v", tgt.Exclude, err)
 		}
@@ -115,12 +115,12 @@ type Matcher struct {
 	// other route-determined metadata (e.g., ContentType) has changed.
 	Force bool
 
-	// re is Pattern compiled.
-	re *regexp.Regexp
+	// Re is Pattern compiled.
+	Re *regexp.Regexp `json:"-"`
 }
 
 func (m *Matcher) Matches(path string) bool {
-	return m.re.MatchString(path)
+	return m.Re.MatchString(path)
 }
 
 var DefaultConfig = DeployConfig{
@@ -133,10 +133,10 @@ var DefaultConfig = DeployConfig{
 func DecodeConfig(cfg config.Provider) (DeployConfig, error) {
 	dcfg := DefaultConfig
 
-	if !cfg.IsSet(deploymentConfigKey) {
+	if !cfg.IsSet(DeploymentConfigKey) {
 		return dcfg, nil
 	}
-	if err := mapstructure.WeakDecode(cfg.GetStringMap(deploymentConfigKey), &dcfg); err != nil {
+	if err := mapstructure.WeakDecode(cfg.GetStringMap(DeploymentConfigKey), &dcfg); err != nil {
 		return dcfg, err
 	}
 
@@ -148,7 +148,7 @@ func DecodeConfig(cfg config.Provider) (DeployConfig, error) {
 		if *tgt == (Target{}) {
 			return dcfg, errors.New("empty deployment target")
 		}
-		if err := tgt.parseIncludeExclude(); err != nil {
+		if err := tgt.ParseIncludeExclude(); err != nil {
 			return dcfg, err
 		}
 	}
@@ -157,7 +157,7 @@ func DecodeConfig(cfg config.Provider) (DeployConfig, error) {
 		if *m == (Matcher{}) {
 			return dcfg, errors.New("empty deployment matcher")
 		}
-		m.re, err = regexp.Compile(m.Pattern)
+		m.Re, err = regexp.Compile(m.Pattern)
 		if err != nil {
 			return dcfg, fmt.Errorf("invalid deployment.matchers.pattern: %v", err)
 		}
@@ -167,7 +167,7 @@ func DecodeConfig(cfg config.Provider) (DeployConfig, error) {
 		if err != nil {
 			return dcfg, fmt.Errorf("invalid deployment.orderings.pattern: %v", err)
 		}
-		dcfg.ordering = append(dcfg.ordering, re)
+		dcfg.Ordering = append(dcfg.Ordering, re)
 	}
 
 	return dcfg, nil
```

```diff
@@ -1,4 +1,4 @@
-// Copyright 2019 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -14,7 +14,7 @@
 //go:build !nodeploy
 // +build !nodeploy
 
-package deploy
+package deployconfig
 
 import (
 	"fmt"
@@ -91,7 +91,7 @@ force = true
 	c.Assert(len(dcfg.Order), qt.Equals, 2)
 	c.Assert(dcfg.Order[0], qt.Equals, "o1")
 	c.Assert(dcfg.Order[1], qt.Equals, "o2")
-	c.Assert(len(dcfg.ordering), qt.Equals, 2)
+	c.Assert(len(dcfg.Ordering), qt.Equals, 2)
 
 	// Targets.
 	c.Assert(len(dcfg.Targets), qt.Equals, 3)
@@ -104,11 +104,11 @@ force = true
 		c.Assert(tgt.CloudFrontDistributionID, qt.Equals, fmt.Sprintf("cdn%d", i))
 		c.Assert(tgt.Include, qt.Equals, wantInclude[i])
 		if wantInclude[i] != "" {
-			c.Assert(tgt.includeGlob, qt.Not(qt.IsNil))
+			c.Assert(tgt.IncludeGlob, qt.Not(qt.IsNil))
 		}
 		c.Assert(tgt.Exclude, qt.Equals, wantExclude[i])
 		if wantExclude[i] != "" {
-			c.Assert(tgt.excludeGlob, qt.Not(qt.IsNil))
+			c.Assert(tgt.ExcludeGlob, qt.Not(qt.IsNil))
 		}
 	}
 
@@ -117,7 +117,7 @@ force = true
 	for i := 0; i < 3; i++ {
 		m := dcfg.Matchers[i]
 		c.Assert(m.Pattern, qt.Equals, fmt.Sprintf("^pattern%d$", i))
-		c.Assert(m.re, qt.Not(qt.IsNil))
+		c.Assert(m.Re, qt.Not(qt.IsNil))
 		c.Assert(m.CacheControl, qt.Equals, fmt.Sprintf("cachecontrol%d", i))
 		c.Assert(m.ContentEncoding, qt.Equals, fmt.Sprintf("contentencoding%d", i))
 		c.Assert(m.ContentType, qt.Equals, fmt.Sprintf("contenttype%d", i))
```