Add a remote retry for resources.Get

Bjørn Erik Pedersen 2021-12-02 13:31:22 +01:00
parent 66753416b5
commit 94f149b21e
3 changed files with 50 additions and 14 deletions
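
The heart of the change is a retry loop in the file cache's create path: one initial attempt plus up to `retries` further attempts, with a one-second pause after each failure, while `retries == 0` keeps the previous single-attempt behaviour. Below is a minimal standalone sketch of the same pattern; the `withRetries` helper and the flaky `fetch` function are illustrative and not part of the commit.

package main

import (
	"errors"
	"fmt"
	"time"
)

// withRetries mirrors the loop added to the file cache: call create once and,
// on error, retry up to `retries` more times with a one-second pause between
// attempts. A retries value of 0 keeps the old single-attempt behaviour.
func withRetries(retries int, create func() ([]byte, error)) ([]byte, error) {
	var (
		b   []byte
		err error
	)
	for i := -1; i < retries; i++ {
		b, err = create()
		if err == nil || retries == 0 {
			break
		}
		time.Sleep(1 * time.Second)
	}
	return b, err
}

func main() {
	attempts := 0
	// Illustrative create function that fails twice before succeeding.
	fetch := func() ([]byte, error) {
		attempts++
		if attempts < 3 {
			return nil, errors.New("temporary network error")
		}
		return []byte("remote body"), nil
	}

	b, err := withRetries(3, fetch)
	fmt.Printf("body=%q err=%v attempts=%d\n", string(b), err, attempts)
}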

@@ -48,6 +48,9 @@ type Cache struct {
 	// 0 is effectively turning this cache off.
 	maxAge time.Duration
+	// Number of retries on create error.
+	retries int
 	// When set, we just remove this entire root directory on expiration.
 	pruneAllRootDir string
@@ -84,11 +87,12 @@ type ItemInfo struct {
 }
 // NewCache creates a new file cache with the given filesystem and max age.
-func NewCache(fs afero.Fs, maxAge time.Duration, pruneAllRootDir string) *Cache {
+func NewCache(fs afero.Fs, maxAge time.Duration, retries int, pruneAllRootDir string) *Cache {
 	return &Cache{
 		Fs: fs,
 		nlocker: &lockTracker{Locker: locker.NewLocker(), seen: make(map[string]struct{})},
 		maxAge: maxAge,
+		retries: retries,
 		pruneAllRootDir: pruneAllRootDir,
 	}
 }
@@ -175,7 +179,19 @@ func (c *Cache) GetOrCreate(id string, create func() (io.ReadCloser, error)) (It
 		return info, r, nil
 	}
-	r, err := create()
+	var (
+		r io.ReadCloser
+		err error
+	)
+	for i := -1; i < c.retries; i++ {
+		r, err = create()
+		if err == nil || c.retries == 0 {
+			break
+		}
+		time.Sleep(1 * time.Second)
+	}
 	if err != nil {
 		return info, nil, err
 	}
@@ -206,7 +222,19 @@ func (c *Cache) GetOrCreateBytes(id string, create func() ([]byte, error)) (Item
 		return info, b, err
 	}
-	b, err := create()
+	var (
+		b []byte
+		err error
+	)
+	for i := -1; i < c.retries; i++ {
+		b, err = create()
+		if err == nil || c.retries == 0 {
+			break
+		}
+		time.Sleep(1 * time.Second)
+	}
 	if err != nil {
 		return info, nil, err
 	}
@@ -360,7 +388,7 @@ func NewCaches(p *helpers.PathSpec) (Caches, error) {
 			pruneAllRootDir = "pkg"
 		}
-		m[k] = NewCache(bfs, v.MaxAge, pruneAllRootDir)
+		m[k] = NewCache(bfs, v.MaxAge, v.retries, pruneAllRootDir)
 	}
 	return m, nil
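
With the widened NewCache signature above, a caller passes the retry count alongside the max age, and GetOrCreateBytes retries the create callback transparently. A rough usage sketch, assuming the exported github.com/gohugoio/hugo/cache/filecache package and an in-memory afero filesystem; the cache id and the flaky fetch function are made up for illustration.

package main

import (
	"errors"
	"fmt"

	"github.com/gohugoio/hugo/cache/filecache"
	"github.com/spf13/afero"
)

func main() {
	// An in-memory filesystem stands in for the real cache directory.
	fs := afero.NewMemMapFs()

	// A negative max age never expires; retries=3 matches the new
	// default for the getresource cache.
	c := filecache.NewCache(fs, -1, 3, "")

	attempts := 0
	// Illustrative create function: fails once, then succeeds, as a
	// transient remote fetch error might.
	fetch := func() ([]byte, error) {
		attempts++
		if attempts == 1 {
			return nil, errors.New("transient fetch error")
		}
		return []byte(`{"ok": true}`), nil
	}

	// The failed first attempt is retried after a one-second pause.
	_, b, err := c.GetOrCreateBytes("getresource/example.json", fetch)
	fmt.Printf("body=%s err=%v attempts=%d\n", b, err, attempts)
}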

@@ -34,20 +34,21 @@ const (
 	cachesConfigKey = "caches"
 	resourcesGenDir = ":resourceDir/_gen"
+	cacheDirProject = ":cacheDir/:project"
 )
 var defaultCacheConfig = Config{
 	MaxAge: -1, // Never expire
-	Dir: ":cacheDir/:project",
+	Dir: cacheDirProject,
 }
 const (
-	cacheKeyGetJSON  = "getjson"
-	cacheKeyGetCSV   = "getcsv"
-	cacheKeyImages   = "images"
-	cacheKeyAssets   = "assets"
-	cacheKeyModules  = "modules"
-	cacheGetResource = "getresource"
+	cacheKeyGetJSON     = "getjson"
+	cacheKeyGetCSV      = "getcsv"
+	cacheKeyImages      = "images"
+	cacheKeyAssets      = "assets"
+	cacheKeyModules     = "modules"
+	cacheKeyGetResource = "getresource"
 )
 type Configs map[string]Config
@@ -71,7 +72,11 @@ var defaultCacheConfigs = Configs{
 		MaxAge: -1,
 		Dir: resourcesGenDir,
 	},
-	cacheGetResource: defaultCacheConfig,
+	cacheKeyGetResource: Config{
+		MaxAge: -1, // Never expire
+		Dir: cacheDirProject,
+		retries: 3, // Retries on error getting the remote resource.
+	},
 }
 type Config struct {
@@ -86,6 +91,10 @@ type Config struct {
 	// Will resources/_gen will get its own composite filesystem that
 	// also checks any theme.
 	isResourceDir bool
+	// Number of retries when errors occurs when creating the element,
+	// only used for remote resources.
+	retries int
 }
 // GetJSONCache gets the file cache for getJSON.
@@ -115,7 +124,7 @@ func (f Caches) AssetsCache() *Cache {
 // GetResourceCache gets the file cache for remote resources.
 func (f Caches) GetResourceCache() *Cache {
-	return f[cacheGetResource]
+	return f[cacheKeyGetResource]
 }
 func DecodeConfig(fs afero.Fs, cfg config.Provider) (Configs, error) {

@@ -243,7 +243,6 @@ func (c *Client) FromRemote(uri string, options map[string]interface{}) (resourc
return c.rs.New(
resources.ResourceSourceDescriptor{
Fs: c.rs.FileCaches.AssetsCache().Fs,
LazyPublish: true,
OpenReadSeekCloser: func() (hugio.ReadSeekCloser, error) {
return hugio.NewReadSeekerNoOpCloser(bytes.NewReader(body)), nil