diff --git a/.golangci.yml b/.golangci.yml index 9710cd813..ecd3f79cf 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -34,4 +34,4 @@ linters-settings: - G501 - G401 - G505 - - G115 # Temporarily disabled, see discussion in https://github.com/securego/gosec/pull/1149 + - G115 # Disabled because gosec can't check the surrounding context, so it reports warnings that are clearly false positives. See discussion in https://github.com/securego/gosec/pull/1149 diff --git a/utils/cache/file_caches.go b/utils/cache/file_caches.go index 1157d4736..9fa0e0168 100644 --- a/utils/cache/file_caches.go +++ b/utils/cache/file_caches.go @@ -207,7 +207,7 @@ func newFSCache(name, cacheSize, cacheFolder string, maxItems int) (fscache.Cach return nil, nil } - lru := NewFileHaunter(name, maxItems, int64(size), consts.DefaultCacheCleanUpInterval) + lru := NewFileHaunter(name, maxItems, size, consts.DefaultCacheCleanUpInterval) h := fscache.NewLRUHaunterStrategy(lru) cacheFolder = filepath.Join(conf.Server.CacheFolder, cacheFolder) diff --git a/utils/cache/file_haunter.go b/utils/cache/file_haunter.go index 22d34c5c6..bee06ef28 100644 --- a/utils/cache/file_haunter.go +++ b/utils/cache/file_haunter.go @@ -21,7 +21,7 @@ type haunterKV struct { // If maxItems or maxSize are 0, they won't be checked // // Based on fscache.NewLRUHaunter -func NewFileHaunter(name string, maxItems int, maxSize int64, period time.Duration) fscache.LRUHaunter { +func NewFileHaunter(name string, maxItems int, maxSize uint64, period time.Duration) fscache.LRUHaunter { return &fileHaunter{ name: name, period: period, @@ -34,7 +34,7 @@ type fileHaunter struct { name string period time.Duration maxItems int - maxSize int64 + maxSize uint64 } func (j *fileHaunter) Next() time.Duration { @@ -43,10 +43,10 @@ func (j *fileHaunter) Scrub(c fscache.CacheAccessor) (keysToReap []string) { var count int - var size int64 + var size uint64 var okFiles []haunterKV - log.Trace("Running cache cleanup", "cache", j.name, "maxSize", 
humanize.Bytes(uint64(j.maxSize))) + log.Trace("Running cache cleanup", "cache", j.name, "maxSize", humanize.Bytes(j.maxSize)) c.EnumerateEntries(func(key string, e fscache.Entry) bool { if e.InUse() { return true @@ -63,7 +63,7 @@ func (j *fileHaunter) Scrub(c fscache.CacheAccessor) (keysToReap []string) { } count++ - size = size + fileInfo.Size() + size = size + uint64(fileInfo.Size()) okFiles = append(okFiles, haunterKV{ key: key, value: e, @@ -94,7 +94,7 @@ func (j *fileHaunter) Scrub(c fscache.CacheAccessor) (keysToReap []string) { return true } - log.Trace("Current cache stats", "cache", j.name, "size", humanize.Bytes(uint64(size)), "numItems", count) + log.Trace("Current cache stats", "cache", j.name, "size", humanize.Bytes(size), "numItems", count) if j.maxItems > 0 { for count > j.maxItems { @@ -118,13 +118,13 @@ func (j *fileHaunter) Scrub(c fscache.CacheAccessor) (keysToReap []string) { return keysToReap } -func (j *fileHaunter) removeFirst(items *[]haunterKV, count int, size int64) (*string, int, int64, error) { +func (j *fileHaunter) removeFirst(items *[]haunterKV, count int, size uint64) (*string, int, uint64, error) { var f haunterKV f, *items = (*items)[0], (*items)[1:] count-- - size = size - f.info.Size() + size = size - uint64(f.info.Size()) return &f.key, count, size, nil } diff --git a/utils/cache/file_haunter_test.go b/utils/cache/file_haunter_test.go index 3192b3e28..bd1eb568d 100644 --- a/utils/cache/file_haunter_test.go +++ b/utils/cache/file_haunter_test.go @@ -20,7 +20,7 @@ var _ = Describe("FileHaunter", func() { var cacheDir string var err error var maxItems int - var maxSize int64 + var maxSize uint64 JustBeforeEach(func() { tempDir, _ := os.MkdirTemp("", "spread_fs") @@ -29,7 +29,9 @@ var _ = Describe("FileHaunter", func() { Expect(err).ToNot(HaveOccurred()) DeferCleanup(func() { _ = os.RemoveAll(tempDir) }) - fsCache, err = fscache.NewCacheWithHaunter(fs, fscache.NewLRUHaunterStrategy(cache.NewFileHaunter("", maxItems, maxSize, 
300*time.Millisecond))) + fsCache, err = fscache.NewCacheWithHaunter(fs, fscache.NewLRUHaunterStrategy( + cache.NewFileHaunter("", maxItems, maxSize, 300*time.Millisecond), + )) Expect(err).ToNot(HaveOccurred()) DeferCleanup(fsCache.Clean)