Mirror of https://github.com/DNSCrypt/dnscrypt-proxy.git (synced 2025-04-07 07:07:37 +03:00)
Minor cleanup, mostly in tests
This commit is contained in:
parent c0e34d1a9e
commit b697283309

2 changed files with 17 additions and 25 deletions
@@ -52,7 +52,6 @@ func (source *Source) checkSignature(bin, sig []byte) (err error) {
 var timeNow = time.Now
 
 func (source *Source) fetchFromCache() (delayTillNextUpdate time.Duration, err error) {
-	delayTillNextUpdate = 0
 	var bin, sig []byte
 	if bin, err = ioutil.ReadFile(source.cacheFile); err != nil {
 		return
@@ -69,8 +68,8 @@ func (source *Source) fetchFromCache() (delayTillNextUpdate time.Duration, err e
 		return
 	}
 	if elapsed := timeNow().Sub(fi.ModTime()); elapsed < source.cacheTTL {
-		dlog.Debugf("Cache file [%s] is still fresh", source.cacheFile)
 		delayTillNextUpdate = source.prefetchDelay - elapsed
+		dlog.Debugf("Cache file [%s] is still fresh, next update: %v", source.cacheFile, delayTillNextUpdate)
 	} else {
 		dlog.Debugf("Cache file [%s] needs to be refreshed", source.cacheFile)
 	}
@@ -117,7 +116,7 @@ func (source *Source) fetchWithCache(xTransport *XTransport, urlStr string) (del
 		dlog.Debugf("Delay till next update: %v", delayTillNextUpdate)
 		return
 	}
+	delayTillNextUpdate = MinimumPrefetchInterval
 	dlog.Infof("Loading source information from URL [%s]", urlStr)
 
 	var srcURL *url.URL
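
Taken together, the hunks above change how the delay till the next update is computed: a fresh cache reports prefetchDelay minus the cache file's age and now logs that value, and the download path starts from MinimumPrefetchInterval. A standalone illustration of that freshness arithmetic, with made-up constants and a throwaway temp file standing in for the real cache, would look roughly like this:

// Standalone illustration only: the constants and the temp file below are
// made up for this example; just the freshness arithmetic mirrors the commit.
package main

import (
	"fmt"
	"os"
	"time"
)

func main() {
	const cacheTTL = 4 * time.Hour       // how long a cached copy counts as fresh
	const prefetchDelay = 24 * time.Hour // nominal interval between refreshes

	f, err := os.CreateTemp("", "cache-*")
	if err != nil {
		panic(err)
	}
	defer os.Remove(f.Name())
	f.Close()

	fi, err := os.Stat(f.Name())
	if err != nil {
		panic(err)
	}
	if elapsed := time.Now().Sub(fi.ModTime()); elapsed < cacheTTL {
		delayTillNextUpdate := prefetchDelay - elapsed
		fmt.Printf("cache is still fresh, next update: %v\n", delayTillNextUpdate)
	} else {
		fmt.Println("cache needs to be refreshed")
	}
}
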
@@ -54,12 +54,12 @@ type SourceTestData struct {
 }
 
 type SourceTestExpect struct {
-	success, download bool
+	success   bool
 	cachePath string
 	cache     []SourceFixture
-	refresh   time.Time
 	Source    *Source
+	delay     time.Duration
 	err       string
 }
 
 func readFixture(t *testing.T, name string) []byte {
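
For readability, this is how SourceTestExpect reads once the hunk above is applied, written out contiguously (the gofmt alignment is approximate):

type SourceTestExpect struct {
	success   bool
	cachePath string
	cache     []SourceFixture
	Source    *Source
	delay     time.Duration
	err       string
}
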
@@ -246,7 +246,7 @@ func prepSourceTestCache(t *testing.T, d *SourceTestData, e *SourceTestExpect, s
 	e.cache = []SourceFixture{d.fixtures[state][source], d.fixtures[state][source+".minisig"]}
 	switch state {
 	case TestStateCorrect:
-		e.Source.in, e.success, e.refresh = e.cache[0].content, true, d.timeUpd
+		e.Source.in, e.success = e.cache[0].content, true
 	case TestStateExpired:
 		e.Source.in = e.cache[0].content
 	case TestStatePartial, TestStatePartialSig:
@@ -267,7 +267,7 @@ func prepSourceTestDownload(t *testing.T, d *SourceTestData, e *SourceTestExpect
 	switch state {
 	case TestStateCorrect:
 		e.cache = []SourceFixture{d.fixtures[state][source], d.fixtures[state][source+".minisig"]}
-		e.Source.in, e.success, e.download, e.refresh = e.cache[0].content, true, true, d.timeUpd
+		e.Source.in, e.success = e.cache[0].content, true
 		fallthrough
 	case TestStateMissingSig, TestStatePartial, TestStatePartialSig, TestStateReadSigErr:
 		d.reqExpect[path+".minisig"]++
@@ -294,9 +294,11 @@ func prepSourceTestDownload(t *testing.T, d *SourceTestData, e *SourceTestExpect
 	}
 	if e.success {
 		e.err = ""
+		e.delay = DefaultPrefetchDelay
+	} else {
+		e.delay = MinimumPrefetchInterval
 	}
-	} else {
-		e.refresh = time.Time{}
-	}
+	e.Source.refresh = d.timeNow.Add(e.delay)
 	}
 
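
Written out contiguously, the end of prepSourceTestDownload after the hunk above becomes (indentation is approximate):

	if e.success {
		e.err = ""
		e.delay = DefaultPrefetchDelay
	} else {
		e.delay = MinimumPrefetchInterval
	}
	e.Source.refresh = d.timeNow.Add(e.delay)
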
@@ -305,7 +307,6 @@ func setupSourceTestCase(t *testing.T, d *SourceTestData, i int,
 	id = strconv.Itoa(d.n) + "-" + strconv.Itoa(i)
 	e = &SourceTestExpect{
 		cachePath: filepath.Join(d.tempDir, id),
-		refresh:   d.timeNow,
 	}
 	e.Source = &Source{urls: []string{}, format: SourceFormatV2, minisignKey: d.key,
 		cacheFile: e.cachePath, cacheTTL: DefaultPrefetchDelay * 3, prefetchDelay: DefaultPrefetchDelay}
@@ -314,7 +315,6 @@ func setupSourceTestCase(t *testing.T, d *SourceTestData, i int,
 		i = (i + 1) % len(d.sources) // make the cached and downloaded fixtures different
 	}
 	prepSourceTestDownload(t, d, e, d.sources[i], downloadTest)
-	e.Source.refresh = e.refresh
 	return
 }
 
@@ -369,10 +369,8 @@ func TestPrefetchSources(t *testing.T) {
 		c := check.T(t)
 		expectDelay := MinimumPrefetchInterval
 		for _, e := range expects {
-			if e.refresh.After(d.timeNow) {
-				expectDelay = e.refresh.Sub(d.timeNow)
-			} else if e.download {
-				expectDelay = DefaultPrefetchDelay
+			if e.delay >= MinimumPrefetchInterval && (expectDelay == MinimumPrefetchInterval || expectDelay > e.delay) {
+				expectDelay = e.delay
 			}
 		}
 		c.InDelta(got, expectDelay, time.Second, "Unexpected return")
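
After the hunk above, the expectation loop keeps the smallest per-source delay at or above MinimumPrefetchInterval rather than deriving it from refresh times; written out contiguously it reads roughly:

	expectDelay := MinimumPrefetchInterval
	for _, e := range expects {
		if e.delay >= MinimumPrefetchInterval && (expectDelay == MinimumPrefetchInterval || expectDelay > e.delay) {
			expectDelay = e.delay
		}
	}
	c.InDelta(got, expectDelay, time.Second, "Unexpected return")
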
@@ -381,6 +379,7 @@ func TestPrefetchSources(t *testing.T) {
 			checkSourceCache(c, e.cachePath, e.cache)
 		}
 	}
+	timeNow = func() time.Time { return d.timeUpd } // since the fixtures are prepared using real now, make the tested code think it's the future
 	for downloadTestName, downloadTest := range d.downloadTests {
 		d.n++
 		sources := []*Source{}
@@ -389,12 +388,6 @@ func TestPrefetchSources(t *testing.T) {
 			_, e := setupSourceTestCase(t, d, i, nil, downloadTest)
 			sources = append(sources, e.Source)
 			expects = append(expects, e)
-			if !e.Source.refresh.IsZero() {
-				e.Source.refresh = d.timeOld
-			}
-			if e.download {
-				e.refresh = d.timeUpd
-			}
 		}
 		t.Run("download "+downloadTestName, func(t *testing.T) {
 			got := PrefetchSources(d.xTransport, sources)