diff options
| author | Shulhan <ms@kilabit.info> | 2025-06-19 01:06:48 +0700 |
|---|---|---|
| committer | Shulhan <ms@kilabit.info> | 2025-06-19 01:06:48 +0700 |
| commit | 8bc8fce1bd80b5a25c452ac5a24b1a1e3f5a4feb (patch) | |
| tree | 8fa07a3fb043655aae542c9fad71098c3f2a6d84 /brokenlinks/worker.go | |
| parent | 5ac82928702a0f0a7be0ef6e96ab04c39a7e8e9d (diff) | |
| download | jarink-8bc8fce1bd80b5a25c452ac5a24b1a1e3f5a4feb.tar.xz | |
all: add test cases for simulating slow server
The test runs a server that contains six pages, each containing a
different [time.Sleep] duration before returning the response.
This allows us to see how the main scan loop works, waiting
for resultq and listWaitStatus.
Diffstat (limited to 'brokenlinks/worker.go')
| -rw-r--r-- | brokenlinks/worker.go | 53 |
1 files changed, 16 insertions, 37 deletions
diff --git a/brokenlinks/worker.go b/brokenlinks/worker.go index 94be90b..3eacf01 100644 --- a/brokenlinks/worker.go +++ b/brokenlinks/worker.go @@ -145,49 +145,29 @@ func (wrk *worker) scanAll() (result *Result, err error) { go wrk.scan(linkq) } - var tick = time.NewTicker(500 * time.Millisecond) - var listWaitStatus []linkQueue - var isScanning = true - for isScanning { - select { - case resultq := <-wrk.resultq: - listWaitStatus = wrk.processResult(resultq, listWaitStatus) - - case <-tick.C: - wrk.wg.Wait() - if len(wrk.resultq) != 0 { - continue - } - if len(listWaitStatus) != 0 { - // There are links that still waiting for - // scanning to be completed. - continue - } - isScanning = false - } - } - wrk.result.sort() + wrk.processAndWait() return wrk.result, nil } // scanPastResult scan only pages reported inside // [Result.BrokenLinks]. -func (wrk *worker) scanPastResult() ( - result *Result, err error, -) { - go func() { - for page := range wrk.pastResult.BrokenLinks { - var linkq = linkQueue{ - parentUrl: nil, - url: page, - status: http.StatusProcessing, - } - wrk.seenLink[linkq.url] = http.StatusProcessing - wrk.wg.Add(1) - go wrk.scan(linkq) +func (wrk *worker) scanPastResult() (result *Result, err error) { + for page := range wrk.pastResult.BrokenLinks { + var linkq = linkQueue{ + parentUrl: nil, + url: page, + status: http.StatusProcessing, } - }() + wrk.seenLink[linkq.url] = http.StatusProcessing + wrk.wg.Add(1) + go wrk.scan(linkq) + } + wrk.processAndWait() + return wrk.result, nil +} + +func (wrk *worker) processAndWait() { var tick = time.NewTicker(500 * time.Millisecond) var listWaitStatus []linkQueue var isScanning = true @@ -210,7 +190,6 @@ func (wrk *worker) scanPastResult() ( } } wrk.result.sort() - return wrk.result, nil } // processResult the resultq contains the original URL being scanned |
