1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
|
package fetcher
import (
"context"
"log/slog"
"net/url"
"path"
"searchix/internal/config"
"searchix/internal/file"
"github.com/pkg/errors"
)
// DownloadFetcher retrieves a source's data files over HTTP(S) by joining
// well-known file names onto the source's base URL and storing the results
// under DataPath.
type DownloadFetcher struct {
	DataPath   string         // local directory where fetched files are written
	Source     *config.Source // source configuration: base URL and fetch timeout
	SourceFile string         // NOTE(review): not referenced in this chunk — confirm use elsewhere
	Logger     *slog.Logger
}
// files maps a logical file key to the file name used both remotely
// (joined onto Source.URL) and locally (under DataPath).
var files = map[string]string{
	"revision": "revision",
	"options":  "options.json",
}
// FetchIfNeeded downloads the source's data files ("revision" and
// "options.json") into i.DataPath, honouring the source's fetch timeout.
// The revision marker is fetched first; if it is unchanged, the remaining
// (larger) files are skipped. It returns the local paths of the files,
// whether anything was updated, and any error encountered.
func (i *DownloadFetcher) FetchIfNeeded(
	parent context.Context,
) (f FetchedFiles, updated bool, err error) {
	ctx, cancel := context.WithTimeout(parent, i.Source.FetchTimeout)
	defer cancel()

	root := i.DataPath

	err = file.Mkdirp(root)
	if err != nil {
		err = errors.WithMessagef(err, "error creating directory for data: %s", root)

		return
	}

	// Iterate in a fixed order rather than ranging over the files map:
	// Go map iteration order is random, so the early exit below would
	// otherwise sometimes fetch options.json before the cheap revision
	// check, defeating the optimization nondeterministically.
	var fetchURL string
	for _, key := range []string{"revision", "options"} {
		filename := files[key]

		fetchURL, err = url.JoinPath(i.Source.URL, filename)
		if err != nil {
			err = errors.WithMessagef(
				err,
				"could not build URL with elements %s and %s",
				i.Source.URL,
				filename,
			)

			return
		}

		outPath := path.Join(root, filename)
		i.Logger.Debug("preparing to fetch URL", "url", fetchURL, "path", outPath)

		updated, err = fetchFileIfNeeded(ctx, outPath, fetchURL)
		if err != nil {
			return
		}
		// don't bother to issue requests for the later files
		if !updated {
			break
		}
	}

	f = FetchedFiles{
		Revision: path.Join(root, "revision"),
		Options:  path.Join(root, "options.json"),
	}

	return
}
|