internal/fetcher/download.go
package fetcher

import (
	"context"
	"fmt"
	"log/slog"
	"net/url"

	"searchix/internal/config"
	"searchix/internal/index"

	"github.com/pkg/errors"
)

// DownloadFetcher fetches a source's pre-built files directly from Source.URL.
type DownloadFetcher struct {
	Source     *config.Source
	SourceFile string
	Logger     *slog.Logger
}

// NewDownloadFetcher creates a DownloadFetcher for the given source.
// Only the Options importer is supported.
func NewDownloadFetcher(
	source *config.Source,
	logger *slog.Logger,
) (*DownloadFetcher, error) {
	switch source.Importer {
	case config.Options:
		return &DownloadFetcher{
			Source: source,
			Logger: logger,
		}, nil
	default:
		return nil, fmt.Errorf("unsupported importer type %s", source.Importer)
	}
}

// files pairs each file's kind with its name on the server. It is a slice
// rather than a map so that fetches happen in a fixed order and the early
// exit in FetchIfNeeded behaves deterministically.
var files = []struct {
	key      string
	filename string
}{
	{"revision", "revision"},
	{"options", "options.json"},
}

// FetchIfNeeded downloads each of the source's files, stopping as soon as one
// of them turns out not to have changed since sourceMeta.Updated. On success
// it advances sourceMeta.Updated to the fetched files' modification time.
func (i *DownloadFetcher) FetchIfNeeded(
	ctx context.Context,
	sourceMeta *index.SourceMeta,
) (f FetchedFiles, err error) {
	var fetchURL string
	sourceUpdated := sourceMeta.Updated
	for _, file := range files {
		fetchURL, err = url.JoinPath(i.Source.URL, file.filename)
		if err != nil {
			err = errors.WithMessagef(
				err,
				"could not build URL with elements %s and %s",
				i.Source.URL,
				file.filename,
			)

			return
		}

		i.Logger.Debug("preparing to fetch URL", "url", fetchURL)

		body, mtime, err := fetchFileIfNeeded(ctx, sourceUpdated, fetchURL)
		if err != nil {
			i.Logger.Warn("failed to fetch file", "url", fetchURL, "error", err)

			return f, err
		}
		// don't bother to issue requests for the later files
		if mtime.Before(sourceUpdated) {
			break
		}
		sourceMeta.Updated = mtime

		switch file.key {
		case "revision":
			f.Revision = body
		case "options":
			f.Options = body
		default:
			return f, errors.Errorf("unknown file kind %s", file.key)
		}
	}

	return
}
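The helper fetchFileIfNeeded and the FetchedFiles struct are defined elsewhere in this package. For reference, here is a minimal sketch of a helper matching the call site above; it is inferred from the signature, not the package's actual implementation. It assumes a conditional GET using If-Modified-Since / Last-Modified, that file bodies are returned as raw bytes, and that returning a zero mtime on 304 Not Modified is how the caller's mtime.Before(sourceUpdated) check gets short-circuited.

// Sketch only — the real fetchFileIfNeeded lives in another file of this package.
package fetcher

import (
	"context"
	"io"
	"net/http"
	"time"

	"github.com/pkg/errors"
)

// fetchFileIfNeeded issues a conditional GET for fetchURL. When the server
// reports the file unchanged since ifModifiedSince (304 Not Modified), it
// returns a zero mtime so that the caller skips the remaining files.
func fetchFileIfNeeded(
	ctx context.Context,
	ifModifiedSince time.Time,
	fetchURL string,
) (body []byte, mtime time.Time, err error) {
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, fetchURL, nil)
	if err != nil {
		return nil, mtime, errors.WithMessage(err, "could not create request")
	}
	req.Header.Set("If-Modified-Since", ifModifiedSince.UTC().Format(http.TimeFormat))

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return nil, mtime, errors.WithMessagef(err, "could not fetch %s", fetchURL)
	}
	defer resp.Body.Close()

	switch resp.StatusCode {
	case http.StatusNotModified:
		// Unchanged; the zero time is Before any real sourceUpdated value.
		return nil, time.Time{}, nil
	case http.StatusOK:
		if mtime, err = http.ParseTime(resp.Header.Get("Last-Modified")); err != nil {
			mtime = time.Now() // no usable Last-Modified header; assume fresh
		}
		body, err = io.ReadAll(resp.Body)

		return body, mtime, errors.WithMessage(err, "could not read response body")
	default:
		return nil, mtime, errors.Errorf("unexpected response status %q", resp.Status)
	}
}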