author     Alan Pearce  2024-05-16 23:41:57 +0200
committer  Alan Pearce  2024-05-16 23:41:57 +0200
commit     a5e758d41c151c17ed03b39454470ba8dd0c3b99 (patch)
tree       386333b5020477eabcf490773113b029e47a21ef /internal/fetcher/download.go
parent     d558039919b6198a246a6a3fd007276191cb4b2f (diff)
refactor: separate fetch and import logic
Diffstat (limited to 'internal/fetcher/download.go')
-rw-r--r--  internal/fetcher/download.go  75
1 file changed, 75 insertions, 0 deletions
diff --git a/internal/fetcher/download.go b/internal/fetcher/download.go
new file mode 100644
index 0000000..6bce5a8
--- /dev/null
+++ b/internal/fetcher/download.go
@@ -0,0 +1,75 @@
+package fetcher
+
+import (
+	"context"
+	"log/slog"
+	"net/url"
+	"path"
+	"searchix/internal/config"
+	"searchix/internal/file"
+
+	"github.com/pkg/errors"
+)
+
+type DownloadFetcher struct {
+	DataPath   string
+	Source     *config.Source
+	SourceFile string
+	Logger     *slog.Logger
+}
+
+var files = map[string]string{
+	"revision": "revision",
+	"options":  "options.json",
+}
+
+func (i *DownloadFetcher) FetchIfNeeded(
+	parent context.Context,
+) (f FetchedFiles, updated bool, err error) {
+	ctx, cancel := context.WithTimeout(parent, i.Source.FetchTimeout)
+	defer cancel()
+
+	root := i.DataPath
+
+	err = file.Mkdirp(root)
+	if err != nil {
+		err = errors.WithMessagef(err, "error creating directory for data: %s", root)
+
+		return
+	}
+
+	var fetchURL string
+	for _, filename := range files {
+		fetchURL, err = url.JoinPath(i.Source.URL, filename)
+		if err != nil {
+			err = errors.WithMessagef(
+				err,
+				"could not build URL with elements %s and %s",
+				i.Source.URL,
+				filename,
+			)
+
+			return
+		}
+
+		outPath := path.Join(root, filename)
+
+		i.Logger.Debug("preparing to fetch URL", "url", fetchURL, "path", outPath)
+
+		updated, err = fetchFileIfNeeded(ctx, outPath, fetchURL)
+		if err != nil {
+			return
+		}
+		// don't bother to issue requests for the later files
+		if !updated {
+			return
+		}
+	}
+
+	f = FetchedFiles{
+		Revision: path.Join(root, "revision"),
+		Options:  path.Join(root, "options.json"),
+	}
+
+	return
+}
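
For reference, below is a minimal usage sketch of the new DownloadFetcher, assuming only what this diff shows: config.Source exposes URL (a string) and FetchTimeout (assumed here to be a time.Duration), and FetchedFiles carries the Revision and Options paths. The data path, source URL, and main-function wiring are hypothetical placeholders; the real construction of these values lives elsewhere in searchix.

package main

import (
	"context"
	"log"
	"log/slog"
	"time"

	"searchix/internal/config"
	"searchix/internal/fetcher"
)

func main() {
	// Hypothetical source configuration; the URL and FetchTimeout field
	// names come from the diff above, the values are placeholders.
	source := &config.Source{
		URL:          "https://example.invalid/data",
		FetchTimeout: 5 * time.Minute,
	}

	f := &fetcher.DownloadFetcher{
		DataPath: "/tmp/searchix", // placeholder data directory
		Source:   source,
		Logger:   slog.Default(),
	}

	// Downloads "revision" and "options.json" into DataPath when the
	// remote copies are newer, returning the local paths.
	files, updated, err := f.FetchIfNeeded(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	if updated {
		log.Printf("revision file: %s, options file: %s", files.Revision, files.Options)
	}
}

Because FetchIfNeeded returns early as soon as a file turns out to be unchanged, callers can treat updated == false as "nothing new to import".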