path: root/internal
author    Alan Pearce 2024-05-17 14:06:06 +0200
committer Alan Pearce 2024-05-17 14:06:06 +0200
commit    05e61978906a08132c4340a5f9ae518134dd0fa9 (patch)
tree      0ab5e550a1fe5aaf560459c40eccfdc00e298515 /internal
parent    42611df8133fc88bac5947a65a18fa095d68a951 (diff)
feat: support searching packages
Diffstat (limited to 'internal')
-rw-r--r--  internal/index/search.go       8
-rw-r--r--  internal/server/mux.go       208
-rw-r--r--  internal/server/templates.go  11
3 files changed, 122 insertions, 105 deletions
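
In short, this commit generalises the options search handler so the same code also serves package search: the handler (and the opensearch.xml handler) is now produced by a factory parameterised on the importer type, and both /options/{source}/search and /packages/{source}/search are registered from it. A minimal standalone sketch of that shape, using simplified stand-in types rather than the project's real config package:

package main

import "net/http"

// ImporterType is a stand-in for config.ImporterType.
type ImporterType int

const (
	Options ImporterType = iota
	Packages
)

// newSearchHandler mirrors the createSearchHandler closure in the diff below:
// one factory, one handler body, two importer types.
func newSearchHandler(t ImporterType) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		// In mux.go the handler looks up r.PathValue("source"), returns 404
		// when the source's importer does not match t, and otherwise runs
		// the search and renders the template named after t.
		_ = r.PathValue("source")
		w.WriteHeader(http.StatusNotImplemented)
	}
}

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/options/{source}/search", newSearchHandler(Options))
	mux.HandleFunc("/packages/{source}/search", newSearchHandler(Packages))
	_ = http.ListenAndServe(":8080", mux)
}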
diff --git a/internal/index/search.go b/internal/index/search.go
index a86cc02..5c18edb 100644
--- a/internal/index/search.go
+++ b/internal/index/search.go
@@ -4,6 +4,7 @@ import (
 	"bytes"
 	"context"
 	"encoding/gob"
+	"searchix/internal/config"
 	"searchix/internal/nix"
 
 	"github.com/blevesearch/bleve/v2"
@@ -15,7 +16,7 @@ const ResultsPerPage = 20
 
 type DocumentMatch struct {
 	search.DocumentMatch
-	Data nix.Option
+	Data nix.Importable
 }
 
 type Result struct {
@@ -53,11 +54,12 @@ func (index *ReadIndex) GetSource(ctx context.Context, name string) (*bleve.Sear
 
 func (index *ReadIndex) Search(
 	ctx context.Context,
-	source string,
+	source *config.Source,
 	keyword string,
 	from uint64,
 ) (*Result, error) {
-	sourceQuery := bleve.NewTermQuery(source)
+	sourceQuery := bleve.NewTermQuery(source.Key)
+	sourceQuery.SetField("Source")
 	userQuery := bleve.NewMatchQuery(keyword)
 	userQuery.Analyzer = "option_name"
 
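
Search now takes the whole *config.Source and filters on its Key via a term query pinned to the "Source" field, alongside the keyword match query. How the two are combined lies outside this hunk; a plausible sketch (the conjunction and the surrounding request setup are assumptions, only the two sub-queries appear verbatim above):

package main

import (
	"fmt"

	"github.com/blevesearch/bleve/v2"
	"github.com/blevesearch/bleve/v2/search/query"
)

// buildQuery combines a per-source filter with the user's keyword query.
func buildQuery(sourceKey, keyword string) query.Query {
	sourceQuery := bleve.NewTermQuery(sourceKey) // source.Key after this change
	sourceQuery.SetField("Source")               // match only documents from this source

	userQuery := bleve.NewMatchQuery(keyword)
	userQuery.Analyzer = "option_name"

	// Assumed join: both conditions must hold.
	return bleve.NewConjunctionQuery(sourceQuery, userQuery)
}

func main() {
	fmt.Printf("%T\n", buildQuery("nixos", "services.nginx.enable"))
}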
diff --git a/internal/server/mux.go b/internal/server/mux.go
index 78fee60..27cd4bf 100644
--- a/internal/server/mux.go
+++ b/internal/server/mux.go
@@ -17,7 +17,6 @@ import (
 	"searchix/frontend"
 	"searchix/internal/config"
 	search "searchix/internal/index"
-	"searchix/internal/nix"
 
 	"github.com/blevesearch/bleve/v2"
 	sentryhttp "github.com/getsentry/sentry-go/http"
@@ -51,7 +50,7 @@ type TemplateData struct {
 	Message       string
 }
 
-type ResultData[T nix.Option] struct {
+type ResultData struct {
 	TemplateData
 	Query          string
 	ResultsPerPage int
@@ -109,131 +108,133 @@ func NewMux(
 	})
 
 	const searchTimeout = 1 * time.Second
-	mux.HandleFunc("/options/{source}/search", func(w http.ResponseWriter, r *http.Request) {
-		sourceKey := r.PathValue("source")
+	createSearchHandler := func(importerType config.ImporterType) func(http.ResponseWriter, *http.Request) {
+		return func(w http.ResponseWriter, r *http.Request) {
+			var err error
+			source := cfg.Importer.Sources[r.PathValue("source")]
+			if source == nil || importerType != source.Importer {
+				errorHandler(w, r, http.StatusText(http.StatusNotFound), http.StatusNotFound)
 
-		source := cfg.Importer.Sources[sourceKey]
-		if source == nil {
-			errorHandler(w, r, "Source not found", http.StatusNotFound)
-
-			return
-		}
+				return
+			}
 
-		ctx, cancel := context.WithTimeout(r.Context(), searchTimeout)
-		defer cancel()
-
-		if r.URL.Query().Has("query") {
-			qs := r.URL.Query().Get("query")
-			pg := r.URL.Query().Get("page")
-			var page uint64 = 1
-			if pg != "" {
-				page, err = strconv.ParseUint(pg, 10, 64)
-				if err != nil || page == 0 {
-					errorHandler(w, r, "Bad query string", http.StatusBadRequest)
+			ctx, cancel := context.WithTimeout(r.Context(), searchTimeout)
+			defer cancel()
+
+			if r.URL.Query().Has("query") {
+				qs := r.URL.Query().Get("query")
+				pg := r.URL.Query().Get("page")
+				var page uint64 = 1
+				if pg != "" {
+					page, err = strconv.ParseUint(pg, 10, 64)
+					if err != nil || page == 0 {
+						errorHandler(w, r, "Bad query string", http.StatusBadRequest)
+					}
 				}
-			}
-			results, err := index.Search(ctx, sourceKey, qs, (page-1)*search.ResultsPerPage)
-			if err != nil {
-				if err == context.DeadlineExceeded {
-					errorHandler(w, r, "Search timed out", http.StatusInternalServerError)
+				results, err := index.Search(ctx, source, qs, (page-1)*search.ResultsPerPage)
+				if err != nil {
+					if err == context.DeadlineExceeded {
+						errorHandler(w, r, "Search timed out", http.StatusInternalServerError)
 
-					return
+						return
+					}
+					slog.Error("search error", "error", err)
+					errorHandler(w, r, err.Error(), http.StatusInternalServerError)
 				}
-				slog.Error("search error", "error", err)
-				errorHandler(w, r, err.Error(), http.StatusInternalServerError)
-			}
 
-			tdata := ResultData[nix.Option]{
-				TemplateData: TemplateData{
-					ExtraHeadHTML: cfg.Web.ExtraHeadHTML,
-					Source:        *source,
-					Sources:       cfg.Importer.Sources,
-					Version:       *versionInfo,
-				},
-				ResultsPerPage: search.ResultsPerPage,
-				Query:          qs,
-				Results:        results,
-			}
+				tdata := ResultData{
+					TemplateData: TemplateData{
+						ExtraHeadHTML: cfg.Web.ExtraHeadHTML,
+						Source:        *source,
+						Sources:       cfg.Importer.Sources,
+						Version:       *versionInfo,
+					},
+					ResultsPerPage: search.ResultsPerPage,
+					Query:          qs,
+					Results:        results,
+				}
 
-			hits := uint64(len(results.Hits))
-			if results.Total > hits {
-				q, err := url.ParseQuery(r.URL.RawQuery)
-				if err != nil {
-					errorHandler(w, r, "Query string error", http.StatusBadRequest)
+				hits := uint64(len(results.Hits))
+				if results.Total > hits {
+					q, err := url.ParseQuery(r.URL.RawQuery)
+					if err != nil {
+						errorHandler(w, r, "Query string error", http.StatusBadRequest)
 
-					return
-				}
+						return
+					}
 
-				if page*search.ResultsPerPage > results.Total {
-					errorHandler(w, r, "Not found", http.StatusNotFound)
+					if page*search.ResultsPerPage > results.Total {
+						errorHandler(w, r, "Not found", http.StatusNotFound)
 
-					return
-				}
+						return
+					}
 
-				if page*search.ResultsPerPage < results.Total {
-					q.Set("page", strconv.FormatUint(page+1, 10))
-					tdata.Next = "search?" + q.Encode()
-				}
+					if page*search.ResultsPerPage < results.Total {
+						q.Set("page", strconv.FormatUint(page+1, 10))
+						tdata.Next = "search?" + q.Encode()
+					}
 
-				if page > 1 {
-					p := page - 1
-					if p == 1 {
-						q.Del("page")
-					} else {
-						q.Set("page", strconv.FormatUint(p, 10))
+					if page > 1 {
+						p := page - 1
+						if p == 1 {
+							q.Del("page")
+						} else {
+							q.Set("page", strconv.FormatUint(p, 10))
+						}
+						tdata.Prev = "search?" + q.Encode()
 					}
-					tdata.Prev = "search?" + q.Encode()
 				}
-			}
 
-			w.Header().Add("Cache-Control", "max-age=300")
-			w.Header().Add("Vary", "Fetch")
-			if r.Header.Get("Fetch") == "true" {
-				w.Header().Add("Content-Type", "text/html; charset=utf-8")
-				err = templates["options"].ExecuteTemplate(w, "results", tdata)
+				w.Header().Add("Cache-Control", "max-age=300")
+				w.Header().Add("Vary", "Fetch")
+				if r.Header.Get("Fetch") == "true" {
+					w.Header().Add("Content-Type", "text/html; charset=utf-8")
+					err = templates[importerType.String()].ExecuteTemplate(w, "results", tdata)
+				} else {
+					err = templates[importerType.String()].Execute(w, tdata)
+				}
+				if err != nil {
+					slog.Error("template error", "template", importerType, "error", err)
+					errorHandler(w, r, err.Error(), http.StatusInternalServerError)
+				}
 			} else {
-				err = templates["options"].Execute(w, tdata)
-			}
-			if err != nil {
-				slog.Error("template error", "template", "options", "error", err)
-				errorHandler(w, r, err.Error(), http.StatusInternalServerError)
-			}
-		} else {
-			sourceResult, err := index.GetSource(ctx, sourceKey)
-			if err != nil {
-				errorHandler(w, r, err.Error(), http.StatusInternalServerError)
+				sourceResult, err := index.GetSource(ctx, source.Key)
+				if err != nil {
+					errorHandler(w, r, err.Error(), http.StatusInternalServerError)
 
-				return
-			}
+					return
+				}
 
-			w.Header().Add("Cache-Control", "max-age=14400")
-			err = templates["search"].Execute(w, TemplateData{
-				ExtraHeadHTML: cfg.Web.ExtraHeadHTML,
-				Sources:       cfg.Importer.Sources,
-				Source:        *source,
-				SourceResult:  sourceResult,
-				Version:       *versionInfo,
-			})
-			if err != nil {
-				errorHandler(w, r, err.Error(), http.StatusInternalServerError)
+				w.Header().Add("Cache-Control", "max-age=14400")
+				err = templates["search"].Execute(w, TemplateData{
+					ExtraHeadHTML: cfg.Web.ExtraHeadHTML,
+					Sources:       cfg.Importer.Sources,
+					Source:        *source,
+					SourceResult:  sourceResult,
+					Version:       *versionInfo,
+				})
+				if err != nil {
+					errorHandler(w, r, err.Error(), http.StatusInternalServerError)
 
-				return
+					return
+				}
 			}
 		}
-	})
+	}
 
-	mux.HandleFunc(
-		"/options/{source}/opensearch.xml",
-		func(w http.ResponseWriter, r *http.Request) {
+	mux.HandleFunc("/options/{source}/search", createSearchHandler(config.Options))
+	mux.HandleFunc("/packages/{source}/search", createSearchHandler(config.Packages))
+
+	createOpenSearchXMLHandler := func(importerType config.ImporterType) func(http.ResponseWriter, *http.Request) {
+		return func(w http.ResponseWriter, r *http.Request) {
 			type openSearchData struct {
 				BaseURL string
 				Source  *config.Source
 			}
 
-			sourceKey := r.PathValue("source")
-			source := cfg.Importer.Sources[sourceKey]
-			if source == nil {
-				errorHandler(w, r, "Source not found", http.StatusNotFound)
+			source := cfg.Importer.Sources[r.PathValue("source")]
+			if source == nil || importerType != source.Importer {
+				errorHandler(w, r, http.StatusText(http.StatusNotFound), http.StatusNotFound)
 
 				return
 			}
@@ -256,8 +257,11 @@ func NewMux(
 					http.StatusInternalServerError,
 				)
 			}
-		},
-	)
+		}
+	}
+
+	mux.HandleFunc("/options/{source}/opensearch.xml", createOpenSearchXMLHandler(config.Options))
+	mux.HandleFunc("/packages/{source}/opensearch.xml", createOpenSearchXMLHandler(config.Packages))
 
 	fs := http.FileServer(http.FS(frontend.Files))
 	mux.HandleFunc("/static/", func(w http.ResponseWriter, r *http.Request) {
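
The Next/Prev pagination logic is carried over unchanged inside the new closure. Pulled out of the handler for clarity, the arithmetic is roughly this (a standalone sketch with assumed names, not code from the repository):

package main

import (
	"fmt"
	"net/url"
	"strconv"
)

// pageLinks rebuilds the query string for the next and previous pages the
// same way the handler does: bump or decrement "page", and drop the
// parameter entirely when the previous page is the first one.
func pageLinks(rawQuery string, page, perPage, total uint64) (next, prev string, err error) {
	q, err := url.ParseQuery(rawQuery)
	if err != nil {
		return "", "", err
	}
	if page*perPage < total { // more results remain after this page
		q.Set("page", strconv.FormatUint(page+1, 10))
		next = "search?" + q.Encode()
	}
	if page > 1 {
		if page-1 == 1 {
			q.Del("page")
		} else {
			q.Set("page", strconv.FormatUint(page-1, 10))
		}
		prev = "search?" + q.Encode()
	}

	return next, prev, nil
}

func main() {
	next, prev, _ := pageLinks("query=firefox&page=2", 2, 20, 55)
	fmt.Println(next) // search?page=3&query=firefox
	fmt.Println(prev) // search?query=firefox
}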
diff --git a/internal/server/templates.go b/internal/server/templates.go
index 3d45167..8967599 100644
--- a/internal/server/templates.go
+++ b/internal/server/templates.go
@@ -8,6 +8,7 @@ import (
 	"log/slog"
 	"path"
 	"searchix/frontend"
+	"searchix/internal/config"
 	"searchix/internal/nix"
 	"strings"
 
@@ -33,6 +34,16 @@ var templateFuncs = template.FuncMap{
 
 		return template.HTML(out.String()) // #nosec G203
 	},
+	"sourceNameAndType": func(source config.Source) (string, error) {
+		switch source.Importer {
+		case config.Options:
+			return source.Name + " " + source.Importer.String(), nil
+		case config.Packages:
+			return source.Name, nil
+		default:
+			return "", errors.Errorf("unknown source importer type %s", source.Importer.String())
+		}
+	},
 	"sourceName": func(input string) string {
 		switch input {
 		case "nixos":
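
The new sourceNameAndType helper presumably labels options sources with their importer type in page titles while leaving package sources bare. A self-contained illustration of a helper of the same shape (the Source type and template text below are simplified assumptions, not the project's):

package main

import (
	"html/template"
	"os"
)

// Simplified stand-ins for searchix/internal/config.
type ImporterType string

const (
	Options  ImporterType = "options"
	Packages ImporterType = "packages"
)

type Source struct {
	Name     string
	Importer ImporterType
}

func main() {
	funcs := template.FuncMap{
		// Mirrors sourceNameAndType: append the importer type for options
		// sources, return the bare name for package sources.
		"sourceNameAndType": func(s Source) string {
			if s.Importer == Options {
				return s.Name + " " + string(s.Importer)
			}

			return s.Name
		},
	}
	t := template.Must(template.New("title").Funcs(funcs).Parse(`{{ sourceNameAndType . }}`))
	_ = t.Execute(os.Stdout, Source{Name: "NixOS", Importer: Options}) // prints "NixOS options"
}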