author    Alan Pearce  2024-05-12 14:50:36 +0200
committer Alan Pearce  2024-05-12 14:50:36 +0200
commit    f769decb676f26210bd6884d33e84ada5898b17c (patch)
tree      b87898af5c2debe7920c6db2bc1c0de67655255d
parent    4f5be8a89755dfdece7b2ec60c90d74bfde1b707 (diff)
refactor: merge search and results pages
 frontend/templates/blocks/search.gotmpl |   2
 internal/server/mux.go                  | 165
 2 files changed, 78 insertions(+), 89 deletions(-)
diff --git a/frontend/templates/blocks/search.gotmpl b/frontend/templates/blocks/search.gotmpl
index e9d69b6..feb77cc 100644
--- a/frontend/templates/blocks/search.gotmpl
+++ b/frontend/templates/blocks/search.gotmpl
@@ -2,7 +2,7 @@
 {{- template "js" . }}
 
 {{- define "main" }}
-  <form id="search" action="results">
+  <form id="search">
     <label for="query">{{ .Source.Name }} option search</label>
     <fieldset>
       <input id="query" name="query" type="search" value="{{ .Query }}" />
diff --git a/internal/server/mux.go b/internal/server/mux.go
index 95b7e14..d98d415 100644
--- a/internal/server/mux.go
+++ b/internal/server/mux.go
@@ -120,7 +120,7 @@ func NewMux(runtimeConfig *Config) (*http.ServeMux, error) {
 		}
 	})
 
-	const getSourceTimeout = 1 * time.Second
+	const searchTimeout = 1 * time.Second
 	mux.HandleFunc("/options/{source}/search", func(w http.ResponseWriter, r *http.Request) {
 		sourceKey := r.PathValue("source")
 
@@ -131,113 +131,102 @@ func NewMux(runtimeConfig *Config) (*http.ServeMux, error) {
 			return
 		}
 
-		ctx, cancel := context.WithTimeout(r.Context(), getSourceTimeout)
+		ctx, cancel := context.WithTimeout(r.Context(), searchTimeout)
 		defer cancel()
 
-		sourceResult, err := index.GetSource(ctx, sourceKey)
-		if err != nil {
-			http.Error(w, err.Error(), http.StatusInternalServerError)
+		if r.URL.Query().Has("query") {
+			qs := r.URL.Query().Get("query")
+			pg := r.URL.Query().Get("page")
+			var page uint64 = 1
+			if pg != "" {
+				page, err = strconv.ParseUint(pg, 10, 64)
+				if err != nil || page == 0 {
+					http.Error(w, "Bad query string", http.StatusBadRequest)
+				}
+			}
+			results, err := index.Search(ctx, sourceKey, qs, (page-1)*search.ResultsPerPage)
+			if err != nil {
+				if err == context.DeadlineExceeded {
+					http.Error(w, "Search timed out", http.StatusInternalServerError)
 
-			return
-		}
+					return
+				}
+				slog.Error("search error", "error", err)
+				http.Error(w, err.Error(), http.StatusInternalServerError)
+			}
 
-		err = templates["search"].Execute(w, TemplateData{
-			LiveReload:   jsSnippet,
-			Sources:      config.Sources,
-			Source:       *source,
-			SourceResult: sourceResult,
-		})
-		if err != nil {
-			http.Error(w, err.Error(), http.StatusInternalServerError)
+			tdata := ResultData[options.NixOption]{
+				TemplateData: TemplateData{
+					LiveReload: jsSnippet,
+					Source:     *source,
+					Sources:    config.Sources,
+				},
+				ResultsPerPage: search.ResultsPerPage,
+				Query:          qs,
+				Results:        results,
+			}
 
-			return
-		}
-	})
+			hits := uint64(len(results.Hits))
+			if results.Total > hits {
+				q, err := url.ParseQuery(r.URL.RawQuery)
+				if err != nil {
+					http.Error(w, "Query string error", http.StatusBadRequest)
 
-	timeout := 1 * time.Second
-	mux.HandleFunc("/options/{source}/results", func(w http.ResponseWriter, r *http.Request) {
-		sourceKey := r.PathValue("source")
-		ctx, cancel := context.WithTimeoutCause(r.Context(), timeout, errors.New("timeout"))
-		defer cancel()
-		source := config.Sources[sourceKey]
-		if source == nil {
-			http.Error(w, "Unknown source", http.StatusNotFound)
+					return
+				}
 
-			return
-		}
+				if page*search.ResultsPerPage > results.Total {
+					http.Error(w, "Not found", http.StatusNotFound)
 
-		qs := r.URL.Query().Get("query")
-		pg := r.URL.Query().Get("page")
-		var page uint64 = 1
-		if pg != "" {
-			page, err = strconv.ParseUint(pg, 10, 64)
-			if err != nil || page == 0 {
-				http.Error(w, "Bad query string", http.StatusBadRequest)
-			}
-		}
-		results, err := index.Search(ctx, sourceKey, qs, (page-1)*search.ResultsPerPage)
-		if err != nil {
-			if err == context.DeadlineExceeded {
-				http.Error(w, "Search timed out", http.StatusInternalServerError)
+					return
+				}
 
-				return
-			}
-			slog.Error("search error", "error", err)
-			http.Error(w, err.Error(), http.StatusInternalServerError)
-		}
+				if page*search.ResultsPerPage < results.Total {
+					q.Set("page", strconv.FormatUint(page+1, 10))
+					tdata.Next = "search?" + q.Encode()
+				}
 
-		tdata := ResultData[options.NixOption]{
-			TemplateData: TemplateData{
-				LiveReload: jsSnippet,
-				Source:     *source,
-				Sources:    config.Sources,
-			},
-			ResultsPerPage: search.ResultsPerPage,
-			Query:          qs,
-			Results:        results,
-		}
+				if page > 1 {
+					p := page - 1
+					if p == 1 {
+						q.Del("page")
+					} else {
+						q.Set("page", strconv.FormatUint(p, 10))
+					}
+					tdata.Prev = "search?" + q.Encode()
+				}
+			}
 
-		hits := uint64(len(results.Hits))
-		if results.Total > hits {
-			q, err := url.ParseQuery(r.URL.RawQuery)
+			if r.Header.Get("Fetch") == "true" {
+				w.Header().Add("Content-Type", "text/html; charset=utf-8")
+				err = templates["options"].ExecuteTemplate(w, "options.gotmpl", tdata)
+			} else {
+				err = templates["options"].ExecuteTemplate(w, "index.gotmpl", tdata)
+			}
 			if err != nil {
-				http.Error(w, "Query string error", http.StatusBadRequest)
-
-				return
+				slog.Error("template error", "template", "options", "error", err)
+				http.Error(w, err.Error(), http.StatusInternalServerError)
 			}
-
-			if page*search.ResultsPerPage > results.Total {
-				http.Error(w, "Not found", http.StatusNotFound)
+		} else {
+			sourceResult, err := index.GetSource(ctx, sourceKey)
+			if err != nil {
+				http.Error(w, err.Error(), http.StatusInternalServerError)
 
 				return
 			}
 
-			if page*search.ResultsPerPage < results.Total {
-				q.Set("page", strconv.FormatUint(page+1, 10))
-				tdata.Next = "results?" + q.Encode()
-			}
+			err = templates["search"].Execute(w, TemplateData{
+				LiveReload:   jsSnippet,
+				Sources:      config.Sources,
+				Source:       *source,
+				SourceResult: sourceResult,
+			})
+			if err != nil {
+				http.Error(w, err.Error(), http.StatusInternalServerError)
 
-			if page > 1 {
-				p := page - 1
-				if p == 1 {
-					q.Del("page")
-				} else {
-					q.Set("page", strconv.FormatUint(p, 10))
-				}
-				tdata.Prev = "results?" + q.Encode()
+				return
 			}
 		}
-
-		if r.Header.Get("Fetch") == "true" {
-			w.Header().Add("Content-Type", "text/html; charset=utf-8")
-			err = templates["options"].ExecuteTemplate(w, "options.gotmpl", tdata)
-		} else {
-			err = templates["options"].ExecuteTemplate(w, "index.gotmpl", tdata)
-		}
-		if err != nil {
-			slog.Error("template error", "template", "options", "error", err)
-			http.Error(w, err.Error(), http.StatusInternalServerError)
-		}
 	})
 
 	mux.Handle("/static/", http.FileServer(http.FS(frontend.Files)))