about summary refs log tree commit diff stats
path: root/internal/server
diff options
context:
space:
mode:
authorAlan Pearce2025-01-05 20:51:24 +0100
committerAlan Pearce2025-01-05 20:51:24 +0100
commit920764211064521930441b96ed914034302a7470 (patch)
tree17ec08690e47ecc7e3103de7fe1729874ca9527c /internal/server
parentd289e54af28de6b21cbe49cebc262c4e3ca9f6d2 (diff)
downloadsearchix-920764211064521930441b96ed914034302a7470.tar.lz
searchix-920764211064521930441b96ed914034302a7470.tar.zst
searchix-920764211064521930441b96ed914034302a7470.zip
feat: offer to show all results on one page (if < 10000)
Implements: https://todo.sr.ht/~alanpearce/searchix/5
Diffstat (limited to 'internal/server')
-rw-r--r--internal/server/mux.go25
1 file changed, 11 insertions, 14 deletions
diff --git a/internal/server/mux.go b/internal/server/mux.go
index 80c68cb..83fde10 100644
--- a/internal/server/mux.go
+++ b/internal/server/mux.go
@@ -110,11 +110,15 @@ func NewMux(
 				var pageNumber = 1
 				if pg := r.URL.Query().Get("page"); pg != "" {
 					pageNumber, err = strconv.Atoi(pg)
-					if err != nil || pageNumber <= 0 || pageNumber > math.MaxInt {
+					if err != nil || pageNumber > math.MaxInt {
 						errorHandler(w, r, "Bad query string", http.StatusBadRequest)
 
 						return
 					}
+					if pageNumber == 0 {
+						pageNumber = 1
+						pageSize = math.MaxInt
+					}
 				}
 				page := pagination.New(pageNumber, pageSize)
 				results, err := index.Search(ctx, source, qs, page.From, page.Size)
@@ -129,6 +133,9 @@ func NewMux(
 
 					return
 				}
+				if pageSize == math.MaxInt && results.Total > config.MaxResultsShowAll {
+					errorHandler(w, r, "Too many results, use pagination", http.StatusBadRequest)
+				}
 
 				tdata := components.ResultData{
 					TemplateData: components.TemplateData{
@@ -143,19 +150,6 @@ func NewMux(
 				}
 
 				page.SetResults(results.Total)
-				log.Debug(
-					"pag",
-					"needed",
-					page.Needed,
-					"current",
-					page.Current,
-					"total",
-					results.Total,
-					"next",
-					page.Next,
-					"prev",
-					page.Prev,
-				)
 				if page.Needed {
 					q, err := url.ParseQuery(r.URL.RawQuery)
 					if err != nil {
@@ -177,6 +171,9 @@ func NewMux(
 						}
 						tdata.Prev = "search?" + q.Encode()
 					}
+
+					q.Set("page", "0")
+					tdata.All = "search?" + q.Encode()
 				}
 
 				w.Header().Add("Cache-Control", "max-age=300")