refactor: return errors with stack traces, where appropriate
30 files changed, 248 insertions(+), 192 deletions(-)
jump to
- cmd/build/main.go
- internal/atom/atom.go
- internal/buffer/buffer.go
- internal/builder/builder.go
- internal/builder/template/template.go
- internal/config/config.go
- internal/config/cspgenerator.go
- internal/content/posts.go
- internal/events/file.go
- internal/events/redis.go
- internal/events/update.go
- internal/fetcher/fetcher.go
- internal/file/file.go
- internal/markdown/markdown.go
- internal/multibuf/writecloser.go
- internal/multifile/compress.go
- internal/multifile/multifile.go
- internal/server/server.go
- internal/server/tcp.go
- internal/stats/goatcounter/count.go
- internal/storage/file.go
- internal/storage/files/file.go
- internal/storage/files/reader.go
- internal/storage/files/writer.go
- internal/storage/interface.go
- internal/storage/sqlite/reader.go
- internal/storage/sqlite/writer.go
- internal/vcs/filelog.go
- internal/vcs/repository.go
- internal/website/website.go
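
The diffs below all follow one pattern: exported functions return errors.E from gitlab.com/tozd/go/errors instead of the plain error interface, and errors coming out of the standard library or third-party code are wrapped with errors.WithStack at the point where they enter this codebase, so a stack trace is recorded once, close to the failure. A minimal sketch of that pattern, assuming only the tozd/go/errors API used in this commit (the LoadGreeting function and greeting-file path are made up for illustration, not part of the repository):

    package example

    import (
    	"os"

    	"gitlab.com/tozd/go/errors"
    )

    // LoadGreeting is a hypothetical function showing the convention adopted
    // in this commit: the signature promises errors.E, so every error it
    // returns carries a stack trace.
    func LoadGreeting(path string) (string, errors.E) {
    	// os.ReadFile returns a plain error; errors.WithStack records the
    	// stack trace here, at the boundary between stdlib and this code.
    	data, err := os.ReadFile(path)
    	if err != nil {
    		return "", errors.WithStack(err)
    	}

    	if len(data) == 0 {
    		// errors.New from tozd/go/errors already attaches a stack trace.
    		return "", errors.New("greeting file is empty")
    	}

    	return string(data), nil
    }

Because errors.E embeds the standard error interface, existing callers that only need error keep working; only functions whose declared return type changes need to be touched, which is what most of the hunks below do.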
M cmd/build/main.go → cmd/build/main.go
@@ -59,13 +59,13 @@ panic("could not create storage: " + err.Error()) } case "sqlite": var stat os.FileInfo - stat, err = os.Stat(options.Destination) + stat, err = file.Stat(options.Destination) if err != nil && !errors.Is(err, os.ErrNotExist) { panic("could not stat destination: " + err.Error()) } switch { case stat == nil: - err = os.MkdirAll(options.Destination, 0o0755) + err := os.MkdirAll(options.Destination, 0o0755) if err != nil { panic("could not make directory: " + err.Error()) }
M internal/atom/atom.go → internal/atom/atom.go
@@ -6,6 +6,7 @@
     "encoding/xml"
     "net/url"
     "time"
 
+    "gitlab.com/tozd/go/errors"
     "go.alanpearce.eu/homestead/internal/config"
 )
@@ -13,18 +14,18 @@
 func MakeTagURI(config *config.Config, specific string) string {
     return "tag:" + config.OriginalDomain + "," + config.DomainStartDate + ":" + specific
 }
 
-func LinkXSL(w *bytes.Buffer, url string) error {
+func LinkXSL(w *bytes.Buffer, url string) errors.E {
     _, err := w.WriteString(`<?xml-stylesheet href="`)
     if err != nil {
-        return err
+        return errors.WithStack(err)
     }
     err = xml.EscapeText(w, []byte(url))
     if err != nil {
-        return err
+        return errors.WithStack(err)
     }
     _, err = w.WriteString(`" type="text/xsl"?>`)
     if err != nil {
-        return err
+        return errors.WithStack(err)
     }
 
     return nil
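
A note on the caller side of this change: errors.E satisfies the standard error interface, so call sites that only test for nil are untouched, while call sites that want the trace can format the value with %+v (the usual tozd/go/errors formatting; plain %v prints just the message). An illustrative sketch only, since atom is an internal package and this code would have to live inside the homestead module:

    package main

    import (
    	"bytes"
    	"fmt"

    	"go.alanpearce.eu/homestead/internal/atom"
    )

    func main() {
    	var buf bytes.Buffer
    	if err := atom.LinkXSL(&buf, "/feed-styles.xsl"); err != nil {
    		// %+v is expected to include the stack recorded by
    		// errors.WithStack inside LinkXSL; %v would print only the message.
    		fmt.Printf("%+v\n", err)
    		return
    	}
    	fmt.Println(buf.String())
    }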
M internal/buffer/buffer.go → internal/buffer/buffer.go
@@ -19,6 +19,7 @@
     }
 }
 
 // Read implements io.Reader's Read method
+// Return normal error for special io.EOF handling
 func (b *Buffer) Read(p []byte) (int, error) {
     if b.pos >= b.len {
         return 0, io.EOF
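
The comment added above is why Read keeps the plain error return type: io.Reader consumers such as io.Copy and io.ReadAll compare the returned error directly against the io.EOF sentinel (err == io.EOF), so wrapping it with errors.WithStack would break them even though errors.Is would still match. A sketch of the consumer-side handling this preserves (drain is illustrative, not part of the repository):

    package main

    import (
    	"io"

    	"go.alanpearce.eu/homestead/internal/buffer"
    )

    // drain copies a Buffer to a writer by hand, showing the sentinel check
    // that requires Read to return the bare io.EOF value.
    func drain(w io.Writer, b *buffer.Buffer) error {
    	p := make([]byte, 512)
    	for {
    		n, err := b.Read(p)
    		if n > 0 {
    			if _, werr := w.Write(p[:n]); werr != nil {
    				return werr
    			}
    		}
    		if err == io.EOF {
    			return nil // end of buffer, not a failure
    		}
    		if err != nil {
    			return err
    		}
    	}
    }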
M internal/builder/builder.go → internal/builder/builder.go
@@ -39,20 +39,20 @@
         return filepath.Join(src, rel)
     }
 }
 
-func copyFile(storage storage.Writer, src string, rel string) error {
+func copyFile(storage storage.Writer, src string, rel string) errors.E {
     buf := new(buffer.Buffer)
     sf, err := os.Open(src)
     if err != nil {
-        return err
+        return errors.WithStack(err)
     }
     defer sf.Close()
 
     buf.Reset()
     if _, err := io.Copy(buf, sf); err != nil {
-        return err
+        return errors.WithStack(err)
    }
     if err := storage.Write("/"+rel, "", buf); err != nil {
-        return err
+        return errors.WithStack(err)
     }
 
     return nil
@@ -62,7 +62,7 @@ func build(
     options *Options,
     config *config.Config,
     log *log.Logger,
-) error {
+) errors.E {
     ctx := context.TODO()
     buf := new(buffer.Buffer)
     joinSource := joinSourcePath(options.Source)
@@ -76,7 +76,7 @@
         PostDir: postDir,
         Repo:    options.Repo,
     }, log.Named("content"))
     if err != nil {
-        return err
+        return errors.WithStack(err)
     }
     sitemap := sitemap.New(config)
@@ -94,17 +94,17 @@
             return errors.WithMessage(err, "could not render post")
         }
         if err := storage.WritePost(post, buf); err != nil {
-            return err
+            return errors.WithStack(err)
         }
     }
 
     log.Debug("rendering tags list")
     buf.Reset()
     if err := templates.TagsPage(config, "tags", mapset.Sorted(cc.Tags), "/tags").Render(ctx, buf); err != nil {
-        return err
+        return errors.WithStack(err)
     }
     if err := storage.Write("/tags/", "Tags", buf); err != nil {
-        return err
+        return errors.WithStack(err)
     }
 
     sitemap.AddPath("/tags/", lastMod)
@@ -119,10 +119,10 @@
         log.Debug("rendering tags page", "tag", tag)
         url := path.Join("/tags", tag) + "/"
         buf.Reset()
         if err := templates.TagPage(config, tag, matchingPosts, url).Render(ctx, buf); err != nil {
-            return err
+            return errors.WithStack(err)
         }
         if err = storage.Write(url, tag, buf); err != nil {
-            return err
+            return errors.WithStack(err)
         }
         sitemap.AddPath(url, matchingPosts[0].Date)
@@ -139,20 +139,20 @@
             return errors.WithMessage(err, "could not render tag feed page")
         }
         buf.Reset()
         if _, err := feed.WriteTo(buf); err != nil {
-            return err
+            return errors.WithStack(err)
         }
         if err := storage.Write(path.Join("/tags", tag, "atom.xml"), title, buf); err != nil {
-            return err
+            return errors.WithStack(err)
         }
     }
 
     log.Debug("rendering list page")
     buf.Reset()
     if err := templates.ListPage(config, cc.Posts, path.Join("/", postDir)).Render(ctx, buf); err != nil {
-        return err
+        return errors.WithStack(err)
     }
     if err := storage.Write(path.Join("/", postDir)+"/", "Posts", buf); err != nil {
-        return err
+        return errors.WithStack(err)
     }
     sitemap.AddPath(path.Join("/", postDir)+"/", lastMod)
@@ -163,10 +163,10 @@
         return errors.WithMessage(err, "could not render feed")
     }
     buf.Reset()
     if _, err := feed.WriteTo(buf); err != nil {
-        return err
+        return errors.WithStack(err)
     }
     if err := storage.Write("/atom.xml", config.Title, buf); err != nil {
-        return err
+        return errors.WithStack(err)
     }
     for _, filename := range []string{"feed-styles.xsl", "style.css"} {
@@ -176,7 +176,7 @@
         if err := template.CopyFile(filename, buf); err != nil {
             return errors.WithMessagef(err, "could not render template file %s", filename)
         }
         if err := storage.Write("/"+filename, "", buf); err != nil {
-            return err
+            return errors.WithStack(err)
         }
     }
@@ -185,17 +185,17 @@
         buf.Reset()
         log.Debug("rendering page", "source", post.Input, "path", post.URL)
         if post.URL == "/" {
             if err := templates.Homepage(config, cc.Posts, post).Render(ctx, buf); err != nil {
-                return err
+                return errors.WithStack(err)
             }
         } else {
             if err := templates.Page(config, post).Render(ctx, buf); err != nil {
-                return err
+                return errors.WithStack(err)
             }
         }
         file := storage.NewFileFromPost(post)
         file.ContentType = "text/html; charset=utf-8"
         if err := storage.WriteFile(file, buf); err != nil {
-            return err
+            return errors.WithStack(err)
         }
     }
@@ -203,31 +203,30 @@
     // it would be nice to set LastMod here, but using the latest post
     // date would be wrong as the homepage has its own content file
     // without a date, which could be newer
     sitemap.AddPath("/", time.Time{})
-    err = buf.SeekStart()
-    if err != nil {
-        return err
+    if err := buf.SeekStart(); err != nil {
+        return errors.WithStack(err)
     }
 
     log.Debug("rendering sitemap")
     buf.Reset()
     if _, err := sitemap.WriteTo(buf); err != nil {
-        return err
+        return errors.WithStack(err)
     }
     if err := storage.Write("/sitemap.xml", "sitemap", buf); err != nil {
-        return err
+        return errors.WithStack(err)
     }
 
     log.Debug("rendering robots.txt")
     rob, err := template.RenderRobotsTXT(config)
     if err != nil {
-        return err
+        return errors.WithStack(err)
     }
     buf.Reset()
     if _, err := io.Copy(buf, rob); err != nil {
-        return err
+        return errors.WithStack(err)
     }
     if err := storage.Write("/robots.txt", "", buf); err != nil {
-        return err
+        return errors.WithStack(err)
     }
     for _, sf := range cc.StaticFiles {
@@ -235,14 +234,14 @@
         src := joinSource(sf)
         log.Debug("copying static file", "sf", sf, "src", src)
         err = copyFile(storage, src, sf)
         if err != nil {
-            return err
+            return errors.WithStack(err)
         }
     }
 
     return nil
 }
 
-func BuildSite(options *Options, cfg *config.Config, log *log.Logger) error {
+func BuildSite(options *Options, cfg *config.Config, log *log.Logger) errors.E {
     if cfg == nil {
         return errors.New("config is nil")
     }
M internal/builder/template/template.go → internal/builder/template/template.go
@@ -33,7 +33,7 @@
 type QueryDocument struct {
     *goquery.Document
 }
 
-func NewDocumentFromReader(r io.Reader) (*QueryDocument, error) {
+func NewDocumentFromReader(r io.Reader) (*QueryDocument, errors.E) {
     doc, err := goquery.NewDocumentFromReader(r)
     return &QueryDocument{doc}, errors.WithMessage(err, "could not create query document")
@@ -43,11 +43,11 @@
 func (q *QueryDocument) Find(selector string) *QuerySelection {
     return &QuerySelection{q.Document.Find(selector)}
 }
 
-func RenderRobotsTXT(config *config.Config) (io.Reader, error) {
+func RenderRobotsTXT(config *config.Config) (io.Reader, errors.E) {
     r, w := io.Pipe()
     tpl, err := template.ParseFS(templates.Files, "robots.tmpl")
     if err != nil {
-        return nil, err
+        return nil, errors.WithStack(err)
     }
     go func() {
         err = tpl.Execute(w, map[string]interface{}{
@@ -67,7 +67,7 @@
     title string,
     config *config.Config,
     posts []*content.Post,
     specific string,
-) (io.WriterTo, error) {
+) (io.WriterTo, errors.E) {
     buf := &bytes.Buffer{}
     datetime := posts[0].Date.UTC()
@@ -103,32 +103,31 @@
             },
         }
     }
     enc := xml.NewEncoder(buf)
-    err = enc.Encode(feed)
-    if err != nil {
-        return nil, err
+    if err := enc.Encode(feed); err != nil {
+        return nil, errors.WithStack(err)
     }
 
     return buf, nil
 }
 
-func CopyFile(filename string, w io.Writer) error {
+func CopyFile(filename string, w io.Writer) errors.E {
     f, err := templates.Files.Open(filename)
     if err != nil {
-        return err
+        return errors.WithStack(err)
     }
     defer f.Close()
     if _, err := io.Copy(w, f); err != nil {
-        return err
+        return errors.WithStack(err)
     }
 
     return nil
 }
 
-func GetFeedStyleHash(r io.Reader) (string, error) {
+func GetFeedStyleHash(r io.Reader) (string, errors.E) {
     doc, err := xmlquery.Parse(r)
     if err != nil {
-        return "", err
+        return "", errors.WithStack(err)
     }
     expr, err := xpath.CompileWithNS("//xhtml:style", nsMap)
     if err != nil {
@@ -139,7 +138,7 @@
     return Hash(style.InnerText()), nil
 }
 
-func GetHTMLStyleHash(r io.Reader) (string, error) {
+func GetHTMLStyleHash(r io.Reader) (string, errors.E) {
     doc, err := NewDocumentFromReader(r)
     if err != nil {
         return "", err
M internal/config/config.go → internal/config/config.go
@@ -52,7 +52,7 @@
     }
     Menus map[string][]MenuItem
 }
 
-func GetConfig(dir string, log *log.Logger) (*Config, error) {
+func GetConfig(dir string, log *log.Logger) (*Config, errors.E) {
     config := Config{}
     filename := filepath.Join(dir, "config.toml")
     log.Debug("reading config", "filename", filename)
M internal/config/cspgenerator.go → internal/config/cspgenerator.go
@@ -12,7 +12,7 @@
     "github.com/fatih/structtag"
     "gitlab.com/tozd/go/errors"
 )
 
-func GenerateCSP() error {
+func GenerateCSP() errors.E {
     t := reflect.TypeFor[csp.Header]()
     file, err := os.OpenFile("./csp.go", os.O_CREATE|os.O_TRUNC|os.O_WRONLY, 0)
     if err != nil {
M internal/content/posts.go → internal/content/posts.go
@@ -94,7 +94,7 @@
     "index.md", "",
     ".md", "",
 )
 
-func (cc *Collection) GetPage(filename string) (*Post, error) {
+func (cc *Collection) GetPage(filename string) (*Post, errors.E) {
     fp := filepath.Join(cc.config.Root, filename)
     url := path.Join("/", pageURLReplacer.Replace(filename))
     cs, err := cc.config.Repo.GetFileLog(filename)
@@ -121,7 +121,7 @@
     return post, nil
 }
 
-func parse(fp string, post *Post) error {
+func parse(fp string, post *Post) errors.E {
     content, err := os.Open(fp)
     if err != nil {
         return errors.WithMessagef(err, "could not open post %s", fp)
@@ -145,7 +145,7 @@
 func (p *Post) Render(_ context.Context, w io.Writer) error {
     return markdown.Convert(p.content, w)
 }
 
-func (p *Post) RenderString() (string, error) {
+func (p *Post) RenderString() (string, errors.E) {
     var buf bytes.Buffer
     if err := p.Render(context.Background(), &buf); err != nil {
         return "", errors.WithMessage(err, "could not convert markdown content")
@@ -191,7 +191,7 @@
     return nil
 }
 
-func NewContentCollection(config *Config, log *log.Logger) (*Collection, error) {
+func NewContentCollection(config *Config, log *log.Logger) (*Collection, errors.E) {
     cc := &Collection{
         Posts: []*Post{},
         Tags:  mapset.NewSet[string](),
M internal/events/file.go → internal/events/file.go
@@ -29,7 +29,7 @@
     log *log.Logger
     *fsnotify.Watcher
 }
 
-func NewFileWatcher(logger *log.Logger, dirs ...string) (*FileWatcher, error) {
+func NewFileWatcher(logger *log.Logger, dirs ...string) (*FileWatcher, errors.E) {
     fsn, err := fsnotify.NewWatcher()
     if err != nil {
         return nil, errors.WithMessage(err, "could not create file watcher")
@@ -65,7 +65,7 @@
 func (fw *FileWatcher) ignored(pathname string) bool {
     return slices.ContainsFunc(ignores, fw.matches(path.Base(pathname)))
 }
 
-func (fw *FileWatcher) AddRecursive(from string) error {
+func (fw *FileWatcher) AddRecursive(from string) errors.E {
     fw.log.Debug("walking directory tree", "root", from)
     err := filepath.WalkDir(from, func(path string, entry fs.DirEntry, err error) error {
         if err != nil {
@@ -89,11 +89,11 @@
     return errors.WithMessage(err, "error walking directory tree")
 }
 
-func (fw *FileWatcher) GetLatestRunID() (uint64, error) {
+func (fw *FileWatcher) GetLatestRunID() (uint64, errors.E) {
     return 0, nil
 }
 
-func (fw *FileWatcher) Subscribe() (<-chan Event, error) {
+func (fw *FileWatcher) Subscribe() (<-chan Event, errors.E) {
     var timer *time.Timer
     events := make(chan Event, 1)
M internal/events/redis.go → internal/events/redis.go
@@ -38,7 +38,7 @@
     client *redis.Client
     log    *log.Logger
 }
 
-func NewRedisListener(opts *RedisOptions, log *log.Logger) (*RedisListener, error) {
+func NewRedisListener(opts *RedisOptions, log *log.Logger) (*RedisListener, errors.E) {
     clientConfig := &redis.Options{
         Addr:     opts.Address,
         Username: opts.Username,
@@ -69,7 +69,7 @@
         log:    log,
     }, nil
 }
 
-func (rl *RedisListener) GetLatestRunID() (uint64, error) {
+func (rl *RedisListener) GetLatestRunID() (uint64, errors.E) {
     ctx, cancel := context.WithTimeout(context.TODO(), 2*time.Second)
     defer cancel()
@@ -93,7 +93,7 @@
 func getKeyspaceName(key string) string {
     return fmt.Sprintf("__keyspace@%d__:%s", db, key)
 }
 
-func (rl *RedisListener) Subscribe() (<-chan Event, error) {
+func (rl *RedisListener) Subscribe() (<-chan Event, errors.E) {
     events := make(chan Event, 1)
     ctx := context.TODO()
     channel := getKeyspaceName(key)
M internal/events/update.go → internal/events/update.go
@@ -1,6 +1,9 @@
 package events
 
-import "github.com/fsnotify/fsnotify"
+import (
+    "github.com/fsnotify/fsnotify"
+    "gitlab.com/tozd/go/errors"
+)
 
 type CIEvent struct {
     RunID uint64
@@ -16,6 +19,6 @@
     FSEvent
 }
 
 type Listener interface {
-    GetLatestRunID() (uint64, error)
-    Subscribe() (<-chan Event, error)
+    GetLatestRunID() (uint64, errors.E)
+    Subscribe() (<-chan Event, errors.E)
 }
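
With the interface methods now declared as errors.E, every Listener implementation has to change its signatures to match exactly, as FileWatcher and RedisListener do in the hunks above and below. A minimal stub satisfying the updated interface, useful as a test double; the type and its field are hypothetical, not part of this commit:

    package events

    import "gitlab.com/tozd/go/errors"

    // stubListener is a hypothetical no-op Listener for tests.
    type stubListener struct {
    	runID uint64
    }

    func (s *stubListener) GetLatestRunID() (uint64, errors.E) {
    	if s.runID == 0 {
    		// errors.New attaches a stack trace at this point.
    		return 0, errors.New("no runs recorded")
    	}
    	return s.runID, nil
    }

    func (s *stubListener) Subscribe() (<-chan Event, errors.E) {
    	ch := make(chan Event)
    	close(ch) // subscribers see an immediately-closed channel
    	return ch, nil
    }

    // compile-time check that the stub satisfies the interface
    var _ Listener = (*stubListener)(nil)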
M internal/fetcher/fetcher.go → internal/fetcher/fetcher.go
@@ -45,7 +45,7 @@
         updater: options.Listener,
     }
 }
 
-func (f *Fetcher) getArtefacts(run uint64) error {
+func (f *Fetcher) getArtefacts(run uint64) errors.E {
     runID := strconv.FormatUint(run, 10)
     f.log.Debug("getting artefacts", "run_id", runID)
@@ -70,7 +70,7 @@
     return nil
 }
 
-func (f *Fetcher) checkFolder() error {
+func (f *Fetcher) checkFolder() errors.E {
     contents, err := os.ReadDir(f.options.Root)
     if err != nil {
         return errors.WithMessage(err, "could not read root directory")
@@ -84,13 +84,15 @@
         }
     }
     if len(badFiles) > 0 {
-        return errors.Basef("unexpected files in root directory: %s", strings.Join(badFiles, ", "))
+        return errors.WithStack(
+            errors.Basef("unexpected files in root directory: %s", strings.Join(badFiles, ", ")),
+        )
     }
 
     return nil
 }
 
-func (f *Fetcher) CleanOldRevisions() error {
+func (f *Fetcher) CleanOldRevisions() errors.E {
     contents, err := os.ReadDir(f.options.Root)
     if err != nil {
         return errors.WithMessage(err, "could not read root directory")
@@ -125,7 +127,7 @@
         }
     }()
 }
 
-func (f *Fetcher) getFile(runID, basename string) error {
+func (f *Fetcher) getFile(runID, basename string) errors.E {
     filename := filepath.Join(f.options.Root, runID, basename)
     url := f.options.FetchURL.JoinPath(runID, basename).String()
@@ -162,7 +164,7 @@
     return nil
 }
 
-func (f *Fetcher) getCurrentVersion() (uint64, error) {
+func (f *Fetcher) getCurrentVersion() (uint64, errors.E) {
     target, err := os.Readlink(filepath.Join(f.options.Root, "current"))
     if err != nil && errors.Is(err, fs.ErrNotExist) {
         return 0, errors.WithMessage(err, "could not stat current link")
@@ -175,7 +177,7 @@
     return f.current, nil
 }
 
-func (f *Fetcher) initialiseStorage() (uint64, error) {
+func (f *Fetcher) initialiseStorage() (uint64, errors.E) {
     latest, err := f.updater.GetLatestRunID()
     if err != nil {
         f.log.Warn("could not get latest run ID, using fallback", "error", err)
@@ -196,7 +198,7 @@
     return f.current, nil
 }
 
-func (f *Fetcher) Subscribe() (<-chan string, error) {
+func (f *Fetcher) Subscribe() (<-chan string, errors.E) {
     ch := make(chan string, 1)
     err := f.checkFolder()
     if err != nil {
M internal/file/file.go → internal/file/file.go
@@ -9,8 +9,17 @@
     "gitlab.com/tozd/go/errors"
 )
 
+func Stat(path string) (fs.FileInfo, errors.E) {
+    stat, err := os.Stat(path)
+    if err != nil && !errors.Is(err, fs.ErrNotExist) {
+        return nil, errors.WithStack(err)
+    }
+
+    return stat, nil
+}
+
 func Exists(path string) bool {
-    stat, err := os.Stat(path)
+    stat, err := Stat(path)
     if err != nil && !errors.Is(err, fs.ErrNotExist) {
         panic("could not stat path " + path + ": " + err.Error())
     }
@@ -18,7 +27,7 @@
     return stat != nil
 }
 
-func Copy(src, dest string) error {
+func Copy(src, dest string) errors.E {
     stat, err := os.Stat(src)
     if err != nil {
         return errors.WithMessage(err, "could not stat source file")
@@ -48,15 +57,15 @@
     return nil
 }
 
-func CleanDir(dir string) (err error) {
+func CleanDir(dir string) errors.E {
     files, err := os.ReadDir(dir)
     if err != nil {
-        return errors.WithMessage(err, "could not read directory")
+        return errors.WithMessage(errors.WithStack(err), "could not read directory")
    }
     for _, file := range files {
         err = errors.Join(err, os.RemoveAll(filepath.Join(dir, file.Name())))
     }
 
-    return err
+    return errors.WithStack(err)
 }
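
The new file.Stat helper centralises the "not existing is fine" stat pattern that cmd/build/main.go now uses: a missing path yields (nil, nil), while any other stat failure comes back already wrapped with a stack trace. A usage sketch, where ensureDir and the destination path are illustrative rather than code from the repository:

    package main

    import (
    	"fmt"
    	"os"

    	"go.alanpearce.eu/homestead/internal/file"
    )

    func ensureDir(dest string) error {
    	stat, err := file.Stat(dest)
    	if err != nil {
    		// a real stat failure (permissions, I/O); already carries a stack trace
    		return err
    	}
    	if stat == nil {
    		// path does not exist yet
    		return os.MkdirAll(dest, 0o755)
    	}
    	if !stat.IsDir() {
    		return fmt.Errorf("%s exists but is not a directory", dest)
    	}
    	return nil
    }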
M internal/markdown/markdown.go → internal/markdown/markdown.go
@@ -7,6 +7,7 @@
     fences "github.com/stefanfritsch/goldmark-fences"
     "github.com/yuin/goldmark"
     "github.com/yuin/goldmark/extension"
     htmlrenderer "github.com/yuin/goldmark/renderer/html"
+    "gitlab.com/tozd/go/errors"
 )
 
 var markdown = goldmark.New(
@@ -21,6 +22,6 @@
         &fences.Extender{},
     ),
 )
 
-func Convert(content []byte, w io.Writer) error {
-    return markdown.Convert(content, w)
+func Convert(content []byte, w io.Writer) errors.E {
+    return errors.WithStack(markdown.Convert(content, w))
 }
M internal/multibuf/writecloser.go → internal/multibuf/writecloser.go
@@ -2,6 +2,8 @@ package multibuf
 
 import (
     "io"
+
+    "gitlab.com/tozd/go/errors"
 )
 
 type WriteCloser struct {
@@ -30,5 +32,5 @@
             lastErr = err
         }
     }
 
-    return lastErr
+    return errors.WithStack(lastErr)
 }
M internal/multifile/compress.go → internal/multifile/compress.go
@@ -1,9 +1,10 @@
 package multifile
 
 import (
-    "errors"
     "io"
     "os"
+
+    "gitlab.com/tozd/go/errors"
 )
 
 type CompressWriter struct {
@@ -18,10 +19,15 @@
         Writer: writer,
     }
 }
-func (cw *CompressWriter) Write(p []byte) (n int, err error) {
-    return cw.Writer.Write(p)
+func (cw *CompressWriter) Write(p []byte) (int, error) {
+    n, err := cw.Writer.Write(p)
+    if err != nil {
+        return 0, errors.WithStack(err)
+    }
+
+    return n, nil
 }
 
 func (cw *CompressWriter) Close() error {
-    return errors.Join(cw.Writer.Close(), cw.File.Close())
+    return errors.WithStack(errors.Join(cw.Writer.Close(), cw.File.Close()))
 }
M internal/multifile/multifile.go → internal/multifile/multifile.go
@@ -39,14 +39,14 @@
             lastErr = err
         }
     }
-    return lastErr
+    return errors.WithStack(lastErr)
 }
 
 func (mf *MultiFile) Name() string {
     return mf.files[0].Name()
 }
 
-func (mf *MultiFile) Chtimes(mtime time.Time) error {
+func (mf *MultiFile) Chtimes(mtime time.Time) errors.E {
     var lastErr error
     for _, f := range mf.files {
         err := os.Chtimes(f.Name(), mtime, mtime)
@@ -55,5 +55,5 @@
             lastErr = errors.Join(lastErr, err)
         }
     }
-    return lastErr
+    return errors.WithStack(lastErr)
 }
M internal/server/server.go → internal/server/server.go
@@ -48,7 +48,7 @@
         wrappedHandler.ServeHTTP(w, r)
     })
 }
 
-func New(options *Options, log *log.Logger) (*Server, error) {
+func New(options *Options, log *log.Logger) (*Server, errors.E) {
     return &Server{
         mux: http.NewServeMux(),
         log: log,
@@ -56,7 +56,7 @@
         options: options,
     }, nil
 }
 
-func (s *Server) HostApp(app *App) error {
+func (s *Server) HostApp(app *App) errors.E {
     if app.Domain == "" {
         return errors.New("app needs a domain")
     }
@@ -76,7 +76,7 @@
     }
     s.mux.Handle("/", app.Handler)
 }
 
-func (s *Server) Start() error {
+func (s *Server) Start() errors.E {
     top := http.NewServeMux()
     top.Handle("/", serverHeaderHandler(
M internal/server/tcp.go → internal/server/tcp.go
@@ -4,10 +4,11 @@ import (
     "net"
     "strconv"
 
+    "gitlab.com/tozd/go/errors"
     "go.alanpearce.eu/x/listenfd"
 )
 
-func (s *Server) serveTCP() error {
+func (s *Server) serveTCP() errors.E {
     addr := joinHostPort(s.options.ListenAddress, s.options.Port)
     s.log.Debug(
         "fallback listener",
@@ -16,10 +17,10 @@
         addr,
     )
     l, err := listenfd.GetListener(0, addr, s.log.Named("tcp.listenfd"))
     if err != nil {
-        return err
+        return errors.WithStack(err)
     }
 
-    return s.server.Serve(l)
+    return errors.WithStack(s.server.Serve(l))
 }
 
 func joinHostPort(host string, port int) string {
M internal/stats/goatcounter/count.go → internal/stats/goatcounter/count.go
@@ -69,7 +69,7 @@
         gc.log.Warn("could not log page view", "error", err)
     }
 }
 
-func (gc *Goatcounter) count(userReq *http.Request, title string) error {
+func (gc *Goatcounter) count(userReq *http.Request, title string) errors.E {
     body, err := json.Marshal(&countBody{
         NoSessions: true,
         Hits: []hit{
M internal/storage/file.go → internal/storage/file.go
@@ -29,13 +29,13 @@
     return encs
 }
 
-func (f *File) CalculateStyleHash() (err error) {
+func (f *File) CalculateStyleHash() (err errors.E) {
     buf := f.Encodings["identity"]
     if buf == nil {
         return errors.New("buffer not initialised")
     }
-    _, err = buf.Seek(0, io.SeekStart)
-    if err != nil {
+
+    if _, err := buf.Seek(0, io.SeekStart); err != nil {
         return errors.WithMessage(err, "could not seek buffer")
     }
@@ -49,8 +49,7 @@
         }
     default:
         return
     }
-    _, err = buf.Seek(0, io.SeekStart)
-    if err != nil {
+    if _, err := buf.Seek(0, io.SeekStart); err != nil {
         return errors.WithMessage(err, "could not seek buffer")
     }
M internal/storage/files/file.go → internal/storage/files/file.go
@@ -20,7 +20,7 @@ "gzip": ".gz", "zstd": ".zstd", } -func (r *Reader) OpenFile(path string, filename string) (*storage.File, error) { +func (r *Reader) OpenFile(path string, filename string) (*storage.File, errors.E) { f, err := os.Open(filename) if err != nil { return nil, errors.WithMessage(err, "could not open file for reading")@@ -53,7 +53,7 @@ } err = file.CalculateStyleHash() if err != nil { - return nil, err + return nil, errors.WithStack(err) } for enc, suffix := range encodings {@@ -76,7 +76,7 @@ return file, nil } -func etag(f io.Reader) (string, error) { +func etag(f io.Reader) (string, errors.E) { hash := fnv.New64a() if _, err := io.Copy(hash, f); err != nil { return "", errors.WithMessage(err, "could not hash file")
M internal/storage/files/reader.go → internal/storage/files/reader.go
@@ -17,7 +17,7 @@
     log   *log.Logger
     files map[string]*storage.File
 }
 
-func NewReader(path string, log *log.Logger) (*Reader, error) {
+func NewReader(path string, log *log.Logger) (*Reader, errors.E) {
     r := &Reader{
         root: path,
         log:  log,
@@ -30,7 +30,7 @@
     return r, nil
 }
 
-func (r *Reader) registerFile(urlpath string, filepath string) error {
+func (r *Reader) registerFile(urlpath string, filepath string) errors.E {
     file, err := r.OpenFile(urlpath, filepath)
     if err != nil {
         return errors.WithMessagef(err, "could not register file %s", filepath)
@@ -41,7 +41,7 @@
     return nil
 }
 
-func (r *Reader) registerContentFiles() error {
+func (r *Reader) registerContentFiles() errors.E {
     err := filepath.WalkDir(r.root, func(filePath string, f fs.DirEntry, err error) error {
         if err != nil {
             return errors.WithMessagef(err, "failed to access path %s", filePath)
@@ -72,7 +72,7 @@
     return nil
 }
 
-func (r *Reader) GetFile(urlPath string) (*storage.File, error) {
+func (r *Reader) GetFile(urlPath string) (*storage.File, errors.E) {
     return r.files[urlPath], nil
 }
M internal/storage/files/writer.go → internal/storage/files/writer.go
@@ -32,7 +32,7 @@
 type Options struct {
     Compress bool
 }
 
-func NewWriter(outputDirectory string, logger *log.Logger, opts *Options) (*Files, error) {
+func NewWriter(outputDirectory string, logger *log.Logger, opts *Options) (*Files, errors.E) {
     return &Files{
         outputDirectory: outputDirectory,
         options:         opts,
@@ -40,28 +40,33 @@
         log: logger,
     }, nil
 }
 
-func (f *Files) OpenRead(filename string) (io.ReadCloser, error) {
-    return os.Open(f.join(filename))
+func (f *Files) OpenRead(filename string) (io.ReadCloser, errors.E) {
+    file, err := os.Open(f.join(filename))
+    if err != nil {
+        return nil, errors.WithStack(err)
+    }
+
+    return file, nil
 }
 
-func (f *Files) OpenWrite(filename string) (io.WriteCloser, error) {
+func (f *Files) OpenWrite(filename string) (io.WriteCloser, errors.E) {
     return openFileWrite(f.join(filename))
 }
 
-func (f *Files) WritePost(post *content.Post, content *buffer.Buffer) error {
+func (f *Files) WritePost(post *content.Post, content *buffer.Buffer) errors.E {
     fd, err := f.write(post.URL, content)
     if err != nil {
         return err
     }
-    err = fd.Close()
-    if err != nil {
-        return err
+
+    if err := fd.Close(); err != nil {
+        return errors.WithStack(err)
     }
     if mf, isMultifile := fd.(*multifile.MultiFile); isMultifile {
         err = mf.Chtimes(post.Date)
     } else {
-        err = os.Chtimes(fd.Name(), post.Date, post.Date)
+        err = errors.WithStack(os.Chtimes(fd.Name(), post.Date, post.Date))
     }
     if err != nil {
         return errors.WithMessage(err, "could not set file times")
@@ -70,7 +75,7 @@
     return nil
 }
 
-func (f *Files) Write(pathname string, _ string, content *buffer.Buffer) error {
+func (f *Files) Write(pathname string, _ string, content *buffer.Buffer) errors.E {
     fd, err := f.write(pathname, content)
     if err != nil {
         return err
@@ -87,11 +92,11 @@
         Encodings: map[string]*buffer.Buffer{},
     }
 }
 
-func (f *Files) WriteFile(file *storage.File, content *buffer.Buffer) error {
+func (f *Files) WriteFile(file *storage.File, content *buffer.Buffer) errors.E {
     return f.Write(file.Path, file.Title, content)
 }
 
-func (f *Files) write(pathname string, content *buffer.Buffer) (multifile.FileLike, error) {
+func (f *Files) write(pathname string, content *buffer.Buffer) (multifile.FileLike, errors.E) {
     filename := pathNameToFileName(pathname)
     err := f.Mkdirp(filepath.Dir(filename))
     if err != nil {
@@ -103,33 +108,39 @@
     if err != nil {
         return nil, errors.WithMessagef(err, "could not open output file")
     }
-    _, err = fd.Write(content.Bytes())
-    if err != nil {
+    if _, err := fd.Write(content.Bytes()); err != nil {
         return nil, errors.WithMessage(err, "could not write output file")
     }
 
     return fd, nil
 }
 
-func openFileWrite(filename string) (*os.File, error) {
-    return os.OpenFile(filename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644)
+func openFileWrite(filename string) (*os.File, errors.E) {
+    f, err := os.OpenFile(filename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644)
+    if err != nil {
+        return nil, errors.WithMessage(err, "could not open output file")
+    }
+
+    return f, nil
 }
 
-func openFileGz(filename string) (*multifile.CompressWriter, error) {
+func openFileGz(filename string) (*multifile.CompressWriter, errors.E) {
     filenameGz := filename + ".gz"
     f, err := openFileWrite(filenameGz)
     if err != nil {
         return nil, err
     }
-    w, err := gzip.NewWriterLevel(f, gzipLevel)
-    if err != nil {
-        return nil, err
+    var w io.WriteCloser
+    var baseErr error
+    w, baseErr = gzip.NewWriterLevel(f, gzipLevel)
+    if baseErr != nil {
+        return nil, errors.WithStack(baseErr)
     }
 
     return multifile.NewCompressWriter(f, w), err
 }
 
-func openFileBrotli(filename string) (*multifile.CompressWriter, error) {
+func openFileBrotli(filename string) (*multifile.CompressWriter, errors.E) {
     filenameBrotli := filename + ".br"
     f, err := openFileWrite(filenameBrotli)
     if err != nil {
@@ -139,21 +150,23 @@
     return multifile.NewCompressWriter(f, brotli.NewWriterLevel(f, brotliLevel)), nil
 }
 
-func openFileZstd(filename string) (*multifile.CompressWriter, error) {
+func openFileZstd(filename string) (*multifile.CompressWriter, errors.E) {
     f, err := openFileWrite(filename + ".zstd")
     if err != nil {
         return nil, err
     }
-    w, err := zstd.NewWriter(f)
-    if err != nil {
-        return nil, err
+    var w io.WriteCloser
+    var baseErr error
+    w, baseErr = zstd.NewWriter(f)
+    if baseErr != nil {
        return nil, errors.WithStack(baseErr)
     }
 
     return multifile.NewCompressWriter(f, w), nil
 }
 
-func multiOpenFile(filename string) (*multifile.MultiFile, error) {
+func multiOpenFile(filename string) (*multifile.MultiFile, errors.E) {
     r, err := openFileWrite(filename)
     if err != nil {
         return nil, err
@@ -174,7 +187,7 @@
     return multifile.NewMultiFile(r, gz, br, zst), nil
 }
 
-func (f *Files) OpenFileAndVariants(filename string) (multifile.FileLike, error) {
+func (f *Files) OpenFileAndVariants(filename string) (multifile.FileLike, errors.E) {
     if f.options.Compress {
         return multiOpenFile(f.join(filename))
     }
@@ -182,7 +195,7 @@
     return openFileWrite(f.join(filename))
 }
 
-func (f *Files) Mkdirp(dir string) error {
+func (f *Files) Mkdirp(dir string) errors.E {
     err := os.MkdirAll(f.join(dir), 0755)
     if err != nil {
         return errors.WithMessage(err, "could not create directory")
M internal/storage/interface.go → internal/storage/interface.go
@@ -1,21 +1,22 @@
 package storage
 
 import (
+    "gitlab.com/tozd/go/errors"
+
     "go.alanpearce.eu/homestead/internal/buffer"
     "go.alanpearce.eu/homestead/internal/content"
 )
 
 type Reader interface {
-    GetFile(path string) (*File, error)
+    GetFile(path string) (*File, errors.E)
     CanonicalisePath(path string) (string, bool)
 }
 
 type Writer interface {
-    Mkdirp(path string) error
+    Mkdirp(path string) errors.E
     NewFileFromPost(post *content.Post) *File
-    Write(pathname string, title string, content *buffer.Buffer) error
-    WritePost(post *content.Post, content *buffer.Buffer) error
-    WriteFile(file *File, content *buffer.Buffer) error
+    Write(pathname string, title string, content *buffer.Buffer) errors.E
+    WritePost(post *content.Post, content *buffer.Buffer) errors.E
+    WriteFile(file *File, content *buffer.Buffer) errors.E
 }
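
Narrowing these interface methods from error to errors.E forces every backend (the files and sqlite packages above and below) to change its own signatures as well: Go interface satisfaction is exact, even though errors.E itself implements error. A compile-time assertion like the following, placed in a backend package, would surface a missed signature at build time rather than at the first call site. This is a sketch under the assumption that files.Reader and files.Files are the implementations of storage.Reader and storage.Writer, which the rest of the diff suggests but does not show in full:

    package files

    import "go.alanpearce.eu/homestead/internal/storage"

    // These blank assignments fail to compile if the concrete types drift
    // from the storage interfaces, e.g. if a method still returns a plain error.
    var (
    	_ storage.Reader = (*Reader)(nil)
    	_ storage.Writer = (*Files)(nil)
    )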
M internal/storage/sqlite/reader.go → internal/storage/sqlite/reader.go
@@ -20,11 +20,12 @@
         checkPath *sql.Stmt
     }
 }
 
-func NewReader(db *sql.DB, log *log.Logger) (r *Reader, err error) {
-    r = &Reader{
+func NewReader(db *sql.DB, log *log.Logger) (*Reader, errors.E) {
+    r := &Reader{
         log: log,
         db:  db,
     }
+    var err error
     r.queries.getFile, err = r.db.Prepare(`
         SELECT
             file.content_type,
@@ -60,7 +61,7 @@
     return r, nil
 }
 
-func (r *Reader) GetFile(filename string) (*storage.File, error) {
+func (r *Reader) GetFile(filename string) (*storage.File, errors.E) {
     file := &storage.File{
         Encodings: make(map[string]*buffer.Buffer, 1),
     }
M internal/storage/sqlite/writer.go → internal/storage/sqlite/writer.go
@@ -40,8 +40,8 @@
 type Options struct {
     Compress bool
 }
 
-func OpenDB(dbPath string) (*sql.DB, error) {
-    return sql.Open(
+func OpenDB(dbPath string) (*sql.DB, errors.E) {
+    db, err := sql.Open(
         "sqlite",
         fmt.Sprintf(
             "file:%s?mode=%s&_pragma=foreign_keys(1)&_pragma=mmap_size(%d)",
@@ -50,9 +50,14 @@
             "rwc",
             16*1024*1024,
         ),
     )
+    if err != nil {
+        return nil, errors.WithStack(err)
+    }
+
+    return db, nil
 }
 
-func NewWriter(db *sql.DB, logger *log.Logger, opts *Options) (*Writer, error) {
+func NewWriter(db *sql.DB, logger *log.Logger, opts *Options) (*Writer, errors.E) {
     _, err := db.Exec(`
         CREATE TABLE IF NOT EXISTS url (
             url_id INTEGER PRIMARY KEY,
@@ -118,20 +123,25 @@
     return w, nil
 }
 
-func (s *Writer) Mkdirp(string) error {
+func (s *Writer) Mkdirp(string) errors.E {
     return nil
 }
 
-func (s *Writer) storeURL(path string) (int64, error) {
+func (s *Writer) storeURL(path string) (int64, errors.E) {
     r, err := s.queries.insertURL.Exec(path)
     if err != nil {
         return 0, errors.WithMessagef(err, "inserting URL %s into database", path)
     }
 
-    return r.LastInsertId()
+    id, err := r.LastInsertId()
+    if err != nil {
+        return 0, errors.WithStack(err)
+    }
+
+    return id, nil
 }
 
-func (s *Writer) storeFile(urlID int64, file *storage.File) (int64, error) {
+func (s *Writer) storeFile(urlID int64, file *storage.File) (int64, errors.E) {
     if file.ContentType == "" {
         file.ContentType = http.DetectContentType(file.Encodings["identity"].Bytes())
         s.log.Warn(
@@ -154,10 +164,15 @@
     if err != nil {
         return 0, errors.WithMessage(err, "inserting file into database")
     }
 
-    return r.LastInsertId()
+    id, err := r.LastInsertId()
+    if err != nil {
+        return 0, errors.WithStack(err)
+    }
+
+    return id, nil
 }
 
-func (s *Writer) storeEncoding(fileID int64, encoding string, data []byte) error {
+func (s *Writer) storeEncoding(fileID int64, encoding string, data []byte) errors.E {
     _, err := s.queries.insertContent.Exec(
         sql.Named("file_id", fileID),
         sql.Named("encoding", encoding),
@@ -175,7 +190,7 @@
     return nil
 }
 
-func etag(content []byte) (string, error) {
+func etag(content []byte) (string, errors.E) {
     hash := fnv.New64a()
     hash.Write(content)
@@ -197,13 +212,13 @@
     return file
 }
 
-func (s *Writer) WritePost(post *content.Post, content *buffer.Buffer) error {
+func (s *Writer) WritePost(post *content.Post, content *buffer.Buffer) errors.E {
     s.log.Debug("storing post", "title", post.Title)
 
     return s.WriteFile(s.NewFileFromPost(post), content)
 }
 
-func (s *Writer) Write(pathname string, title string, content *buffer.Buffer) error {
+func (s *Writer) Write(pathname string, title string, content *buffer.Buffer) errors.E {
     file := &storage.File{
         Title: title,
         Path:  pathname,
@@ -214,7 +229,7 @@
     return s.WriteFile(file, content)
 }
 
-func (s *Writer) WriteFile(file *storage.File, content *buffer.Buffer) error {
+func (s *Writer) WriteFile(file *storage.File, content *buffer.Buffer) errors.E {
     s.log.Debug("storing content", "pathname", file.Path)
     urlID, err := s.storeURL(file.Path)
@@ -271,15 +286,16 @@
     return nil
 }
 
-func compress(encoding string, content *buffer.Buffer) (compressed *buffer.Buffer, err error) {
+func compress(encoding string, content *buffer.Buffer) (*buffer.Buffer, errors.E) {
     var w io.WriteCloser
-    compressed = new(buffer.Buffer)
+    compressed := new(buffer.Buffer)
     switch encoding {
     case "gzip":
         w = gzip.NewWriter(compressed)
     case "br":
         w = brotli.NewWriter(compressed)
     case "zstd":
+        var err error
         w, err = zstd.NewWriter(compressed)
         if err != nil {
            return nil, errors.WithMessage(err, "could not create zstd writer")
@@ -287,12 +303,10 @@
         }
     }
     defer w.Close()
-    err = content.SeekStart()
-    if err != nil {
+    if err := content.SeekStart(); err != nil {
        return nil, errors.WithMessage(err, "seeking to start of content buffer")
     }
-    _, err = io.Copy(w, content)
-    if err != nil {
+    if _, err := io.Copy(w, content); err != nil {
        return nil, errors.WithMessage(err, "compressing file")
     }
M internal/vcs/filelog.go → internal/vcs/filelog.go
@@ -31,8 +31,10 @@
     return u
 }
 
-func (r *Repository) GetFileLog(filename string) (cs []*Commit, err error) {
-    fl, err := r.repo.Log(&git.LogOptions{
+func (r *Repository) GetFileLog(filename string) ([]*Commit, errors.E) {
+    var fl object.CommitIter
+    var err error
+    fl, err = r.repo.Log(&git.LogOptions{
         Order:    git.LogOrderCommitterTime,
         FileName: &filename,
     })
@@ -41,6 +43,7 @@
         return nil, errors.WithMessagef(err, "could not get git log for file %s", filename)
     }
     defer fl.Close()
+    cs := make([]*Commit, 0)
     err = fl.ForEach(func(c *object.Commit) error {
         cs = append(cs, &Commit{
             Hash: c.Hash.String(),
M internal/vcs/repository.go → internal/vcs/repository.go
@@ -1,10 +1,10 @@
 package vcs
 
 import (
-    "io/fs"
     "os"
 
     "go.alanpearce.eu/homestead/internal/config"
+    "go.alanpearce.eu/homestead/internal/file"
     "go.alanpearce.eu/x/log"
 
     "github.com/go-git/go-git/v5"
@@ -24,31 +24,32 @@
     log       *log.Logger
     remoteURL config.URL
 }
 
-func CloneOrOpen(cfg *Options, log *log.Logger) (repo *Repository, exists bool, err error) {
-    stat, err := os.Stat(cfg.LocalPath)
-    exists = err == nil
-    if err != nil && !errors.Is(err, fs.ErrNotExist) {
-        return
-    }
-    repo = &Repository{
-        log:       log,
-        remoteURL: cfg.RemoteURL,
-    }
-    if stat == nil {
-        repo.repo, err = git.PlainClone(cfg.LocalPath, false, &git.CloneOptions{
+func CloneOrOpen(cfg *Options, log *log.Logger) (*Repository, bool, errors.E) {
+    var r *git.Repository
+    var err error
+    exists := file.Exists(cfg.LocalPath)
+    if !exists {
+        r, err = git.PlainClone(cfg.LocalPath, false, &git.CloneOptions{
             URL:      cfg.RemoteURL.String(),
             Progress: os.Stdout,
         })
     } else {
-        repo.repo, err = git.PlainOpen(cfg.LocalPath)
+        r, err = git.PlainOpen(cfg.LocalPath)
+    }
+    if err != nil {
+        return nil, exists, errors.WithStack(err)
     }
 
-    return
+    return &Repository{
+        log:       log,
+        remoteURL: cfg.RemoteURL,
+        repo:      r,
+    }, exists, nil
 }
 
-func (r *Repository) Update(rev string) (updated bool, err error) {
+func (r *Repository) Update(rev string) (bool, errors.E) {
     r.log.Info("updating repository", "from", r.HeadSHA())
-    err = r.repo.Fetch(&git.FetchOptions{
+    err := r.repo.Fetch(&git.FetchOptions{
         Prune: true,
     })
     if err != nil {
@@ -58,18 +59,18 @@
         return true, nil
     }
 
-    return false, err
+    return false, errors.WithStack(err)
     }
 
     rem, err := r.repo.Remote("origin")
     if err != nil {
-        return false, err
+        return false, errors.WithStack(err)
     }
 
     refs, err := rem.List(&git.ListOptions{
         Timeout: 5,
     })
     if err != nil {
-        return false, err
+        return false, errors.WithStack(err)
     }
     var hash plumbing.Hash
@@ -87,14 +88,14 @@
     }
     wt, err := r.repo.Worktree()
     if err != nil {
-        return false, err
+        return false, errors.WithStack(err)
     }
     err = wt.Checkout(&git.CheckoutOptions{
         Hash:  hash,
        Force: true,
     })
     if err != nil {
-        return false, err
+        return false, errors.WithStack(err)
     }
 
     r.log.Info("updated to", "rev", hash)
@@ -102,10 +103,10 @@
     return true, r.Clean(wt)
 }
 
-func (r *Repository) Clean(wt *git.Worktree) error {
+func (r *Repository) Clean(wt *git.Worktree) errors.E {
     st, err := wt.Status()
     if err != nil {
-        return err
+        return errors.WithStack(err)
     }
 
     if !st.IsClean() {
@@ -113,7 +114,7 @@
         err = wt.Clean(&git.CleanOptions{
             Dir: true,
         })
         if err != nil {
-            return err
+            return errors.WithStack(err)
         }
     }
M internal/website/website.go → internal/website/website.go
@@ -50,7 +50,7 @@
 func New(
     opts *Options,
     log *log.Logger,
-) (*Website, error) {
+) (*Website, errors.E) {
     website := &Website{
         log: log,
         App: &server.App{
@@ -159,7 +159,7 @@
     return website, nil
 }
 
-func prepareRootDirectory(dataRoot, root string) error {
+func prepareRootDirectory(dataRoot, root string) errors.E {
     if !file.Exists(root) {
         err := os.MkdirAll(root, 0755)
         if err != nil {