Commit e0db8f8
Changed files (17)
internal/generator/blob.go
@@ -0,0 +1,259 @@
+package generator
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "html/template"
+ "os"
+ "path/filepath"
+ "runtime"
+ "strings"
+ "sync"
+
+ "github.com/alecthomas/chroma/v2/formatters/html"
+ "github.com/alecthomas/chroma/v2/lexers"
+ "github.com/alecthomas/chroma/v2/styles"
+
+ "mokhan.ca/antonmedv/gitmal/pkg/git"
+ "mokhan.ca/antonmedv/gitmal/pkg/links"
+ "mokhan.ca/antonmedv/gitmal/pkg/progress_bar"
+ "mokhan.ca/antonmedv/gitmal/pkg/templates"
+)
+
+// GenerateBlobs renders one HTML page per blob under
+// <OutputDir>/blob/<ref>/<path>.html. Markdown blobs are converted to HTML,
+// other text blobs are syntax-highlighted with chroma, and image blobs are
+// copied verbatim to <OutputDir>/raw/<ref>/<path> and embedded via <img>.
+// Work is fanned out over a bounded worker pool sized by NumCPU; the first
+// error cancels the remaining work and is returned.
+func GenerateBlobs(files []git.Blob, params Params) error {
+ // Prepare shared, read-only resources
+ var css strings.Builder
+ style := styles.Get(params.Style)
+ if style == nil {
+ return fmt.Errorf("unknown style: %s", params.Style)
+ }
+
+ formatterOptions := []html.Option{
+ html.WithLineNumbers(true),
+ html.WithLinkableLineNumbers(true, "L"),
+ html.WithClasses(true),
+ html.WithCSSComments(false),
+ }
+
+ // Use a temporary formatter to render CSS once
+ if err := html.New(formatterOptions...).WriteCSS(&css, style); err != nil {
+ return err
+ }
+
+ dirsSet := links.BuildDirSet(files)
+ filesSet := links.BuildFileSet(files)
+
+ // Bounded worker pool
+ workers := runtime.NumCPU()
+ if workers < 1 {
+ workers = 1
+ }
+
+ ctx, cancel := context.WithCancel(context.Background())
+ defer cancel()
+
+ jobs := make(chan git.Blob)
+ // Buffered so the first failing worker records its error without
+ // blocking; subsequent errors are dropped by check's default case.
+ errCh := make(chan error, 1)
+ var wg sync.WaitGroup
+
+ p := progress_bar.NewProgressBar("blobs for "+params.Ref.String(), len(files))
+
+ workerFn := func() {
+ defer wg.Done()
+
+ // Per-worker instances
+ md := createMarkdown(params.Style)
+ formatter := html.New(formatterOptions...)
+
+ // check records err (first one wins) and cancels the pool; it
+ // reports whether the current job should be aborted.
+ check := func(err error) bool {
+ if err != nil {
+ select {
+ case errCh <- err:
+ cancel()
+ default:
+ }
+ return true
+ }
+ return false
+ }
+
+ for {
+ select {
+ case <-ctx.Done():
+ return
+ case blob, ok := <-jobs:
+ if !ok {
+ return
+ }
+ // Each job runs in its own closure so the deferred file
+ // closes fire at the end of the job, not of the worker.
+ func() {
+ var content string
+ data, isBin, err := git.BlobContent(params.Ref, blob.Path, params.RepoDir)
+ if check(err) {
+ return
+ }
+
+ isImg := isImage(blob.Path)
+ if !isBin {
+ content = string(data)
+ }
+
+ outPath := filepath.Join(params.OutputDir, "blob", params.Ref.DirName(), blob.Path) + ".html"
+ if err := os.MkdirAll(filepath.Dir(outPath), 0o755); check(err) {
+ return
+ }
+
+ f, err := os.Create(outPath)
+ if check(err) {
+ return
+ }
+ defer func() {
+ _ = f.Close()
+ }()
+
+ // Nesting depth of the blob; +2 accounts for the leading
+ // blob/<ref>/ segments of the generated page's location.
+ depth := 0
+ if strings.Contains(blob.Path, "/") {
+ depth = len(strings.Split(blob.Path, "/")) - 1
+ }
+ rootHref := strings.Repeat("../", depth+2)
+
+ if isMarkdown(blob.Path) {
+ var b bytes.Buffer
+ if err := md.Convert([]byte(content), &b); check(err) {
+ return
+ }
+
+ // Rewrite intra-repo links so they resolve inside the
+ // generated site.
+ contentHTML := links.Resolve(
+ b.String(),
+ blob.Path,
+ rootHref,
+ params.Ref.DirName(),
+ dirsSet,
+ filesSet,
+ )
+
+ err = templates.MarkdownTemplate.ExecuteTemplate(f, "layout.gohtml", templates.MarkdownParams{
+ LayoutParams: templates.LayoutParams{
+ Title: fmt.Sprintf("%s/%s at %s", params.Name, blob.Path, params.Ref),
+ Dark: params.Dark,
+ CSSMarkdown: cssMarkdown(params.Dark),
+ Name: params.Name,
+ RootHref: rootHref,
+ CurrentRefDir: params.Ref.DirName(),
+ Selected: "code",
+ },
+ HeaderParams: templates.HeaderParams{
+ Ref: params.Ref,
+ Breadcrumbs: breadcrumbs(params.Name, blob.Path, true),
+ },
+ Blob: blob,
+ Content: template.HTML(contentHTML),
+ })
+ if check(err) {
+ return
+ }
+
+ } else {
+
+ var contentHTML template.HTML
+ if !isBin {
+ // Highlight text blobs with a lexer matched by file name.
+ var b bytes.Buffer
+ lx := lexers.Match(blob.Path)
+ if lx == nil {
+ lx = lexers.Fallback
+ }
+ iterator, _ := lx.Tokenise(nil, content)
+ if err := formatter.Format(&b, style, iterator); check(err) {
+ return
+ }
+ contentHTML = template.HTML(b.String())
+
+ } else if isImg {
+
+ // Copy the raw image bytes so the page can reference them.
+ rawPath := filepath.Join(params.OutputDir, "raw", params.Ref.DirName(), blob.Path)
+ if err := os.MkdirAll(filepath.Dir(rawPath), 0o755); check(err) {
+ return
+ }
+
+ rf, err := os.Create(rawPath)
+ if check(err) {
+ return
+ }
+ defer func() {
+ _ = rf.Close()
+ }()
+
+ if _, err := rf.Write(data); check(err) {
+ return
+ }
+
+ relativeRawPath := filepath.Join(rootHref, "raw", params.Ref.DirName(), blob.Path)
+ contentHTML = template.HTML(fmt.Sprintf(`<img src="%s" alt="%s" />`, relativeRawPath, blob.FileName))
+ }
+
+ err = templates.BlobTemplate.ExecuteTemplate(f, "layout.gohtml", templates.BlobParams{
+ LayoutParams: templates.LayoutParams{
+ Title: fmt.Sprintf("%s/%s at %s", params.Name, blob.Path, params.Ref),
+ Dark: params.Dark,
+ Name: params.Name,
+ RootHref: rootHref,
+ CurrentRefDir: params.Ref.DirName(),
+ Selected: "code",
+ },
+ HeaderParams: templates.HeaderParams{
+ Ref: params.Ref,
+ Breadcrumbs: breadcrumbs(params.Name, blob.Path, true),
+ },
+ CSS: template.CSS(css.String()),
+ Blob: blob,
+ IsBinary: isBin,
+ IsImage: isImg,
+ Content: contentHTML,
+ })
+ if check(err) {
+ return
+ }
+ }
+ }()
+
+ // NOTE(review): Inc runs even when the job bailed out on error,
+ // so the bar can advance past failed pages.
+ p.Inc()
+ }
+ }
+ }
+
+ // Start workers
+ wg.Add(workers)
+ for i := 0; i < workers; i++ {
+ go workerFn()
+ }
+
+ // Feed jobs
+ go func() {
+ defer close(jobs)
+ for _, b := range files {
+ select {
+ case <-ctx.Done():
+ return
+ case jobs <- b:
+ }
+ }
+ }()
+
+ // Wait for workers
+ doneCh := make(chan struct{})
+ go func() {
+ wg.Wait()
+ close(doneCh)
+ }()
+
+ var runErr error
+ select {
+ case runErr = <-errCh:
+ // error occurred, wait workers to finish
+ <-doneCh
+ case <-doneCh:
+ }
+
+ p.Done()
+ return runErr
+}
internal/generator/branches.go
@@ -0,0 +1,63 @@
+package generator
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "sort"
+
+ "mokhan.ca/antonmedv/gitmal/pkg/git"
+ "mokhan.ca/antonmedv/gitmal/pkg/templates"
+)
+
+// GenerateBranches writes branches.html: a list of every branch linking to
+// its tree and its commit log. The default branch is pinned to the top; the
+// remaining branches are sorted alphabetically for deterministic output.
+func GenerateBranches(branches []git.Ref, defaultBranch string, params Params) error {
+ outDir := params.OutputDir
+ if err := os.MkdirAll(outDir, 0o755); err != nil {
+ return err
+ }
+
+ entries := make([]templates.BranchEntry, 0, len(branches))
+ for _, b := range branches {
+ entries = append(entries, templates.BranchEntry{
+ Name: b.String(),
+ // Build both hrefs the same way (Join + ToSlash) for consistency.
+ Href: filepath.ToSlash(filepath.Join("blob", b.DirName(), "index.html")),
+ IsDefault: b.String() == defaultBranch,
+ CommitsHref: filepath.ToSlash(filepath.Join("commits", b.DirName(), "index.html")),
+ })
+ }
+
+ // Ensure default branch is shown at the top of the list.
+ // Keep remaining branches sorted alphabetically for determinism.
+ sort.SliceStable(entries, func(i, j int) bool {
+ if entries[i].IsDefault != entries[j].IsDefault {
+ return entries[i].IsDefault && !entries[j].IsDefault
+ }
+ return entries[i].Name < entries[j].Name
+ })
+
+ f, err := os.Create(filepath.Join(outDir, "branches.html"))
+ if err != nil {
+ return err
+ }
+
+ // RootHref from the repository root page is just ./
+ rootHref := "./"
+
+ err = templates.BranchesTemplate.ExecuteTemplate(f, "layout.gohtml", templates.BranchesParams{
+ LayoutParams: templates.LayoutParams{
+ Title: fmt.Sprintf("Branches %s %s", Dot, params.Name),
+ Name: params.Name,
+ Dark: params.Dark,
+ RootHref: rootHref,
+ CurrentRefDir: params.DefaultRef.DirName(),
+ Selected: "branches",
+ },
+ Branches: entries,
+ })
+ if err != nil {
+ _ = f.Close()
+ return err
+ }
+ // Surface Close errors: the page is written through f, so a failed
+ // flush/close means an incomplete file (previously dropped by a defer).
+ return f.Close()
+}
internal/generator/branches_json.go
@@ -0,0 +1,43 @@
+package generator
+
+import (
+ "encoding/json"
+ "os"
+ "path/filepath"
+
+ "mokhan.ca/antonmedv/gitmal/pkg/git"
+)
+
+// BranchJSON is the per-branch record emitted to branches.json,
+// loosely mirroring a GitHub-style "list branches" response shape.
+type BranchJSON struct {
+ Name string `json:"name"`
+ Commit BranchHead `json:"commit"`
+}
+
+// BranchHead identifies the commit a branch currently points at.
+type BranchHead struct {
+ SHA string `json:"sha"`
+}
+
+// GenerateBranchesJSON writes branches.json: one entry per branch carrying
+// the SHA of its head commit (empty when the branch has no commits in
+// commitsFor).
+func GenerateBranchesJSON(branches []git.Ref, commitsFor map[git.Ref][]git.Commit, params Params) error {
+ list := make([]BranchJSON, 0, len(branches))
+ for _, branch := range branches {
+ commits := commitsFor[branch]
+ var sha string
+ if len(commits) > 0 {
+ // Index 0 is treated as the branch head — TODO confirm commitsFor
+ // is ordered newest-first by the caller.
+ sha = commits[0].Hash
+ }
+ list = append(list, BranchJSON{
+ Name: branch.String(),
+ Commit: BranchHead{SHA: sha},
+ })
+ }
+
+ outPath := filepath.Join(params.OutputDir, "branches.json")
+ f, err := os.Create(outPath)
+ if err != nil {
+ return err
+ }
+ encoder := json.NewEncoder(f)
+ encoder.SetIndent("", " ")
+ if err := encoder.Encode(list); err != nil {
+ _ = f.Close()
+ return err
+ }
+ // Surface Close errors instead of dropping them in a defer: a failed
+ // close can mean a truncated JSON file silently reported as success.
+ return f.Close()
+}
internal/generator/commit.go
@@ -0,0 +1,335 @@
+package generator
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "html/template"
+ "os"
+ "path/filepath"
+ "runtime"
+ "sort"
+ "strings"
+ "sync"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/formatters/html"
+ "github.com/alecthomas/chroma/v2/lexers"
+ "github.com/alecthomas/chroma/v2/styles"
+
+ "mokhan.ca/antonmedv/gitmal/pkg/git"
+ "mokhan.ca/antonmedv/gitmal/pkg/gitdiff"
+ "mokhan.ca/antonmedv/gitmal/pkg/progress_bar"
+ "mokhan.ca/antonmedv/gitmal/pkg/templates"
+)
+
+// GenerateCommits renders a standalone diff page (commit/<hash>.html) for
+// every commit in the map, spreading the work over a bounded worker pool.
+// The first page-generation error cancels the remaining work and is returned.
+func GenerateCommits(commits map[string]git.Commit, params Params) error {
+ outDir := filepath.Join(params.OutputDir, "commit")
+ if err := os.MkdirAll(outDir, 0o755); err != nil {
+ return err
+ }
+
+ // Flatten the map; iteration order is random, but each page is
+ // independent so order does not matter.
+ list := make([]git.Commit, 0, len(commits))
+ for _, c := range commits {
+ list = append(list, c)
+ }
+
+ workers := runtime.NumCPU()
+ if workers < 1 {
+ workers = 1
+ }
+
+ ctx, cancel := context.WithCancel(context.Background())
+ defer cancel()
+
+ jobs := make(chan git.Commit)
+ // Buffered so the first error is recorded without blocking; later
+ // errors are dropped by the non-blocking send below.
+ errCh := make(chan error, 1)
+ var wg sync.WaitGroup
+
+ p := progress_bar.NewProgressBar("commits", len(list))
+
+ workerFn := func() {
+ defer wg.Done()
+ for {
+ select {
+ case <-ctx.Done():
+ return
+ case c, ok := <-jobs:
+ if !ok {
+ return
+ }
+ if err := generateCommitPage(c, params); err != nil {
+ // Record the first error and stop the pool.
+ select {
+ case errCh <- err:
+ cancel()
+ default:
+ }
+ return
+ }
+ p.Inc()
+ }
+ }
+ }
+
+ wg.Add(workers)
+ for i := 0; i < workers; i++ {
+ go workerFn()
+ }
+
+ // Feeder: closes jobs when exhausted so idle workers drain and exit.
+ go func() {
+ defer close(jobs)
+ for _, c := range list {
+ select {
+ case <-ctx.Done():
+ return
+ case jobs <- c:
+ }
+ }
+ }()
+
+ done := make(chan struct{})
+ go func() {
+ wg.Wait()
+ close(done)
+ }()
+
+ var err error
+ select {
+ case err = <-errCh:
+ // Redundant with the failing worker's cancel, but harmless.
+ cancel()
+ <-done
+ case <-done:
+ }
+
+ p.Done()
+ return err
+}
+
+// generateCommitPage renders commit/<hash>.html: the commit's full diff,
+// highlighted per file with the "diff" lexer, plus a sidebar file tree whose
+// traversal order matches the order of the per-file diff views.
+func generateCommitPage(commit git.Commit, params Params) error {
+ diff, err := git.CommitDiff(commit.Hash, params.RepoDir)
+ if err != nil {
+ return err
+ }
+
+ files, _, err := gitdiff.Parse(strings.NewReader(diff))
+ if err != nil {
+ return err
+ }
+
+ style := styles.Get(params.Style)
+ if style == nil {
+ return fmt.Errorf("unknown style: %s", params.Style)
+ }
+
+ // Inserted/deleted diff lines get display:block so their background
+ // color spans the whole row.
+ formatter := html.New(
+ html.WithClasses(true),
+ html.WithCSSComments(false),
+ html.WithCustomCSS(map[chroma.TokenType]string{
+ chroma.GenericInserted: "display: block;",
+ chroma.GenericDeleted: "display: block;",
+ }),
+ )
+
+ var cssBuf bytes.Buffer
+ if err := formatter.WriteCSS(&cssBuf, style); err != nil {
+ return err
+ }
+
+ lexer := lexers.Get("diff")
+ if lexer == nil {
+ return fmt.Errorf("failed to get lexer for diff")
+ }
+
+ outPath := filepath.Join(params.OutputDir, "commit", commit.Hash+".html")
+
+ f, err := os.Create(outPath)
+ if err != nil {
+ return err
+ }
+ // NOTE(review): the early returns below leave f open until process
+ // exit — consider a deferred Close alongside the explicit one.
+ rootHref := filepath.ToSlash("../")
+
+ fileTree := buildFileTree(files)
+
+ // Create a stable order for files that matches the file tree traversal
+ // so that the per-file views appear in the same order as the sidebar tree.
+ fileOrder := make(map[string]int)
+ {
+ // Preorder traversal (dirs first, then files), respecting sortNode ordering
+ var idx int
+ var walk func(nodes []*templates.FileTree)
+ walk = func(nodes []*templates.FileTree) {
+ for _, n := range nodes {
+ if n.IsDir {
+ // Children are already sorted by sortNode
+ walk(n.Children)
+ continue
+ }
+ if n.Path == "" {
+ continue
+ }
+ if _, ok := fileOrder[n.Path]; !ok {
+ fileOrder[n.Path] = idx
+ idx++
+ }
+ }
+ }
+ walk(fileTree)
+ }
+
+ // Prepare per-file views
+ // NOTE(review): the loop variable f shadows the output file f above;
+ // the outer file becomes visible again after the loop.
+ var filesViews []templates.FileView
+ for _, f := range files {
+ // Deleted files are addressed by their old name.
+ path := f.NewName
+ if f.IsDelete {
+ path = f.OldName
+ }
+ if path == "" {
+ continue
+ }
+
+ // Concatenate this file's hunks and highlight them as one diff.
+ var fileDiff strings.Builder
+ for _, frag := range f.TextFragments {
+ fileDiff.WriteString(frag.String())
+ }
+
+ it, err := lexer.Tokenise(nil, fileDiff.String())
+ if err != nil {
+ return err
+ }
+ var buf bytes.Buffer
+ if err := formatter.Format(&buf, style, it); err != nil {
+ return err
+ }
+
+ filesViews = append(filesViews, templates.FileView{
+ Path: path,
+ OldName: f.OldName,
+ NewName: f.NewName,
+ IsNew: f.IsNew,
+ IsDelete: f.IsDelete,
+ IsRename: f.IsRename,
+ IsBinary: f.IsBinary,
+ HasChanges: f.TextFragments != nil,
+ HTML: template.HTML(buf.String()),
+ })
+ }
+
+ // Sort file views to match the file tree order. If for some reason a path
+ // is missing in the order map (shouldn't happen), fall back to case-insensitive
+ // alphabetical order by full path.
+ sort.Slice(filesViews, func(i, j int) bool {
+ oi, iok := fileOrder[filesViews[i].Path]
+ oj, jok := fileOrder[filesViews[j].Path]
+ if iok && jok {
+ return oi < oj
+ }
+ if iok != jok {
+ return iok // known order first
+ }
+ return filesViews[i].Path < filesViews[j].Path
+ })
+
+ // Prefer the commit's own branch for the header; fall back to default.
+ currentRef := params.DefaultRef
+ if !commit.Branch.IsEmpty() {
+ currentRef = commit.Branch
+ }
+
+ err = templates.CommitTemplate.ExecuteTemplate(f, "layout.gohtml", templates.CommitParams{
+ LayoutParams: templates.LayoutParams{
+ Title: fmt.Sprintf("%s %s %s@%s", commit.Subject, Dot, params.Name, commit.ShortHash),
+ Name: params.Name,
+ Dark: params.Dark,
+ RootHref: rootHref,
+ CurrentRefDir: currentRef.DirName(),
+ Selected: "commits",
+ },
+ Commit: commit,
+ DiffCSS: template.CSS(cssBuf.String()),
+ FileTree: fileTree,
+ FileViews: filesViews,
+ })
+ if err != nil {
+ _ = f.Close()
+ return err
+ }
+ if err := f.Close(); err != nil {
+ return err
+ }
+ return nil
+}
+
+// buildFileTree arranges a commit's changed files into a nested directory
+// tree for the sidebar. The returned slice holds the top-level nodes; the
+// whole tree is sorted by sortNode (directories first, case-insensitive).
+func buildFileTree(files []*gitdiff.File) []*templates.FileTree {
+ // Synthetic, never-rendered root that collects the top-level nodes.
+ root := &templates.FileTree{IsDir: true}
+
+ for _, file := range files {
+ // Deleted files are addressed by their old name.
+ name := file.NewName
+ if file.IsDelete {
+ name = file.OldName
+ }
+ name = filepath.ToSlash(strings.TrimPrefix(name, "./"))
+ if name == "" {
+ continue
+ }
+
+ segments := strings.Split(name, "/")
+ dir := root
+ prefix := ""
+ // Descend through (creating as needed) every intermediate directory.
+ for _, seg := range segments[:len(segments)-1] {
+ if prefix == "" {
+ prefix = seg
+ } else {
+ prefix += "/" + seg
+ }
+ dir = findOrCreateDir(dir, seg, prefix)
+ }
+
+ dir.Children = append(dir.Children, &templates.FileTree{
+ Name: segments[len(segments)-1],
+ Path: name,
+ IsNew: file.IsNew,
+ IsDelete: file.IsDelete,
+ IsRename: file.IsRename,
+ OldName: file.OldName,
+ NewName: file.NewName,
+ })
+ }
+
+ sortNode(root)
+ return root.Children
+}
+
+// findOrCreateDir returns parent's existing child directory called name, or
+// creates one (recording its full slash-separated path) and attaches it.
+func findOrCreateDir(parent *templates.FileTree, name, path string) *templates.FileTree {
+ for _, child := range parent.Children {
+ if child.IsDir && child.Name == name {
+ return child
+ }
+ }
+ dir := &templates.FileTree{IsDir: true, Name: name, Path: path}
+ parent.Children = append(parent.Children, dir)
+ return dir
+}
+
+// sortNode recursively orders a node's children: directories before files,
+// then case-insensitively by name within each group.
+func sortNode(n *templates.FileTree) {
+ if len(n.Children) == 0 {
+ return
+ }
+ sort.Slice(n.Children, func(i, j int) bool {
+ left, right := n.Children[i], n.Children[j]
+ if left.IsDir != right.IsDir {
+ return left.IsDir // directories sort ahead of files
+ }
+ return strings.ToLower(left.Name) < strings.ToLower(right.Name)
+ })
+ for _, child := range n.Children {
+ if child.IsDir {
+ sortNode(child)
+ }
+ }
+}
internal/generator/commits_atom.go
@@ -0,0 +1,96 @@
+package generator
+
+import (
+ "encoding/xml"
+ "os"
+ "path/filepath"
+
+ "mokhan.ca/antonmedv/gitmal/pkg/git"
+)
+
+// AtomFeed is the root <feed> element of an Atom syndication document.
+type AtomFeed struct {
+ XMLName xml.Name `xml:"feed"`
+ XMLNS string `xml:"xmlns,attr"`
+ ID string `xml:"id"`
+ Title string `xml:"title"`
+ Updated string `xml:"updated"`
+ Link []AtomLink `xml:"link"`
+ Entries []AtomEntry `xml:"entry"`
+}
+
+// AtomLink is a feed- or entry-level <link> element.
+type AtomLink struct {
+ Rel string `xml:"rel,attr"`
+ Type string `xml:"type,attr"`
+ Href string `xml:"href,attr"`
+}
+
+// AtomEntry is a single feed entry; GenerateCommitsAtom emits one per commit.
+type AtomEntry struct {
+ ID string `xml:"id"`
+ Title string `xml:"title"`
+ Updated string `xml:"updated"`
+ Author AtomAuthor `xml:"author"`
+ Content string `xml:"content"`
+ Link AtomLink `xml:"link"`
+}
+
+// AtomAuthor names a commit author; Email is omitted when empty.
+type AtomAuthor struct {
+ Name string `xml:"name"`
+ Email string `xml:"email,omitempty"`
+}
+
+// GenerateCommitsAtom writes an Atom feed of commits for params.Ref to
+// <OutputDir>/commits/<ref>.atom. commits[0] (the newest) provides the
+// feed-level updated timestamp.
+func GenerateCommitsAtom(commits []git.Commit, params Params) error {
+ outDir := filepath.Join(params.OutputDir, "commits")
+ if err := os.MkdirAll(outDir, 0o755); err != nil {
+ return err
+ }
+
+ // Atom requires RFC 3339 timestamps. Convert to UTC before formatting so
+ // the literal "Z" suffix in the layout is actually true for commit dates
+ // carrying a non-UTC offset (previously the offset was silently dropped).
+ const timeLayout = "2006-01-02T15:04:05Z"
+
+ var updated string
+ if len(commits) > 0 {
+ updated = commits[0].Date.UTC().Format(timeLayout)
+ }
+
+ entries := make([]AtomEntry, len(commits))
+ for i, c := range commits {
+ content := c.Subject
+ if c.Body != "" {
+ content = c.Subject + "\n\n" + c.Body
+ }
+ entries[i] = AtomEntry{
+ ID: "urn:sha:" + c.Hash,
+ Title: c.Subject,
+ Updated: c.Date.UTC().Format(timeLayout),
+ Author: AtomAuthor{Name: c.Author, Email: c.Email},
+ Content: content,
+ Link: AtomLink{
+ Rel: "alternate",
+ Type: "text/html",
+ Href: "commit/" + c.Hash + ".html",
+ },
+ }
+ }
+
+ feed := AtomFeed{
+ XMLNS: "http://www.w3.org/2005/Atom",
+ ID: "urn:gitmal:" + params.Name + ":" + params.Ref.String(),
+ Title: params.Name + " commits on " + params.Ref.String(),
+ Updated: updated,
+ Link: []AtomLink{
+ {Rel: "self", Type: "application/atom+xml", Href: "commits/" + params.Ref.DirName() + ".atom"},
+ },
+ Entries: entries,
+ }
+
+ outPath := filepath.Join(outDir, params.Ref.DirName()+".atom")
+ f, err := os.Create(outPath)
+ if err != nil {
+ return err
+ }
+
+ if _, err := f.WriteString(xml.Header); err != nil {
+ _ = f.Close()
+ return err
+ }
+ encoder := xml.NewEncoder(f)
+ encoder.Indent("", " ")
+ if err := encoder.Encode(feed); err != nil {
+ _ = f.Close()
+ return err
+ }
+ // Surface Close errors instead of dropping them in a defer: a failed
+ // close can mean a truncated feed file.
+ return f.Close()
+}
internal/generator/commits_json.go
@@ -0,0 +1,71 @@
+package generator
+
+import (
+ "encoding/json"
+ "os"
+ "path/filepath"
+
+ "mokhan.ca/antonmedv/gitmal/pkg/git"
+)
+
+// CommitJSON is the per-commit record emitted to commits/<ref>.json,
+// loosely mirroring a GitHub-style commits API response shape.
+type CommitJSON struct {
+ SHA string `json:"sha"`
+ Commit CommitDetail `json:"commit"`
+ Parents []ParentRef `json:"parents"`
+}
+
+// CommitDetail groups the author/committer identities with the full message
+// (subject plus body).
+type CommitDetail struct {
+ Author PersonInfo `json:"author"`
+ Committer PersonInfo `json:"committer"`
+ Message string `json:"message"`
+}
+
+// PersonInfo identifies an author or committer; Date is RFC 3339 formatted.
+type PersonInfo struct {
+ Name string `json:"name"`
+ Email string `json:"email"`
+ Date string `json:"date"`
+}
+
+// ParentRef references a parent commit by SHA.
+type ParentRef struct {
+ SHA string `json:"sha"`
+}
+
+func toCommitJSON(c git.Commit) CommitJSON {
+ message := c.Subject
+ if c.Body != "" {
+ message = c.Subject + "\n\n" + c.Body
+ }
+ parents := make([]ParentRef, len(c.Parents))
+ for i, p := range c.Parents {
+ parents[i] = ParentRef{SHA: p}
+ }
+ return CommitJSON{
+ SHA: c.Hash,
+ Commit: CommitDetail{
+ Author: PersonInfo{Name: c.Author, Email: c.Email, Date: c.Date.Format("2006-01-02T15:04:05Z")},
+ Committer: PersonInfo{Name: c.CommitterName, Email: c.CommitterEmail, Date: c.CommitterDate.Format("2006-01-02T15:04:05Z")},
+ Message: message,
+ },
+ Parents: parents,
+ }
+}
+
+// GenerateCommitsJSON writes commits/<ref>.json: the branch's commit log in
+// the shape produced by toCommitJSON.
+func GenerateCommitsJSON(commits []git.Commit, params Params) error {
+ outDir := filepath.Join(params.OutputDir, "commits")
+ if err := os.MkdirAll(outDir, 0o755); err != nil {
+ return err
+ }
+ list := make([]CommitJSON, len(commits))
+ for i, c := range commits {
+ list[i] = toCommitJSON(c)
+ }
+ outPath := filepath.Join(outDir, params.Ref.DirName()+".json")
+ f, err := os.Create(outPath)
+ if err != nil {
+ return err
+ }
+ encoder := json.NewEncoder(f)
+ encoder.SetIndent("", " ")
+ if err := encoder.Encode(list); err != nil {
+ _ = f.Close()
+ return err
+ }
+ // Surface Close errors instead of dropping them in a defer: a failed
+ // close can mean a truncated JSON file silently reported as success.
+ return f.Close()
+}
internal/generator/commits_list.go
@@ -0,0 +1,101 @@
+package generator
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "slices"
+
+ "mokhan.ca/antonmedv/gitmal/pkg/git"
+ "mokhan.ca/antonmedv/gitmal/pkg/progress_bar"
+ "mokhan.ca/antonmedv/gitmal/pkg/templates"
+)
+
+// commitsPerPage caps how many commits appear on each paginated log page.
+const commitsPerPage = 100
+
+// GenerateLogForBranch writes the paginated commit log for params.Ref under
+// commits/<ref>/: index.html for page 1 and page-N.html for later pages,
+// each with first/prev/next/last navigation hrefs.
+func GenerateLogForBranch(allCommits []git.Commit, params Params) error {
+ total := len(allCommits)
+ totalPages := (total + commitsPerPage - 1) / commitsPerPage
+
+ // RootHref from commits/<branch>/... => ../../
+ rootHref := "../../"
+ outBase := filepath.Join(params.OutputDir, "commits", params.Ref.DirName())
+ if err := os.MkdirAll(outBase, 0o755); err != nil {
+ return err
+ }
+
+ p := progress_bar.NewProgressBar("commits for "+params.Ref.String(), totalPages)
+
+ page := 1
+ // NOTE(review): slices.Chunk (Go 1.23) yields sub-slices of allCommits,
+ // so assigning Href below also mutates the caller's slice elements.
+ for pageCommits := range slices.Chunk(allCommits, commitsPerPage) {
+ for i := range pageCommits {
+ pageCommits[i].Href = filepath.ToSlash(filepath.Join(rootHref, "commit", pageCommits[i].Hash+".html"))
+ }
+
+ fileName := "index.html"
+ if page > 1 {
+ fileName = fmt.Sprintf("page-%d.html", page)
+ }
+
+ outPath := filepath.Join(outBase, fileName)
+ f, err := os.Create(outPath)
+ if err != nil {
+ return err
+ }
+
+ // first/prev are set only after page 1; next/last only before the
+ // final page. Unset hrefs stay empty strings.
+ var prevHref, nextHref, firstHref, lastHref string
+ if page > 1 {
+ if page-1 == 1 {
+ prevHref = "index.html"
+ } else {
+ prevHref = fmt.Sprintf("page-%d.html", page-1)
+ }
+ firstHref = "index.html"
+ }
+
+ if page < totalPages {
+ nextHref = fmt.Sprintf("page-%d.html", page+1)
+ if totalPages > 1 {
+ lastHref = fmt.Sprintf("page-%d.html", totalPages)
+ }
+ }
+
+ err = templates.CommitsListTemplate.ExecuteTemplate(f, "layout.gohtml", templates.CommitsListParams{
+ LayoutParams: templates.LayoutParams{
+ Title: fmt.Sprintf("Commits %s %s", Dot, params.Name),
+ Name: params.Name,
+ Dark: params.Dark,
+ RootHref: rootHref,
+ CurrentRefDir: params.Ref.DirName(),
+ Selected: "commits",
+ },
+ HeaderParams: templates.HeaderParams{
+ Header: "Commits",
+ },
+ Ref: params.Ref,
+ Commits: pageCommits,
+ Page: templates.Pagination{
+ Page: page,
+ TotalPages: totalPages,
+ PrevHref: prevHref,
+ NextHref: nextHref,
+ FirstHref: firstHref,
+ LastHref: lastHref,
+ },
+ })
+ if err != nil {
+ _ = f.Close()
+ return err
+ }
+ if err := f.Close(); err != nil {
+ return err
+ }
+
+ page++
+ p.Inc()
+ }
+
+ p.Done()
+
+ return nil
+}
internal/generator/index.go
@@ -0,0 +1,129 @@
+package generator
+
+import (
+ "os"
+ "path/filepath"
+ "sort"
+ "strings"
+
+ "mokhan.ca/antonmedv/gitmal/pkg/git"
+ "mokhan.ca/antonmedv/gitmal/pkg/links"
+ "mokhan.ca/antonmedv/gitmal/pkg/templates"
+)
+
+// GenerateIndex writes the repository root index.html: the top-level
+// directory listing plus the rendered README (when one exists at the root).
+// Only the root directory entry of the index is rendered here; per-directory
+// pages are produced by GenerateLists.
+func GenerateIndex(files []git.Blob, params Params) error {
+ // Build directory indexes
+ type dirInfo struct {
+ subdirs map[string]struct{}
+ files []git.Blob
+ }
+ dirs := map[string]*dirInfo{}
+
+ ensureDir := func(p string) *dirInfo {
+ if di, ok := dirs[p]; ok {
+ return di
+ }
+ di := &dirInfo{subdirs: map[string]struct{}{}, files: []git.Blob{}}
+ dirs[p] = di
+ return di
+ }
+
+ dirsSet := links.BuildDirSet(files)
+ filesSet := links.BuildFileSet(files)
+
+ for _, b := range files {
+ // Paths are forward-slash separated as produced by git.
+ parts := strings.Split(b.Path, "/")
+ // Register every intermediate directory on the way down.
+ cur := ""
+ for i := 0; i < len(parts)-1; i++ {
+ child := parts[i]
+ ensureDir(cur).subdirs[child] = struct{}{}
+ if cur == "" {
+ cur = child
+ } else {
+ cur = cur + "/" + child
+ }
+ }
+ // Single lookup for the leaf directory (previously ensureDir was
+ // called twice on the same expression).
+ leaf := ensureDir(cur)
+ leaf.files = append(leaf.files, b)
+ }
+
+ // Create the root entry explicitly so an empty repository does not
+ // dereference a nil *dirInfo below.
+ di := ensureDir("")
+
+ outDir := params.OutputDir
+ if err := os.MkdirAll(outDir, 0o755); err != nil {
+ return err
+ }
+
+ // Build entries
+ dirNames := make([]string, 0, len(di.subdirs))
+ for name := range di.subdirs {
+ dirNames = append(dirNames, name)
+ }
+ // Sort for stable output
+ sort.Strings(dirNames)
+ sort.Slice(di.files, func(i, j int) bool {
+ return di.files[i].FileName < di.files[j].FileName
+ })
+
+ subdirEntries := make([]templates.ListEntry, 0, len(dirNames))
+ for _, name := range dirNames {
+ subdirEntries = append(subdirEntries, templates.ListEntry{
+ Name: name + "/",
+ Href: "blob/" + params.Ref.DirName() + "/" + name + "/index.html",
+ IsDir: true,
+ })
+ }
+
+ fileEntries := make([]templates.ListEntry, 0, len(di.files))
+ for _, b := range di.files {
+ fileEntries = append(fileEntries, templates.ListEntry{
+ Name: b.FileName,
+ Href: "blob/" + params.Ref.DirName() + "/" + b.FileName + ".html",
+ Mode: b.Mode,
+ Size: humanizeSize(b.Size),
+ })
+ }
+
+ f, err := os.Create(filepath.Join(outDir, "index.html"))
+ if err != nil {
+ return err
+ }
+
+ rootHref := "./"
+ readmeHTML := readme(di.files, dirsSet, filesSet, params, rootHref)
+
+ err = templates.ListTemplate.ExecuteTemplate(f, "layout.gohtml", templates.ListParams{
+ LayoutParams: templates.LayoutParams{
+ Title: params.Name,
+ Name: params.Name,
+ Dark: params.Dark,
+ CSSMarkdown: cssMarkdown(params.Dark),
+ RootHref: rootHref,
+ CurrentRefDir: params.Ref.DirName(),
+ Selected: "code",
+ },
+ HeaderParams: templates.HeaderParams{
+ Ref: params.Ref,
+ Breadcrumbs: breadcrumbs(params.Name, "", false),
+ },
+ Ref: params.Ref,
+ Dirs: subdirEntries,
+ Files: fileEntries,
+ Readme: readmeHTML,
+ })
+ if err != nil {
+ _ = f.Close()
+ return err
+ }
+ return f.Close()
+}
internal/generator/list.go
@@ -0,0 +1,249 @@
+package generator
+
+import (
+ "context"
+ "fmt"
+ "html/template"
+ "os"
+ "path/filepath"
+ "runtime"
+ "sort"
+ "strings"
+ "sync"
+
+ "mokhan.ca/antonmedv/gitmal/pkg/git"
+ "mokhan.ca/antonmedv/gitmal/pkg/links"
+ "mokhan.ca/antonmedv/gitmal/pkg/progress_bar"
+ "mokhan.ca/antonmedv/gitmal/pkg/templates"
+)
+
+// GenerateLists renders one directory-listing page (index.html) for every
+// directory in the tree under <OutputDir>/blob/<ref>/, including the root,
+// each with optional README rendering. Directories are processed by a
+// bounded worker pool; the first error cancels the rest and is returned.
+func GenerateLists(files []git.Blob, params Params) error {
+ // Build directory indexes
+ type dirInfo struct {
+ subdirs map[string]struct{}
+ files []git.Blob
+ }
+ dirs := map[string]*dirInfo{}
+
+ ensureDir := func(p string) *dirInfo {
+ if di, ok := dirs[p]; ok {
+ return di
+ }
+ di := &dirInfo{subdirs: map[string]struct{}{}, files: []git.Blob{}}
+ dirs[p] = di
+ return di
+ }
+
+ dirsSet := links.BuildDirSet(files)
+ filesSet := links.BuildFileSet(files)
+
+ for _, b := range files {
+ // Normalize to forward slash paths for URL construction
+ p := b.Path
+ parts := strings.Split(p, "/")
+ // walk directories
+ cur := ""
+ for i := 0; i < len(parts)-1; i++ {
+ child := parts[i]
+ ensureDir(cur).subdirs[child] = struct{}{}
+ if cur == "" {
+ cur = child
+ } else {
+ cur = cur + "/" + child
+ }
+ ensureDir(cur) // ensure it exists
+ }
+ ensureDir(cur).files = append(ensureDir(cur).files, b)
+ }
+
+ // Prepare jobs slice to have stable iteration order (optional)
+ type job struct {
+ dirPath string
+ di *dirInfo
+ }
+ jobsSlice := make([]job, 0, len(dirs))
+ for dp, di := range dirs {
+ jobsSlice = append(jobsSlice, job{dirPath: dp, di: di})
+ }
+ // Sort by dirPath for determinism
+ sort.Slice(jobsSlice, func(i, j int) bool { return jobsSlice[i].dirPath < jobsSlice[j].dirPath })
+
+ // Worker pool similar to generateBlobs
+ workers := runtime.NumCPU()
+ if workers < 1 {
+ workers = 1
+ }
+
+ ctx, cancel := context.WithCancel(context.Background())
+ defer cancel()
+
+ jobCh := make(chan job)
+ // Buffered so the first error is recorded without blocking.
+ errCh := make(chan error, 1)
+ var wg sync.WaitGroup
+
+ p := progress_bar.NewProgressBar("lists for "+params.Ref.String(), len(jobsSlice))
+
+ // check records err (first one wins) and cancels the pool; it reports
+ // whether the current job should be aborted. Shared by all workers —
+ // it only touches the channel and cancel, both safe for concurrent use.
+ check := func(err error) bool {
+ if err != nil {
+ select {
+ case errCh <- err:
+ cancel()
+ default:
+ }
+ return true
+ }
+ return false
+ }
+
+ workerFn := func() {
+ defer wg.Done()
+ for {
+ select {
+ case <-ctx.Done():
+ return
+ case jb, ok := <-jobCh:
+ if !ok {
+ return
+ }
+ // Per-job closure so the deferred file close fires at the
+ // end of each job, not of the worker.
+ func() {
+ dirPath := jb.dirPath
+ di := jb.di
+
+ outDir := filepath.Join(params.OutputDir, "blob", params.Ref.DirName())
+ if dirPath != "" {
+ // convert forward slash path into OS path
+ outDir = filepath.Join(outDir, filepath.FromSlash(dirPath))
+ }
+ if err := os.MkdirAll(outDir, 0o755); check(err) {
+ return
+ }
+
+ // Build entries
+ dirNames := make([]string, 0, len(di.subdirs))
+ for name := range di.subdirs {
+ dirNames = append(dirNames, name)
+ }
+
+ // Sort for stable output. Each directory is owned by exactly
+ // one job, so sorting di.files here is race-free.
+ sort.Strings(dirNames)
+ sort.Slice(di.files, func(i, j int) bool {
+ return di.files[i].FileName < di.files[j].FileName
+ })
+
+ subdirEntries := make([]templates.ListEntry, 0, len(dirNames))
+ for _, name := range dirNames {
+ subdirEntries = append(subdirEntries, templates.ListEntry{
+ Name: name + "/",
+ Href: name + "/index.html",
+ IsDir: true,
+ })
+ }
+
+ fileEntries := make([]templates.ListEntry, 0, len(di.files))
+ for _, b := range di.files {
+ fileEntries = append(fileEntries, templates.ListEntry{
+ Name: b.FileName + "",
+ Href: b.FileName + ".html",
+ Mode: b.Mode,
+ Size: humanizeSize(b.Size),
+ })
+ }
+
+ // Title and current path label
+ title := fmt.Sprintf("%s/%s at %s", params.Name, dirPath, params.Ref)
+ if dirPath == "" {
+ title = fmt.Sprintf("%s at %s", params.Name, params.Ref)
+ }
+
+ f, err := os.Create(filepath.Join(outDir, "index.html"))
+ if check(err) {
+ return
+ }
+ defer func() {
+ _ = f.Close()
+ }()
+
+ // parent link is not shown for root
+ parent := "../index.html"
+ if dirPath == "" {
+ parent = ""
+ }
+
+ // Depth of this directory; +2 accounts for the leading
+ // blob/<ref>/ segments of the output page's location.
+ depth := 0
+ if dirPath != "" {
+ depth = len(strings.Split(dirPath, "/"))
+ }
+ rootHref := strings.Repeat("../", depth+2)
+
+ readmeHTML := readme(di.files, dirsSet, filesSet, params, rootHref)
+ var CSSMarkdown template.CSS
+ if readmeHTML != "" {
+ CSSMarkdown = cssMarkdown(params.Dark)
+ }
+
+ err = templates.ListTemplate.ExecuteTemplate(f, "layout.gohtml", templates.ListParams{
+ LayoutParams: templates.LayoutParams{
+ Title: title,
+ Name: params.Name,
+ Dark: params.Dark,
+ CSSMarkdown: CSSMarkdown,
+ RootHref: rootHref,
+ CurrentRefDir: params.Ref.DirName(),
+ Selected: "code",
+ },
+ HeaderParams: templates.HeaderParams{
+ Ref: params.Ref,
+ Breadcrumbs: breadcrumbs(params.Name, dirPath, false),
+ },
+ Ref: params.Ref,
+ ParentHref: parent,
+ Dirs: subdirEntries,
+ Files: fileEntries,
+ Readme: readmeHTML,
+ })
+ if check(err) {
+ return
+ }
+ }()
+
+ // NOTE(review): Inc runs even when the job bailed out on error.
+ p.Inc()
+ }
+ }
+ }
+
+ // Start workers
+ wg.Add(workers)
+ for i := 0; i < workers; i++ {
+ go workerFn()
+ }
+
+ // Feed jobs
+ go func() {
+ defer close(jobCh)
+ for _, jb := range jobsSlice {
+ select {
+ case <-ctx.Done():
+ return
+ case jobCh <- jb:
+ }
+ }
+ }()
+
+ // Wait for workers or first error
+ doneCh := make(chan struct{})
+ go func() {
+ wg.Wait()
+ close(doneCh)
+ }()
+
+ var runErr error
+ select {
+ case runErr = <-errCh:
+ <-doneCh
+ case <-doneCh:
+ }
+
+ p.Done()
+
+ return runErr
+}
internal/generator/markdown.go
@@ -0,0 +1,38 @@
+package generator
+
+import (
+ "html/template"
+
+ "github.com/yuin/goldmark"
+ highlighting "github.com/yuin/goldmark-highlighting/v2"
+ "github.com/yuin/goldmark/extension"
+ "github.com/yuin/goldmark/parser"
+ gmhtml "github.com/yuin/goldmark/renderer/html"
+
+ "mokhan.ca/antonmedv/gitmal/pkg/templates"
+)
+
+// createMarkdown builds the goldmark converter used for README and markdown
+// blob rendering: GitHub-flavored markdown plus typographer, fenced-code
+// highlighting with the given chroma style name, auto-generated heading IDs,
+// and raw-HTML passthrough.
+// NOTE(review): WithUnsafe passes raw HTML through unchanged — acceptable
+// for trusted repositories; revisit if untrusted input is ever rendered.
+func createMarkdown(style string) goldmark.Markdown {
+ return goldmark.New(
+ goldmark.WithExtensions(
+ extension.GFM,
+ extension.Typographer,
+ highlighting.NewHighlighting(
+ highlighting.WithStyle(style),
+ ),
+ ),
+ goldmark.WithParserOptions(
+ parser.WithAutoHeadingID(),
+ ),
+ goldmark.WithRendererOptions(
+ gmhtml.WithUnsafe(),
+ ),
+ )
+}
+
+// cssMarkdown returns the markdown stylesheet matching the color scheme.
+func cssMarkdown(dark bool) template.CSS {
+ if !dark {
+ return template.CSS(templates.CSSMarkdownLight)
+ }
+ return template.CSS(templates.CSSMarkdownDark)
+}
internal/generator/params.go
@@ -0,0 +1,14 @@
+package generator
+
+import "mokhan.ca/antonmedv/gitmal/pkg/git"
+
+// Params carries the shared configuration passed to every generator
+// function in this package.
+type Params struct {
+ Owner string // repository owner
+ Name string // repository name; used in page titles and breadcrumbs
+ RepoDir string // local path of the git repository being rendered
+ Ref git.Ref // ref currently being generated
+ OutputDir string // root directory the static site is written into
+ Style string // chroma syntax-highlighting style name (see styles.Get)
+ Dark bool // render the dark color scheme
+ DefaultRef git.Ref // the repository's default branch ref
+}
internal/generator/post_process.go
@@ -0,0 +1,139 @@
+package generator
+
+import (
+ "bytes"
+ "compress/gzip"
+ "io"
+ "io/fs"
+ "os"
+ "path/filepath"
+ "runtime"
+ "strings"
+ "sync"
+
+ "github.com/tdewolff/minify/v2"
+ "github.com/tdewolff/minify/v2/css"
+ "github.com/tdewolff/minify/v2/html"
+ "github.com/tdewolff/minify/v2/svg"
+
+ "mokhan.ca/antonmedv/gitmal/pkg/progress_bar"
+)
+
+func PostProcessHTML(root string, doMinify bool, doGzip bool) error {
+ // 1) Collect all HTML files first
+ var files []string
+ if err := filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error {
+ if err != nil {
+ return err
+ }
+ if d.IsDir() {
+ return nil
+ }
+ if strings.HasSuffix(d.Name(), ".html") {
+ files = append(files, path)
+ }
+ return nil
+ }); err != nil {
+ return err
+ }
+
+ if len(files) == 0 {
+ return nil
+ }
+
+ // 2) Setup progress bar
+ labels := []string{}
+ if doMinify {
+ labels = append(labels, "minify")
+ }
+ if doGzip {
+ labels = append(labels, "gzip")
+ }
+ pb := progress_bar.NewProgressBar(strings.Join(labels, " + "), len(files))
+ defer pb.Done()
+
+ // 3) Worker pool
+ workers := runtime.NumCPU()
+ if workers < 1 {
+ workers = 1
+ }
+ jobs := make(chan string, workers*2)
+ var wg sync.WaitGroup
+ var mu sync.Mutex
+ var firstErr error
+
+ workerFn := func() {
+ defer wg.Done()
+ var m *minify.M
+ if doMinify {
+ m = minify.New()
+ m.AddFunc("text/html", html.Minify)
+ m.AddFunc("text/css", css.Minify)
+ m.AddFunc("image/svg+xml", svg.Minify)
+ }
+ for path := range jobs {
+ data, err := os.ReadFile(path)
+ if err == nil && doMinify {
+ if md, e := m.Bytes("text/html", data); e == nil {
+ data = md
+ } else {
+ err = e
+ }
+ }
+ if err == nil {
+ if doGzip {
+ // write to file.html.gz
+ gzPath := path + ".gz"
+ if e := writeGzip(gzPath, data); e != nil {
+ err = e
+ } else if e := os.Remove(path); e != nil { // remove original .html
+ err = e
+ }
+ } else {
+ if e := os.WriteFile(path, data, 0o644); e != nil {
+ err = e
+ }
+ }
+ }
+
+ if err != nil {
+ mu.Lock()
+ if firstErr == nil {
+ firstErr = err
+ }
+ mu.Unlock()
+ }
+ pb.Inc()
+ }
+ }
+
+ wg.Add(workers)
+ for i := 0; i < workers; i++ {
+ go workerFn()
+ }
+ for _, f := range files {
+ jobs <- f
+ }
+ close(jobs)
+ wg.Wait()
+
+ return firstErr
+}
+
+func writeGzip(path string, data []byte) error {
+ f, err := os.Create(path)
+ if err != nil {
+ return err
+ }
+ defer func() { _ = f.Close() }()
+ gw := gzip.NewWriter(f)
+ gw.Name = filepath.Base(strings.TrimSuffix(path, ".gz"))
+ if _, err := io.Copy(gw, bytes.NewReader(data)); err != nil {
+ _ = gw.Close()
+ return err
+ }
+ if err := gw.Close(); err != nil {
+ return err
+ }
+ return nil
+}
internal/generator/readme.go
@@ -0,0 +1,45 @@
+package generator
+
+import (
+ "bytes"
+ "html/template"
+ "strings"
+
+ "mokhan.ca/antonmedv/gitmal/pkg/git"
+ "mokhan.ca/antonmedv/gitmal/pkg/links"
+)
+
+func readme(files []git.Blob, dirsSet, filesSet links.Set, params Params, rootHref string) template.HTML {
+ var readmeHTML template.HTML
+
+ md := createMarkdown(params.Style)
+
+ for _, b := range files {
+ nameLower := strings.ToLower(b.FileName)
+ if strings.HasPrefix(nameLower, "readme") && isMarkdown(b.Path) {
+ data, isBin, err := git.BlobContent(params.Ref, b.Path, params.RepoDir)
+ if err != nil || isBin {
+ break
+ }
+ var buf bytes.Buffer
+ if err := md.Convert(data, &buf); err != nil {
+ break
+ }
+
+ // Fix links/images relative to README location
+ htmlStr := links.Resolve(
+ buf.String(),
+ b.Path,
+ rootHref,
+ params.Ref.DirName(),
+ dirsSet,
+ filesSet,
+ )
+
+ readmeHTML = template.HTML(htmlStr)
+ break
+ }
+ }
+
+ return readmeHTML
+}
internal/generator/themes.go
@@ -0,0 +1,156 @@
+package generator
+
+import (
+ "html/template"
+ "net"
+ "net/http"
+ "sort"
+ "strings"
+
+ "github.com/alecthomas/chroma/v2/formatters/html"
+ "github.com/alecthomas/chroma/v2/lexers"
+ "github.com/alecthomas/chroma/v2/styles"
+
+ "mokhan.ca/antonmedv/gitmal/pkg/templates"
+)
+
// ThemeStyles maps each supported chroma style name to its overall tone,
// either "light" or "dark". PreviewThemes tags each preview card with this
// value so the gallery can render a matching background.
var ThemeStyles = map[string]string{
	"abap":                 "light",
	"algol":                "light",
	"arduino":              "light",
	"autumn":               "light",
	"average":              "dark",
	"base16-snazzy":        "dark",
	"borland":              "light",
	"bw":                   "light",
	"catppuccin-frappe":    "dark",
	"catppuccin-latte":     "light",
	"catppuccin-macchiato": "dark",
	"catppuccin-mocha":     "dark",
	"colorful":             "light",
	"doom-one":             "dark",
	"doom-one2":            "dark",
	"dracula":              "dark",
	"emacs":                "light",
	"evergarden":           "dark",
	"friendly":             "light",
	"fruity":               "dark",
	"github-dark":          "dark",
	"github":               "light",
	"gruvbox-light":        "light",
	"gruvbox":              "dark",
	"hrdark":               "dark",
	"igor":                 "light",
	"lovelace":             "light",
	"manni":                "light",
	"modus-operandi":       "light",
	"modus-vivendi":        "dark",
	"monokai":              "dark",
	"monokailight":         "light",
	"murphy":               "light",
	"native":               "dark",
	"nord":                 "dark",
	"nordic":               "dark",
	"onedark":              "dark",
	"onesenterprise":       "dark",
	"paraiso-dark":         "dark",
	"paraiso-light":        "light",
	"pastie":               "light",
	"perldoc":              "light",
	"pygments":             "light",
	"rainbow_dash":         "light",
	"rose-pine-dawn":       "light",
	"rose-pine-moon":       "dark",
	"rose-pine":            "dark",
	"rpgle":                "dark",
	"rrt":                  "dark",
	"solarized-dark":       "dark",
	"solarized-dark256":    "dark",
	"solarized-light":      "light",
	"swapoff":              "dark",
	"tango":                "light",
	"tokyonight-day":       "light",
	"tokyonight-moon":      "dark",
	"tokyonight-night":     "dark",
	"tokyonight-storm":     "dark",
	"trac":                 "light",
	"vim":                  "dark",
	"vs":                   "light",
	"vulcan":               "dark",
	"witchhazel":           "dark",
	"xcode-dark":           "dark",
	"xcode":                "light",
}
+
+func PreviewThemes() {
+ handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ names := make([]string, 0, len(ThemeStyles))
+ for name := range ThemeStyles {
+ names = append(names, name)
+ }
+ sort.Strings(names)
+
+ sampleLang := "javascript"
+ sampleCode := `function fib(n) {
+ if (n <= 1) {
+ return n;
+ }
+ return fib(n - 1) + fib(n - 2);
+}
+
+// Print n Fibonacci numbers.
+const n = 10;
+
+for (let i = 0; i < n; i++) {
+ console.log(fib(i));
+}`
+
+ formatter := html.New(
+ html.WithClasses(false),
+ )
+
+ // Generate cards
+ cards := make([]templates.PreviewCard, 0, len(names))
+ for _, theme := range names {
+ style := styles.Get(theme)
+ if style == nil {
+ continue
+ }
+ lexer := lexers.Get(sampleLang)
+ if lexer == nil {
+ continue
+ }
+ it, err := lexer.Tokenise(nil, sampleCode)
+ if err != nil {
+ continue
+ }
+ var sb strings.Builder
+ if err := formatter.Format(&sb, style, it); err != nil {
+ continue
+ }
+ cards = append(cards, templates.PreviewCard{
+ Name: theme,
+ Tone: ThemeStyles[theme],
+ HTML: template.HTML(sb.String()),
+ })
+ }
+
+ w.Header().Set("Content-Type", "text/html; charset=utf-8")
+ _ = templates.PreviewTemplate.Execute(w, templates.PreviewParams{
+ Count: len(cards),
+ Themes: cards,
+ })
+ })
+
+ ln, err := net.Listen("tcp", "127.0.0.1:0")
+ if err != nil {
+ panic(err)
+ }
+
+ addr := ln.Addr().String()
+ Echo("Preview themes at http://" + addr)
+
+ if err := http.Serve(ln, handler); err != nil && err != http.ErrServerClosed {
+ panic(err)
+ }
+}
internal/generator/utils.go
@@ -0,0 +1,126 @@
+package generator
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "mokhan.ca/antonmedv/gitmal/pkg/git"
+ "mokhan.ca/antonmedv/gitmal/pkg/templates"
+)
+
// Dot is an interpunct ("·") — presumably used as an inline visual
// separator in generated pages; confirm exact use at call sites.
const Dot = "·"

// Echo writes its arguments to stderr as one line, keeping stdout free
// for program output. Write errors are deliberately ignored.
func Echo(a ...any) {
	_, _ = fmt.Fprintln(os.Stderr, a...)
}
+
+func breadcrumbs(rootName string, path string, isFile bool) []templates.Breadcrumb {
+ // Root list
+ if path == "" {
+ return []templates.Breadcrumb{
+ {
+ Name: rootName,
+ Href: "./index.html",
+ IsDir: true,
+ },
+ }
+ }
+
+ // Paths from git are already with '/'
+ parts := strings.Split(path, "/")
+
+ // Build breadcrumbs relative to the file location so links work in static output
+ // Example: for a/b/c.txt, at /blob/<ref>/a/b/c.txt.html
+ // - root: ../../index.html
+ // - a: ../index.html
+ // - b: index.html
+ // - c.txt: (no link)
+ d := len(parts)
+
+ // current directory depth relative to ref
+ if isFile {
+ d -= 1
+ }
+
+ crumbs := make([]templates.Breadcrumb, 0, len(parts))
+
+ // root
+ crumbs = append(crumbs, templates.Breadcrumb{
+ Name: rootName,
+ Href: "./" + strings.Repeat("../", d) + "index.html",
+ IsDir: true,
+ })
+
+ // intermediate directories
+ for i := 0; i < len(parts)-1; i++ {
+ name := parts[i]
+ // target directory depth t = i+1
+ up := d - (i + 1)
+ href := "./" + strings.Repeat("../", up) + "index.html"
+ crumbs = append(crumbs, templates.Breadcrumb{
+ Name: name,
+ Href: href,
+ IsDir: true,
+ })
+ }
+
+ // final file (no link)
+ crumbs = append(crumbs, templates.Breadcrumb{
+ Name: parts[len(parts)-1],
+ IsDir: !isFile,
+ })
+
+ return crumbs
+}
+
// humanizeSize formats a byte count for display: exact bytes below 1 KiB,
// otherwise one decimal place with a binary-scaled unit letter (KB..EB).
func humanizeSize(size int64) string {
	const step = 1024
	if size < step {
		return fmt.Sprintf("%d B", size)
	}
	// Dividing by 1024 is exact in float64 (power of two), so repeated
	// division matches a single divide by 1024^k.
	value := float64(size)
	idx := 0
	for value /= step; value >= step; value /= step {
		idx++
	}
	return fmt.Sprintf("%.1f %cB", value, "KMGTPE"[idx])
}
+
// isMarkdown reports whether path has a markdown file extension,
// case-insensitively.
func isMarkdown(path string) bool {
	lower := strings.ToLower(path)
	for _, ext := range []string{".md", ".markdown", ".mdown", ".mkd", ".mkdown"} {
		if strings.HasSuffix(lower, ext) {
			return true
		}
	}
	return false
}
+
// isImage reports whether path has a common raster-image file extension.
// The extension is lowercased first so uppercase names like PHOTO.JPG
// match, consistent with isMarkdown's case-insensitive check.
func isImage(path string) bool {
	switch strings.ToLower(filepath.Ext(path)) {
	case ".png", ".jpg", ".jpeg", ".gif", ".webp":
		return true
	default:
		return false
	}
}
+
+func ContainsBranch(branches []git.Ref, branch string) bool {
+ for _, b := range branches {
+ if b.String() == branch {
+ return true
+ }
+ }
+ return false
+}
+
+func HasConflictingBranchNames(branches []git.Ref) (bool, git.Ref, git.Ref) {
+ uniq := make(map[string]git.Ref, len(branches))
+ for _, b := range branches {
+ if a, exists := uniq[b.DirName()]; exists {
+ return true, a, b
+ }
+ uniq[b.DirName()] = b
+ }
+ return false, git.Ref{}, git.Ref{}
+}