pagr

A 'static site generator', built using dati.
Log | Files | Refs | Atom

commit 99546e3ca81bd6c33cd016db51348a6935e19405
parent a8111417f4273d28b8a6a9b5c875abc60d78bde0
Author: gearsix <gearsix@tuta.io>
Date:   Mon, 14 Mar 2022 11:12:34 +0000

tidyup: moved BuildSitemap, page.go -> sitemap.go

Split it into more than one func for readability. Added BuildCrumbs
as a public function.

Diffstat:
M page.go    | 59 -----------------------------------------------------------
A sitemap.go | 81 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
2 files changed, 81 insertions(+), 59 deletions(-)

diff --git a/page.go b/page.go @@ -20,65 +20,6 @@ import ( const timefmt = "2006-01-02" -// Sitemap parses `pages` to determine the `.Nav` values for each element in `pages` -// based on their `.Path` value. These values will be set in the returned Content -func BuildSitemap(pages []Page) []Page { - var root *Page - for i, p := range pages { - if p.Path == "/" { - root = &pages[i] - break - } - } - - for i, p := range pages { - p.Nav.Root = root - - pdepth := len(strings.Split(p.Path, "/")[1:]) - if p.Path == "/" { - pdepth = 0 - } - - if pdepth == 1 && p.Path != "/" { - p.Nav.Parent = root - } - - for j, pp := range pages { - ppdepth := len(strings.Split(pp.Path, "/")[1:]) - if pp.Path == "/" { - ppdepth = 0 - } - - p.Nav.All = append(p.Nav.All, &pages[j]) - if p.Nav.Parent == nil && ppdepth == pdepth-1 && strings.Contains(p.Path, pp.Path) { - p.Nav.Parent = &pages[j] - } - if ppdepth == pdepth+1 && strings.Contains(pp.Path, p.Path) { - p.Nav.Children = append(p.Nav.Children, &pages[j]) - } - } - - sort.SliceStable(p.Nav.Children, func(i, j int) bool { - return sort.StringsAreSorted([]string{p.Nav.Children[i].Path, p.Nav.Children[j].Path}) - }) - - var crumb string - for _, c := range strings.Split(p.Path, "/")[1:] { - crumb += "/" + c - for j, pp := range pages { - if pp.Path == crumb { - p.Nav.Crumbs = append(p.Nav.Crumbs, &pages[j]) - break - } - } - } - - pages[i] = p - } - - return pages -} - func lastFileMod(fpath string) time.Time { t := time.Now() // default/error ret if fd, e := os.Stat(fpath); e != nil { diff --git a/sitemap.go b/sitemap.go @@ -0,0 +1,81 @@ +package main + +import ( + "strings" + "sort" +) + +func findRootPage(pages []Page) (root *Page) { + for i, p := range pages { + if p.Path == "/" { + root = &pages[i] + break + } + } + return +} + +func readPageDepth(p Page) (depth int) { + if p.Path == "/" { + depth = 0 + } else { + depth = len(strings.Split(p.Path, "/")[1:]) + } + return +} + +// BuildCrumbs will generate a `[]*Page`, where each 
item is a pointer to the Page +// found `pages`, who's `.Path` matches a crumb in `p.Path`. +// "crumbs" referring to https://en.wikipedia.org/wiki/Breadcrumb_navigation +func BuildCrumbs(p Page, pages []Page) (crumbs []*Page) { + var path string + for _, c := range strings.Split(p.Path, "/")[1:] { + path += "/" + c + for j, pp := range pages { + if pp.Path == path { + crumbs = append(p.Nav.Crumbs, &pages[j]) + break + } + } + } + return +} + +// Sitemap parses `pages` to determine the `.Nav` values for each element in `pages` +// based on their `.Path` value. These values will be set in the returned Content +func BuildSitemap(pages []Page) []Page { + root := findRootPage(pages) + + for i, p := range pages { + pdepth := readPageDepth(p) + + p.Nav.Root = root + + if pdepth == 1 && p.Path != "/" { + p.Nav.Parent = root + } + + for j, pp := range pages { + ppdepth := readPageDepth(pp) + + p.Nav.All = append(p.Nav.All, &pages[j]) + + if p.Nav.Parent == nil && ppdepth == pdepth-1 && strings.Contains(p.Path, pp.Path) { + p.Nav.Parent = &pages[j] + } + if ppdepth == pdepth+1 && strings.Contains(pp.Path, p.Path) { + p.Nav.Children = append(p.Nav.Children, &pages[j]) + } + } + + sort.SliceStable(p.Nav.Children, func(i, j int) bool { + return sort.StringsAreSorted([]string{p.Nav.Children[j].Path, p.Nav.Children[j].Path}) + }) + + p.Nav.Crumbs = BuildCrumbs(p, pages) + + pages[i] = p + } + + return pages +}