pagr

A 'static site generator', built using dati.
Log | Files | Refs | Atom

commit c95a59b9958924eaddddcf2d5e8034eba02b80e0
parent 1af2a8a7220a63ce728e320591bbc53eb2a45d84
Author: gearsix <gearsix@tuta.io>
Date:   Wed,  4 Aug 2021 18:56:10 +0100

made Page.Updated an RFC822 string; goroutine'd BuildSitemap loop

Diffstat:
Mpage.go | 77++++++++++++++++++++++++++++++++++++++++++++---------------------------------
Mpage_test.go | 9+++++++--
2 files changed, 51 insertions(+), 35 deletions(-)

diff --git a/page.go b/page.go
@@ -18,6 +18,8 @@ import (
 	"sort"
 )
 
+const timefmt = time.RFC822
+
 // Sitemap parses `pages` to determine the `.Nav` values for each element in `pages`
 // based on their `.Path` value. These values will be set in the returned Content
 func BuildSitemap(pages []Page) []Page {
@@ -30,44 +32,46 @@ func BuildSitemap(pages []Page) []Page {
 	}
 
 	for i, p := range pages {
-		p.Nav.Root = root
-
-		pdepth := len(strings.Split(p.Path, "/")[1:])
-		if p.Path == "/" {
-			pdepth = 0
-		}
-
-		if pdepth == 1 && p.Path != "/" {
-			p.Nav.Parent = root
-		}
+		go func(i int, p Page) {
+			p.Nav.Root = root
 
-		for j, pp := range pages {
-			ppdepth := len(strings.Split(pp.Path, "/")[1:])
-			if pp.Path == "/" {
-				ppdepth = 0
+			pdepth := len(strings.Split(p.Path, "/")[1:])
+			if p.Path == "/" {
+				pdepth = 0
 			}
 
-			p.Nav.All = append(p.Nav.All, &pages[j])
-			if p.Nav.Parent == nil && ppdepth == pdepth - 1 && strings.Contains(p.Path, pp.Path) {
-				p.Nav.Parent = &pages[j]
-			}
-			if ppdepth == pdepth + 1 && strings.Contains(pp.Path, p.Path) {
-				p.Nav.Children = append(p.Nav.Children, &pages[j])
+			if pdepth == 1 && p.Path != "/" {
+				p.Nav.Parent = root
 			}
-		}
 
-		var crumb string
-		for _, c := range strings.Split(p.Path, "/")[1:] {
-			crumb += "/" + c
 			for j, pp := range pages {
-				if pp.Path == crumb {
-					p.Nav.Crumbs = append(p.Nav.Crumbs, &pages[j])
-					break
+				ppdepth := len(strings.Split(pp.Path, "/")[1:])
+				if pp.Path == "/" {
+					ppdepth = 0
+				}
+
+				p.Nav.All = append(p.Nav.All, &pages[j])
+				if p.Nav.Parent == nil && ppdepth == pdepth - 1 && strings.Contains(p.Path, pp.Path) {
+					p.Nav.Parent = &pages[j]
+				}
+				if ppdepth == pdepth + 1 && strings.Contains(pp.Path, p.Path) {
+					p.Nav.Children = append(p.Nav.Children, &pages[j])
 				}
 			}
-		}
 
-		pages[i] = p
+			var crumb string
+			for _, c := range strings.Split(p.Path, "/")[1:] {
+				crumb += "/" + c
+				for j, pp := range pages {
+					if pp.Path == crumb {
+						p.Nav.Crumbs = append(p.Nav.Crumbs, &pages[j])
+						break
+					}
+				}
+			}
+
+			pages[i] = p
+		}(i, p)
 	}
 
 	return pages
@@ -78,7 +82,7 @@ func titleFromPath(path string) (title string) {
 		title = "Home"
 	}
 	title = strings.TrimSuffix(title, filepath.Ext(title))
-	title = strings.ReplaceAll(title, "-", " ")
+	//title = strings.ReplaceAll(title, "-", " ")
 	//title = strings.Title(title)
 	return
 }
@@ -178,7 +182,14 @@ func LoadPagesDir(dir string) (p []Page, e error) {
 		p = append(p, page)
 	}
 
-	sort.SliceStable(p, func(i, j int) bool { return p[i].Updated.Before(p[j].Updated) })
+	sort.SliceStable(p, func(i, j int) bool {
+		if it, err := time.Parse(timefmt, p[i].Updated); err == nil {
+			if jt, err := time.Parse(timefmt, p[j].Updated); err == nil {
+				return it.After(jt)
+			}
+		}
+		return false
+	})
 
 	p = BuildSitemap(p)
 
@@ -218,7 +229,7 @@ type Page struct {
 	Meta     Meta
 	Contents []string
 	Assets   []string
-	Updated  time.Time
+	Updated  string
 }
 
 // Nav is a struct that provides a set of pointers for navigating a
@@ -243,7 +254,7 @@ func NewPage(path string, updated time.Time) Page {
 		Meta:     make(Meta),
 		Contents: make([]string, 0),
 		Assets:   make([]string, 0),
-		Updated:  updated,
+		Updated:  updated.Format(timefmt),
 	}
 }
 
diff --git a/page_test.go b/page_test.go
@@ -51,9 +51,14 @@ func validateContents(t *testing.T, pages []Page, e error) {
 			t.Error("empty Assets for page:", p.Path)
 		}
 
+		var pt time.Time
+		if pt, e = time.Parse(timefmt, p.Updated); e != nil {
+			t.Fatal(e)
+		}
+
 		if i == 0 {
-			last = p.Updated
-		} else if p.Updated.Before(last) {
+			last = pt
+		} else if pt.Before(last) {
 			for _, pp := range pages {
 				t.Log(pp.Updated)
 			}