hswg: actually use templates for output files
parent d47a8d2237
commit 63d18d068d

 hswg/main.go | 78

--- a/hswg/main.go
+++ b/hswg/main.go
@@ -134,17 +134,18 @@ func Render(r io.Reader, config configuration.Configuration) (
 
 // Entry contains all context information about a single page.
 type Entry struct {
-	PathSource      string    // path to source AsciiDoc
-	PathDestination string    // path to destination HTML
-	mtime           time.Time // modification time
-	Metadata        Metadata  // metadata
-	document        []byte    // inner document with expanded LinkWords
-	backlinks       []string  // what documents link back here
+	Metadata                      // metadata
+	PathSource      string        // path to source AsciiDoc
+	PathDestination string        // path to destination HTML
+	mtime           time.Time     // modification time
+	Content         template.HTML // inner document with expanded LinkWords
+	backlinks       []string      // what documents link back here
+	Backlinks       []template.HTML
 }
 
 // Published returns the date when the entry was published, or nil if unknown.
 func (e *Entry) Published() *time.Time {
-	if d, _, err := e.Metadata.Attributes.GetAsString("date"); err != nil {
+	if d, _, err := e.Attributes.GetAsString("date"); err != nil {
 		return nil
 	} else if t, err := time.Parse(time.RFC3339, d); err == nil {
 		return &t
@@ -178,7 +179,7 @@ var linkWordRE = regexp.MustCompile(`\b\p{Lu}\p{L}*\b`)
 func expand(m *map[string]*Entry, name string, chunk []byte) []byte {
 	return linkWordRE.ReplaceAllFunc(chunk, func(match []byte) []byte {
 		if link, ok := (*m)[string(match)]; ok && string(match) != name &&
-			!link.Metadata.IsDraft() {
+			!link.IsDraft() {
 			link.backlinks = append(link.backlinks, name)
 			return []byte(makeLink(m, string(match)))
 		}
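
With Metadata embedded rather than held as a named field, its exported fields and methods are promoted to Entry. That is what lets e.Metadata.Attributes and link.Metadata.IsDraft() shorten to e.Attributes and link.IsDraft(), and what lets the page template reach {{.Title}} on an Entry directly. A minimal sketch of the mechanism, using stand-in types rather than anything from this repository:

    package main

    import "fmt"

    // Meta stands in for the real Metadata type; only the promotion mechanism matters here.
    type Meta struct{ Title string }

    func (m Meta) IsDraft() bool { return m.Title == "" }

    // Page embeds Meta the same way Entry now embeds Metadata.
    type Page struct {
        Meta
        Content string
    }

    func main() {
        p := Page{Meta: Meta{Title: "Hello"}, Content: "<p>…</p>"}
        // Promoted field and method: no need to write p.Meta.Title or p.Meta.IsDraft().
        fmt.Println(p.Title, p.IsDraft())
    }

Only exported names are reachable from html/template, which is also why the unexported backlinks slice gains an exported Backlinks counterpart below.
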
@@ -213,6 +214,11 @@ func main() {
 		log.Fatalln(err)
 	}
 
+	t, err := template.New("page").Parse(string(header))
+	if err != nil {
+		log.Fatalln(err)
+	}
+
 	// Create a map from document names to their page entries.
 	entries := map[string]*Entry{}
 	for _, glob := range os.Args[2:] {
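
The header file, previously copied into every output verbatim, is now parsed once as the page template and later filled in per entry with t.Execute(f, e). Its actual contents are not part of this commit; the sketch below only assumes a template that refers to the exported names visible in this diff (Title, Content, Backlinks) and shows the parse-then-execute flow on its own:

    package main

    import (
        "html/template"
        "log"
        "os"
    )

    // pageTemplate is a guessed stand-in for the real header file,
    // which is not included in this commit.
    const pageTemplate = `<title>{{.Title}}</title>
    <h1>{{.Title}}</h1>
    {{.Content}}
    {{if .Backlinks}}<p id=links>Links here: {{range .Backlinks}}{{.}} {{end}}</p>{{end}}
    `

    // page is a cut-down stand-in for Entry's exported surface.
    type page struct {
        Title     string
        Content   template.HTML
        Backlinks []template.HTML
    }

    func main() {
        t, err := template.New("page").Parse(pageTemplate)
        if err != nil {
            log.Fatalln(err)
        }
        p := page{
            Title:   "Example",
            Content: template.HTML("<p>Body with expanded LinkWords.</p>"),
            // The link target is illustrative only.
            Backlinks: []template.HTML{"<a href='other.html'>Other</a>"},
        }
        if err := t.Execute(os.Stdout, p); err != nil {
            log.Fatalln(err)
        }
    }
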
@@ -253,6 +259,11 @@ func main() {
 			log.Fatalln(err)
 		}
 
+		// Every page needs to have a title.
+		if e.Title == "" {
+			e.Title = name
+		}
+
 		// Expand LinkWords anywhere between <tags>.
 		// We want something like the inverse of Regexp.ReplaceAllStringFunc.
 		raw, last, expanded := html.Bytes(), 0, bytes.NewBuffer(nil)
@@ -262,45 +273,34 @@ func main() {
 			last = where[1]
 		}
 		_, _ = expanded.Write(expand(&entries, name, raw[last:]))
-		e.document = expanded.Bytes()
+		e.Content = template.HTML(expanded.String())
 	}
 
-	// TODO(p): These should be run through html/template.
-	for name, e := range entries {
+	for _, e := range entries {
+		sort.Strings(e.backlinks)
+		last, uniq := "", []string{}
+		for _, name := range e.backlinks {
+			if name != last {
+				uniq = append(uniq, name)
+			}
+			last = name
+		}
+		e.backlinks = uniq
+	}
+
+	for _, e := range entries {
 		f, err := os.Create(e.PathDestination)
 		if err != nil {
 			log.Fatalln(err)
 		}
 
-		_, _ = f.Write(header)
-
-		title := e.Metadata.Title
-		if title == "" {
-			title = name
-		}
-
-		_, _ = f.WriteString(fmt.Sprintf("<title>%s</title>\n", title))
-		_, _ = f.WriteString(fmt.Sprintf("<h1>%s</h1>\n", title))
-
-		sort.Strings(e.backlinks)
-
-		backlinks, last := []string{}, ""
 		for _, name := range e.backlinks {
-			if name != last {
-				backlinks = append(backlinks, makeLink(&entries, name))
-			}
-			last = name
+			e.Backlinks = append(e.Backlinks,
+				template.HTML(makeLink(&entries, name)))
 		}
 
-		if len(backlinks) > 0 {
-			_, _ = f.WriteString(fmt.Sprintf("<p id=links>Links here: %s</p>\n",
-				strings.Join(backlinks, ", ")))
+		if err = t.Execute(f, e); err != nil {
+			log.Fatalln(err)
 		}
-
-		_, _ = f.Write(e.document)
-		_, _ = f.WriteString(fmt.Sprintf("<p id=footer>Last updated: %s"+
-			" — <a href='%s'>Source</p>\n",
-			e.Metadata.LastUpdated, e.PathSource))
 	}
 
 	// Reorder entries reversely, primarily by date, secondarily by filename.
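
Storing the rendered body as Content template.HTML (and the rendered links as Backlinks []template.HTML) instead of raw bytes matters because html/template escapes ordinary string values on output, while values of type template.HTML are treated as already-safe markup and written through unchanged. A small self-contained demonstration of that difference:

    package main

    import (
        "html/template"
        "os"
    )

    func main() {
        t := template.Must(template.New("demo").Parse("{{.Raw}}\n{{.Safe}}\n"))
        data := struct {
            Raw  string        // escaped on output by html/template
            Safe template.HTML // already trusted HTML, written through verbatim
        }{
            Raw:  "<p>hello</p>",
            Safe: "<p>hello</p>",
        }
        // Prints &lt;p&gt;hello&lt;/p&gt; on the first line and <p>hello</p> on the second.
        _ = t.Execute(os.Stdout, data)
    }
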
@@ -334,7 +334,7 @@ func main() {
 	}
 
 	// TODO(p): Splitting content to categories would be nice.
-	t, err := template.New("-").Parse(string(input))
+	t, err = template.New("-").Parse(string(input))
 	if err = t.Execute(os.Stdout, ordered); err != nil {
 		log.Fatalln(err)
 	}
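
The final one-character change, := becoming =, follows from the page template added earlier in main: t and err are already declared in that scope, and Go rejects a short variable declaration that introduces no new variables. A trivial illustration, unrelated to this repository:

    package main

    import "fmt"

    func main() {
        t, err := "page", error(nil) // first declaration, := is required
        // t, err := "index", error(nil) // would not compile: no new variables on left side of :=
        t, err = "index", nil // plain assignment reuses the existing variables
        fmt.Println(t, err)
    }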