hswg: an initial attempt at refactoring main()
parent dd5c583e8b
commit 8276f6bcb9

hswg/main.go (233 changed lines)
@@ -187,6 +187,127 @@ func expand(m *map[string]*Entry, name string, chunk []byte) []byte {
 	})
 }
 
+var tagRE = regexp.MustCompile(`<[^<>]+>`)
+
+func renderEntry(name string, entries *map[string]*Entry) error {
+	e := (*entries)[name]
+
+	f, err := os.Open(e.PathSource)
+	if err != nil {
+		return err
+	}
+
+	if i, err := f.Stat(); err != nil {
+		return err
+	} else {
+		e.mtime = i.ModTime()
+	}
+
+	var html *bytes.Buffer
+	if html, e.Metadata, err = Render(f, configuration.NewConfiguration(
+		configuration.WithFilename(e.PathSource),
+		configuration.WithLastUpdated(e.mtime),
+	)); err != nil {
+		return err
+	}
+
+	// Every page needs to have a title.
+	if e.Title == "" {
+		e.Title = name
+	}
+
+	// Expand LinkWords anywhere between <tags>.
+	// We want something like the inverse of Regexp.ReplaceAllStringFunc.
+	raw, last, expanded := html.Bytes(), 0, bytes.NewBuffer(nil)
+	for _, where := range tagRE.FindAllIndex(raw, -1) {
+		_, _ = expanded.Write(expand(entries, name, raw[last:where[0]]))
+		_, _ = expanded.Write(raw[where[0]:where[1]])
+		last = where[1]
+	}
+	_, _ = expanded.Write(expand(entries, name, raw[last:]))
+	e.Content = template.HTML(expanded.String())
+	return nil
+}
+
+func loadEntries(globs []string) (map[string]*Entry, error) {
+	// Create a map from document names to their page entries.
+	entries := map[string]*Entry{}
+	for _, glob := range globs {
+		matches, err := filepath.Glob(glob)
+		if err != nil {
+			return nil, fmt.Errorf("%s: %s\n", glob, err)
+		}
+		for _, path := range matches {
+			name := stripExtension(filepath.Base(path))
+			if conflict, ok := entries[name]; ok {
+				return nil, fmt.Errorf("%s: conflicts with %s\n",
+					name, conflict.PathSource)
+			}
+			entries[name] = &Entry{
+				PathSource:      path,
+				PathDestination: resultPath(path),
+				backlinks:       map[string]bool{},
+			}
+		}
+	}
+
+	for name := range entries {
+		if err := renderEntry(name, &entries); err != nil {
+			return nil, err
+		}
+	}
+	return entries, nil
+}
+
+func writeEntry(e *Entry, t *template.Template,
+	entries *map[string]*Entry) error {
+	f, err := os.Create(e.PathDestination)
+	if err != nil {
+		return err
+	}
+
+	backlinks := []string{}
+	for name := range e.backlinks {
+		backlinks = append(backlinks, name)
+	}
+	sort.Strings(backlinks)
+	for _, name := range backlinks {
+		e.Backlinks =
+			append(e.Backlinks, template.HTML(makeLink(entries, name)))
+	}
+
+	return t.Execute(f, e)
+}
+
+func writeIndex(t *template.Template, entries *map[string]*Entry) error {
+	// Reorder entries reversely, primarily by date, secondarily by filename.
+	ordered := []*Entry{}
+	for _, e := range *entries {
+		ordered = append(ordered, e)
+	}
+
+	sort.Slice(ordered, func(i, j int) bool {
+		a, b := ordered[i], ordered[j]
+		p1, p2 := a.Published(), b.Published()
+		if p1 == nil && p2 != nil {
+			return true
+		}
+		if p1 == nil && p2 == nil {
+			return a.PathSource > b.PathSource
+		}
+		if p2 == nil {
+			return false
+		}
+		if p1.Equal(*p2) {
+			return a.PathSource > b.PathSource
+		}
+		return p2.Before(*p1)
+	})
+
+	// TODO(p): Splitting content to categories would be nice.
+	return t.Execute(os.Stdout, ordered)
+}
+
 func singleFile() {
 	html, meta, err := Render(os.Stdin, configuration.NewConfiguration())
 	if err != nil {
@@ -213,124 +334,32 @@ func main() {
 	if err != nil {
 		log.Fatalln(err)
 	}
 
 	t, err := template.New("page").Parse(string(header))
 	if err != nil {
 		log.Fatalln(err)
 	}
 
-	// Create a map from document names to their page entries.
-	entries := map[string]*Entry{}
-	for _, glob := range os.Args[2:] {
-		matches, err := filepath.Glob(glob)
-		if err != nil {
-			log.Fatalf("%s: %s\n", glob, err)
-		}
-		for _, path := range matches {
-			name := stripExtension(filepath.Base(path))
-			if conflict, ok := entries[name]; ok {
-				log.Fatalf("%s: conflicts with %s\n", name, conflict.PathSource)
-			}
-			entries[name] = &Entry{
-				PathSource:      path,
-				PathDestination: resultPath(path),
-				backlinks:       map[string]bool{},
-			}
-		}
+	// Process all entries.
+	entries, err := loadEntries(os.Args[2:])
+	if err != nil {
+		log.Fatalln(err)
 	}
-
-	tagRE := regexp.MustCompile(`<[^<>]+>`)
-	for name, e := range entries {
-		f, err := os.Open(e.PathSource)
-		if err != nil {
-			log.Fatalln(err)
-		}
-
-		if i, err := f.Stat(); err != nil {
-			log.Fatalln(err)
-		} else {
-			e.mtime = i.ModTime()
-		}
-
-		var html *bytes.Buffer
-		if html, e.Metadata, err = Render(f, configuration.NewConfiguration(
-			configuration.WithFilename(e.PathSource),
-			configuration.WithLastUpdated(e.mtime),
-		)); err != nil {
-			log.Fatalln(err)
-		}
-
-		// Every page needs to have a title.
-		if e.Title == "" {
-			e.Title = name
-		}
-
-		// Expand LinkWords anywhere between <tags>.
-		// We want something like the inverse of Regexp.ReplaceAllStringFunc.
-		raw, last, expanded := html.Bytes(), 0, bytes.NewBuffer(nil)
-		for _, where := range tagRE.FindAllIndex(raw, -1) {
-			_, _ = expanded.Write(expand(&entries, name, raw[last:where[0]]))
-			_, _ = expanded.Write(raw[where[0]:where[1]])
-			last = where[1]
-		}
-		_, _ = expanded.Write(expand(&entries, name, raw[last:]))
-		e.Content = template.HTML(expanded.String())
-	}
-
 	for _, e := range entries {
-		f, err := os.Create(e.PathDestination)
-		if err != nil {
-			log.Fatalln(err)
-		}
-
-		backlinks := []string{}
-		for name := range e.backlinks {
-			backlinks = append(backlinks, name)
-		}
-		sort.Strings(backlinks)
-		for _, name := range backlinks {
-			e.Backlinks =
-				append(e.Backlinks, template.HTML(makeLink(&entries, name)))
-		}
-
-		if err = t.Execute(f, e); err != nil {
+		if err := writeEntry(e, t, &entries); err != nil {
 			log.Fatalln(err)
 		}
 	}
-
-	// Reorder entries reversely, primarily by date, secondarily by filename.
-	ordered := []*Entry{}
-	for _, e := range entries {
-		ordered = append(ordered, e)
-	}
-
-	sort.Slice(ordered, func(i, j int) bool {
-		a, b := ordered[i], ordered[j]
-		p1, p2 := a.Published(), b.Published()
-		if p1 == nil && p2 != nil {
-			return true
-		}
-		if p1 == nil && p2 == nil {
-			return a.PathSource > b.PathSource
-		}
-		if p2 == nil {
-			return false
-		}
-		if p1.Equal(*p2) {
-			return a.PathSource > b.PathSource
-		}
-		return p2.Before(*p1)
-	})
-
-	// Execute a template from the standard input.
+	// Read a template from the standard input, write an index.
 	var input []byte
 	if input, err = ioutil.ReadAll(os.Stdin); err != nil {
 		log.Fatalln(err)
 	}
-
-	// TODO(p): Splitting content to categories would be nice.
 	t, err = template.New("-").Parse(string(input))
-	if err = t.Execute(os.Stdout, ordered); err != nil {
+	if err != nil {
 		log.Fatalln(err)
 	}
+	if err := writeIndex(t, &entries); err != nil {
+		log.Fatalln(err)
+	}
 }
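A note on the LinkWord expansion that renderEntry inherits from the old main(): the regexp is applied with FindAllIndex and the transformation runs on the text between <tag> matches, while the matches themselves are copied through verbatim, which is what the comment means by "something like the inverse of Regexp.ReplaceAllStringFunc". Below is a minimal, self-contained sketch of that splitting pattern; the replaceOutsideMatches helper and the uppercasing transform are illustrative stand-ins, not part of hswg.

package main

import (
	"bytes"
	"fmt"
	"regexp"
)

var tagRE = regexp.MustCompile(`<[^<>]+>`)

// replaceOutsideMatches applies transform to every span of raw that lies
// outside tagRE matches, and copies the matches themselves through verbatim.
func replaceOutsideMatches(raw []byte, transform func([]byte) []byte) []byte {
	last, out := 0, bytes.NewBuffer(nil)
	for _, where := range tagRE.FindAllIndex(raw, -1) {
		_, _ = out.Write(transform(raw[last:where[0]])) // text before the tag
		_, _ = out.Write(raw[where[0]:where[1]])        // the tag itself, untouched
		last = where[1]
	}
	_, _ = out.Write(transform(raw[last:])) // trailing text after the last tag
	return out.Bytes()
}

func main() {
	html := []byte(`<p>LinkWord outside <a href="LinkWord">a tag</a> gets transformed</p>`)
	fmt.Printf("%s\n", replaceOutsideMatches(html, bytes.ToUpper))
}

In hswg itself, expand() plays the transform role, rewriting LinkWords in the text while leaving the markup alone.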