summaryrefslogtreecommitdiffstats
path: root/internal
diff options
context:
space:
mode:
authorClaude <noreply@anthropic.com>2026-03-31 12:11:18 +0200
committerClaude <noreply@anthropic.com>2026-03-31 12:11:18 +0200
commit3cb7c82cf7c4e050148f69be23590a7fbe587a27 (patch)
treed2b6506db2de72b3a6982cfbe69925b88936de90 /internal
parent33f214f6cd9729473bb55fd7b3b923d5d960bb98 (diff)
downloadnebbet.no-3cb7c82cf7c4e050148f69be23590a7fbe587a27.tar.xz
nebbet.no-3cb7c82cf7c4e050148f69be23590a7fbe587a27.zip
Add static site builder: SQLite-backed MD→HTML pipeline
- cmd/nebbet: CLI with build [--watch] and user add/passwd/delete/list
- internal/builder: markdown→HTML, component injection via HTML comments, auto importmap from lib/, fsnotify watch with 150ms debounce
- internal/db: meta.db (page index, tag queries) + search.db (FTS5)
- internal/sqlitedrv: minimal CGO database/sql driver for system libsqlite3
- internal/auth: htpasswd-compatible bcrypt password file management
- templates/base.html + admin.html, styles/main.css + admin.css
- nginx.conf with auth_basic for /admin, clean URLs, gzip
- nebbet.service systemd unit for watch daemon
- Example content/index.md and components/site-greeting.js

https://claude.ai/code/session_01HTc1BCBCiMTEB54XQP1Wz9
Diffstat (limited to 'internal')
-rw-r--r--internal/auth/auth.go159
-rw-r--r--internal/builder/builder.go283
-rw-r--r--internal/builder/components.go97
-rw-r--r--internal/builder/frontmatter.go64
-rw-r--r--internal/builder/importmap.go58
-rw-r--r--internal/builder/markdown.go49
-rw-r--r--internal/db/meta.go117
-rw-r--r--internal/db/search.go113
-rw-r--r--internal/sqlitedrv/driver.go263
9 files changed, 1203 insertions, 0 deletions
diff --git a/internal/auth/auth.go b/internal/auth/auth.go
new file mode 100644
index 0000000..b0de7d9
--- /dev/null
+++ b/internal/auth/auth.go
@@ -0,0 +1,159 @@
+// Package auth manages a htpasswd-compatible password file (bcrypt entries).
+// The file format is one "username:$2a$..." entry per line.
+// nginx auth_basic accepts this file directly via auth_basic_user_file.
+package auth
+
import (
	"bufio"
	"fmt"
	"os"
	"sort"
	"strings"
	"syscall"

	"golang.org/x/crypto/bcrypt"
	"golang.org/x/term"
)
+
// Auth manages a single htpasswd-style password file.
type Auth struct {
	path string // location of the password file on disk
}

// New returns an Auth bound to the password file at path.
// The file is opened lazily by each operation, so it need not exist yet.
func New(path string) *Auth { return &Auth{path: path} }
+
+func (a *Auth) AddUser(username string) error {
+ users, err := a.read()
+ if err != nil && !os.IsNotExist(err) {
+ return err
+ }
+ if _, exists := users[username]; exists {
+ return fmt.Errorf("user %q already exists", username)
+ }
+ pw, err := readPassword("Password: ")
+ if err != nil {
+ return err
+ }
+ confirm, err := readPassword("Confirm: ")
+ if err != nil {
+ return err
+ }
+ if pw != confirm {
+ return fmt.Errorf("passwords do not match")
+ }
+ hash, err := bcrypt.GenerateFromPassword([]byte(pw), bcrypt.DefaultCost)
+ if err != nil {
+ return err
+ }
+ users[username] = string(hash)
+ return a.write(users)
+}
+
+func (a *Auth) ChangePassword(username string) error {
+ users, err := a.read()
+ if err != nil {
+ return err
+ }
+ if _, exists := users[username]; !exists {
+ return fmt.Errorf("user %q not found", username)
+ }
+ pw, err := readPassword("New password: ")
+ if err != nil {
+ return err
+ }
+ confirm, err := readPassword("Confirm: ")
+ if err != nil {
+ return err
+ }
+ if pw != confirm {
+ return fmt.Errorf("passwords do not match")
+ }
+ hash, err := bcrypt.GenerateFromPassword([]byte(pw), bcrypt.DefaultCost)
+ if err != nil {
+ return err
+ }
+ users[username] = string(hash)
+ return a.write(users)
+}
+
+func (a *Auth) DeleteUser(username string) error {
+ users, err := a.read()
+ if err != nil {
+ return err
+ }
+ if _, exists := users[username]; !exists {
+ return fmt.Errorf("user %q not found", username)
+ }
+ delete(users, username)
+ return a.write(users)
+}
+
+func (a *Auth) ListUsers() ([]string, error) {
+ users, err := a.read()
+ if err != nil {
+ if os.IsNotExist(err) {
+ return nil, nil
+ }
+ return nil, err
+ }
+ names := make([]string, 0, len(users))
+ for k := range users {
+ names = append(names, k)
+ }
+ return names, nil
+}
+
+func (a *Auth) Verify(username, password string) (bool, error) {
+ users, err := a.read()
+ if err != nil {
+ return false, err
+ }
+ hash, ok := users[username]
+ if !ok {
+ return false, nil
+ }
+ err = bcrypt.CompareHashAndPassword([]byte(hash), []byte(password))
+ if err == bcrypt.ErrMismatchedHashAndPassword {
+ return false, nil
+ }
+ return err == nil, err
+}
+
+func (a *Auth) read() (map[string]string, error) {
+ f, err := os.Open(a.path)
+ if err != nil {
+ return nil, err
+ }
+ defer f.Close()
+ users := make(map[string]string)
+ scanner := bufio.NewScanner(f)
+ for scanner.Scan() {
+ line := strings.TrimSpace(scanner.Text())
+ if line == "" || strings.HasPrefix(line, "#") {
+ continue
+ }
+ user, hash, ok := strings.Cut(line, ":")
+ if ok {
+ users[user] = hash
+ }
+ }
+ return users, scanner.Err()
+}
+
+func (a *Auth) write(users map[string]string) error {
+ f, err := os.OpenFile(a.path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+ w := bufio.NewWriter(f)
+ for user, hash := range users {
+ fmt.Fprintf(w, "%s:%s\n", user, hash)
+ }
+ return w.Flush()
+}
+
// readPassword prints prompt and reads a password from the terminal with
// echo disabled. The explicit Println compensates for the Enter keystroke
// that ReadPassword swallows.
// NOTE(review): syscall.Stdin is an int on Unix but a Handle on Windows —
// confirm the int conversion if Windows support is ever needed.
func readPassword(prompt string) (string, error) {
	fmt.Print(prompt)
	b, err := term.ReadPassword(int(syscall.Stdin))
	fmt.Println()
	return string(b), err
}
diff --git a/internal/builder/builder.go b/internal/builder/builder.go
new file mode 100644
index 0000000..40be377
--- /dev/null
+++ b/internal/builder/builder.go
@@ -0,0 +1,283 @@
+package builder
+
+import (
+ "fmt"
+ "html/template"
+ "os"
+ "path/filepath"
+ "strings"
+ "time"
+
+ "github.com/fsnotify/fsnotify"
+ "nebbet.no/internal/db"
+)
+
// Builder orchestrates the markdown → HTML build pipeline: rendering,
// component-directive expansion, importmap injection, and keeping the
// metadata and search databases in sync with the output.
type Builder struct {
	ContentDir   string             // root of the markdown sources
	OutputDir    string             // destination for rendered .html files
	TemplateDir  string             // html/template files, default "templates"
	ComponentDir string             // custom-element .js files, default "components"
	LibDir       string             // importmap source modules, default "lib"
	MetaDB       *db.MetaDB         // page index (paths, titles, tags, dates)
	SearchDB     *db.SearchDB       // FTS index, updated on every build
	tmpl         *template.Template // parsed set, (re)loaded by loadTemplates
}
+
// New returns a Builder using the default template/component/lib directory
// layout, writing page metadata to meta and full-text data to search.
func New(contentDir, outputDir string, meta *db.MetaDB, search *db.SearchDB) *Builder {
	return &Builder{
		ContentDir:   contentDir,
		OutputDir:    outputDir,
		TemplateDir:  "templates",
		ComponentDir: "components",
		LibDir:       "lib",
		MetaDB:       meta,
		SearchDB:     search,
	}
}
+
// PageData is passed to HTML templates.
type PageData struct {
	Title   string        // page title from front matter
	Content template.HTML // rendered markdown body, already trusted HTML
	// ImportMapTag is the full <script type="importmap">…</script> block,
	// pre-rendered as safe HTML so the JSON inside is never entity-escaped.
	ImportMapTag     template.HTML
	ComponentScripts []string // /components/<name>.js URLs used by this page
	Date             string   // front-matter date string, passed through verbatim
	Tags             []string // front-matter tags
	Path             string   // clean URL path, e.g. "/" or "/notes/foo"
}
+
+// BuildAll performs a full site build.
+func (b *Builder) BuildAll() error {
+ if err := b.loadTemplates(); err != nil {
+ return fmt.Errorf("load templates: %w", err)
+ }
+ importMap, err := GenerateImportMap(b.LibDir)
+ if err != nil {
+ return fmt.Errorf("importmap: %w", err)
+ }
+ return filepath.WalkDir(b.ContentDir, func(path string, d os.DirEntry, err error) error {
+ if err != nil || d.IsDir() || !strings.HasSuffix(path, ".md") {
+ return err
+ }
+ return b.BuildFile(path, importMap)
+ })
+}
+
// BuildFile converts a single markdown file and updates both databases.
// Pipeline: read → front matter → markdown render → component-directive
// expansion → template execution → meta.db upsert → FTS reindex.
// Drafts (draft: true) are skipped entirely. importMap is the pre-rendered
// importmap JSON; "" suppresses the <script type="importmap"> tag.
func (b *Builder) BuildFile(mdPath, importMap string) error {
	data, err := os.ReadFile(mdPath)
	if err != nil {
		return err
	}

	fm, body := ParseFrontmatter(string(data))
	if fm.Draft {
		fmt.Printf("skip draft: %s\n", mdPath)
		return nil
	}

	htmlBody, err := MarkdownToHTML(body)
	if err != nil {
		return fmt.Errorf("markdown: %w", err)
	}
	htmlBody = ProcessComponents(htmlBody)
	scripts := FindComponentScripts(htmlBody, b.ComponentDir)

	// Derive URL path and output file path from content-relative path.
	rel, _ := filepath.Rel(b.ContentDir, mdPath)
	urlPath := "/" + filepath.ToSlash(strings.TrimSuffix(rel, ".md"))
	// /index → / and /section/index → /section
	switch {
	case urlPath == "/index":
		urlPath = "/"
	case strings.HasSuffix(urlPath, "/index"):
		urlPath = strings.TrimSuffix(urlPath, "/index")
	}
	// The HTML file keeps its index name (section/index.html) even though
	// the URL drops it; the web server maps clean URLs onto these files.
	outPath := filepath.Join(b.OutputDir, filepath.FromSlash(
		strings.TrimSuffix(filepath.ToSlash(rel), ".md")+".html"))

	if err := os.MkdirAll(filepath.Dir(outPath), 0755); err != nil {
		return err
	}

	// Pre-render the importmap tag as template.HTML so html/template does
	// not entity-escape the JSON payload.
	var importMapTag template.HTML
	if importMap != "" {
		importMapTag = template.HTML(
			"<script type=\"importmap\">" + importMap + "</script>")
	}
	page := PageData{
		Title:            fm.Title,
		Content:          template.HTML(htmlBody),
		ImportMapTag:     importMapTag,
		ComponentScripts: scripts,
		Date:             fm.Date,
		Tags:             fm.Tags,
		Path:             urlPath,
	}

	tmplName := fm.Layout + ".html"
	f, err := os.Create(outPath)
	if err != nil {
		return err
	}
	defer f.Close()
	if err := b.tmpl.ExecuteTemplate(f, tmplName, page); err != nil {
		return fmt.Errorf("template %s: %w", tmplName, err)
	}

	// Keep both databases in sync with the freshly rendered output.
	if err := b.MetaDB.UpsertPage(db.PageMeta{
		Path:      urlPath,
		HTMLPath:  outPath,
		Title:     fm.Title,
		Date:      fm.Date,
		Tags:      fm.Tags,
		UpdatedAt: time.Now(),
	}); err != nil {
		return fmt.Errorf("meta db: %w", err)
	}
	// The FTS index stores plain text only; tags/dates live in meta.db.
	if err := b.SearchDB.IndexPage(db.SearchPage{
		Path:    urlPath,
		Title:   fm.Title,
		Content: StripHTML(htmlBody),
	}); err != nil {
		return fmt.Errorf("search db: %w", err)
	}

	fmt.Printf("built %s → %s\n", mdPath, outPath)
	return nil
}
+
+// RemovePage deletes the built HTML and removes the page from both databases.
+func (b *Builder) RemovePage(mdPath string) error {
+ rel, _ := filepath.Rel(b.ContentDir, mdPath)
+ urlPath := "/" + filepath.ToSlash(strings.TrimSuffix(rel, ".md"))
+ switch {
+ case urlPath == "/index":
+ urlPath = "/"
+ case strings.HasSuffix(urlPath, "/index"):
+ urlPath = strings.TrimSuffix(urlPath, "/index")
+ }
+ outPath := filepath.Join(b.OutputDir, filepath.FromSlash(
+ strings.TrimSuffix(filepath.ToSlash(rel), ".md")+".html"))
+
+ _ = os.Remove(outPath)
+ _ = b.MetaDB.DeletePage(urlPath)
+ _ = b.SearchDB.DeletePage(urlPath)
+ fmt.Printf("removed %s\n", outPath)
+ return nil
+}
+
+func (b *Builder) loadTemplates() error {
+ tmpl, err := template.ParseGlob(filepath.Join(b.TemplateDir, "*.html"))
+ if err != nil {
+ return err
+ }
+ b.tmpl = tmpl
+ return nil
+}
+
+// Watch monitors source directories and rebuilds on changes.
+// A 150 ms debounce prevents redundant rebuilds when many files change at once.
+func (b *Builder) Watch() error {
+ watcher, err := fsnotify.NewWatcher()
+ if err != nil {
+ return err
+ }
+ defer watcher.Close()
+
+ // Add all dirs (including nested content subdirs) to watcher.
+ watchDirs := []string{b.ContentDir, b.TemplateDir, b.ComponentDir, b.LibDir, "styles"}
+ for _, dir := range watchDirs {
+ if err := addDirRecursive(watcher, dir); err != nil && !os.IsNotExist(err) {
+ return err
+ }
+ }
+
+ fmt.Println("watching for changes — Ctrl+C to stop")
+
+ var (
+ debounce = time.NewTimer(0)
+ pendingMD = "" // non-empty → rebuild only this file
+ fullBuild = false
+ )
+ <-debounce.C // drain initial tick
+
+ for {
+ select {
+ case event, ok := <-watcher.Events:
+ if !ok {
+ return nil
+ }
+ if !event.Has(fsnotify.Write) && !event.Has(fsnotify.Create) && !event.Has(fsnotify.Remove) {
+ continue
+ }
+
+ // If a new directory appears, start watching it.
+ if event.Has(fsnotify.Create) {
+ if info, err := os.Stat(event.Name); err == nil && info.IsDir() {
+ _ = watcher.Add(event.Name)
+ }
+ }
+
+ isMD := strings.HasSuffix(event.Name, ".md")
+ isContentMD := isMD && strings.HasPrefix(
+ filepath.ToSlash(event.Name),
+ filepath.ToSlash(b.ContentDir),
+ )
+
+ if isContentMD && !fullBuild {
+ if event.Has(fsnotify.Remove) {
+ b.RemovePage(event.Name)
+ pendingMD = ""
+ } else if pendingMD == "" {
+ pendingMD = event.Name
+ } else if pendingMD != event.Name {
+ // Multiple different md files → full rebuild.
+ fullBuild = true
+ pendingMD = ""
+ }
+ } else {
+ // Templates, styles, components, lib, or multiple md changed.
+ fullBuild = true
+ pendingMD = ""
+ }
+
+ debounce.Reset(150 * time.Millisecond)
+
+ case <-debounce.C:
+ importMap, _ := GenerateImportMap(b.LibDir)
+ if fullBuild {
+ if err := b.loadTemplates(); err == nil {
+ _ = b.BuildAll()
+ }
+ fullBuild = false
+ } else if pendingMD != "" {
+ if err := b.loadTemplates(); err == nil {
+ _ = b.BuildFile(pendingMD, importMap)
+ }
+ pendingMD = ""
+ }
+
+ case err, ok := <-watcher.Errors:
+ if !ok {
+ return nil
+ }
+ fmt.Fprintf(os.Stderr, "watch error: %v\n", err)
+ }
+ }
+}
+
+func addDirRecursive(w *fsnotify.Watcher, root string) error {
+ return filepath.WalkDir(root, func(path string, d os.DirEntry, err error) error {
+ if err != nil {
+ return nil // skip unreadable entries
+ }
+ if d.IsDir() {
+ return w.Add(path)
+ }
+ return nil
+ })
+}
diff --git a/internal/builder/components.go b/internal/builder/components.go
new file mode 100644
index 0000000..54a226a
--- /dev/null
+++ b/internal/builder/components.go
@@ -0,0 +1,97 @@
+package builder
+
+import (
+ "encoding/json"
+ "fmt"
+ "os"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "strings"
+)
+
// componentRe matches <!-- component:tag-name { ...json... } -->
// Props JSON is optional.
// NOTE(review): `[^}]*` stops at the first '}', so props containing nested
// objects (e.g. {"a": {"b": 1}}) will not match — confirm that flat props
// are the intended limit.
var componentRe = regexp.MustCompile(
	`<!--\s*component:([a-z][a-z0-9-]*)\s*(\{[^}]*\})?\s*-->`)

// customElementRe matches opening tags for custom elements (name must contain a hyphen).
var customElementRe = regexp.MustCompile(`<([a-z][a-z0-9]*(?:-[a-z0-9]+)+)[\s/>]`)
+
+// ProcessComponents replaces HTML comment component directives with custom element tags.
+//
+// <!-- component:my-counter {"start": 5, "label": "Count"} -->
+// → <my-counter start="5" label="Count"></my-counter>
+func ProcessComponents(html string) string {
+ return componentRe.ReplaceAllStringFunc(html, func(match string) string {
+ subs := componentRe.FindStringSubmatch(match)
+ if len(subs) < 2 {
+ return match
+ }
+ tagName := subs[1]
+ attrs := ""
+ if len(subs) > 2 && subs[2] != "" {
+ var props map[string]any
+ if err := json.Unmarshal([]byte(subs[2]), &props); err == nil {
+ attrs = propsToAttrs(props)
+ }
+ }
+ if attrs != "" {
+ return fmt.Sprintf(`<%s %s></%s>`, tagName, attrs, tagName)
+ }
+ return fmt.Sprintf(`<%s></%s>`, tagName, tagName)
+ })
+}
+
// propsToAttrs converts a JSON props map to an HTML attribute string.
// Keys are emitted in sorted order for deterministic output.
//   - string → key="value" (double quotes become &quot;)
//   - bool   → bare attribute when true, omitted when false
//   - number → key="n" (integral float64s render without a decimal point)
//   - other  → single-quoted JSON encoding of the value
func propsToAttrs(props map[string]any) string {
	keys := make([]string, 0, len(props))
	for key := range props {
		keys = append(keys, key)
	}
	sort.Strings(keys)

	var parts []string
	for _, key := range keys {
		switch val := props[key].(type) {
		case string:
			escaped := strings.ReplaceAll(val, `"`, `&quot;`)
			parts = append(parts, fmt.Sprintf(`%s="%s"`, key, escaped))
		case bool:
			if val {
				parts = append(parts, key) // boolean attribute, no value
			}
		case float64:
			if val == float64(int64(val)) {
				parts = append(parts, fmt.Sprintf(`%s="%d"`, key, int64(val)))
			} else {
				parts = append(parts, fmt.Sprintf(`%s="%g"`, key, val))
			}
		default:
			// Complex value → JSON-encode into single-quoted attribute.
			encoded, _ := json.Marshal(val)
			parts = append(parts, fmt.Sprintf(`%s='%s'`, key, string(encoded)))
		}
	}
	return strings.Join(parts, " ")
}
+
+// FindComponentScripts scans HTML for used custom elements and returns
+// /components/<name>.js paths for any that exist on disk.
+func FindComponentScripts(html, componentsDir string) []string {
+ matches := customElementRe.FindAllStringSubmatch(html, -1)
+ seen := make(map[string]bool)
+ var scripts []string
+ for _, m := range matches {
+ if len(m) < 2 || seen[m[1]] {
+ continue
+ }
+ seen[m[1]] = true
+ jsPath := filepath.Join(componentsDir, m[1]+".js")
+ if _, err := os.Stat(jsPath); err == nil {
+ scripts = append(scripts, "/components/"+m[1]+".js")
+ }
+ }
+ return scripts
+}
diff --git a/internal/builder/frontmatter.go b/internal/builder/frontmatter.go
new file mode 100644
index 0000000..34de484
--- /dev/null
+++ b/internal/builder/frontmatter.go
@@ -0,0 +1,64 @@
+package builder
+
+import (
+ "strings"
+)
+
// Frontmatter holds parsed page metadata from YAML-style front matter.
type Frontmatter struct {
	Title  string
	Date   string
	Tags   []string
	Layout string // template name without extension, default "base"
	Draft  bool
}

// ParseFrontmatter splits the optional ---…--- block from the markdown
// body and returns parsed metadata plus the trimmed remainder.
// Supported keys: title, date, tags (comma-list or [a, b]), layout, draft.
// Content without a leading "---", or with an unterminated block, is
// returned unchanged with default metadata.
func ParseFrontmatter(content string) (Frontmatter, string) {
	meta := Frontmatter{Layout: "base"}
	if !strings.HasPrefix(content, "---") {
		return meta, content
	}

	after := content[3:]
	closing := strings.Index(after, "\n---")
	if closing < 0 {
		// No closing fence: treat the whole input as body.
		return meta, content
	}

	header := strings.TrimSpace(after[:closing])
	body := strings.TrimSpace(after[closing+4:]) // +4 skips over "\n---"

	for _, raw := range strings.Split(header, "\n") {
		key, value, found := strings.Cut(strings.TrimSpace(raw), ":")
		if !found {
			continue
		}
		value = strings.TrimSpace(value)
		switch strings.TrimSpace(key) {
		case "title":
			meta.Title = strings.Trim(value, `"'`)
		case "date":
			meta.Date = value
		case "layout":
			meta.Layout = strings.Trim(value, `"'`)
		case "draft":
			meta.Draft = value == "true"
		case "tags":
			meta.Tags = parseTags(value)
		}
	}
	return meta, body
}

// parseTags parses "a, b" or `[a, "b"]` into a slice, dropping empties.
func parseTags(v string) []string {
	var tags []string
	for _, piece := range strings.Split(strings.Trim(v, "[] "), ",") {
		if tag := strings.Trim(strings.TrimSpace(piece), `"'`); tag != "" {
			tags = append(tags, tag)
		}
	}
	return tags
}
diff --git a/internal/builder/importmap.go b/internal/builder/importmap.go
new file mode 100644
index 0000000..8445411
--- /dev/null
+++ b/internal/builder/importmap.go
@@ -0,0 +1,58 @@
+package builder
+
+import (
+ "encoding/json"
+ "os"
+ "path/filepath"
+ "strings"
+)
+
+// ImportMap represents a browser importmap.
+type ImportMap struct {
+ Imports map[string]string `json:"imports"`
+}
+
+// GenerateImportMap scans libDir for .js files and produces an importmap JSON string.
+//
+// Naming rules:
+// - lib/chart.js → "chart"
+// - lib/icons/index.js → "icons"
+// - lib/utils/helpers.js → "utils/helpers"
+func GenerateImportMap(libDir string) (string, error) {
+ imports := make(map[string]string)
+
+ if _, err := os.Stat(libDir); os.IsNotExist(err) {
+ b, _ := json.MarshalIndent(ImportMap{Imports: imports}, "", " ")
+ return string(b), nil
+ }
+
+ err := filepath.WalkDir(libDir, func(path string, d os.DirEntry, err error) error {
+ if err != nil || d.IsDir() || !strings.HasSuffix(path, ".js") {
+ return err
+ }
+ rel, _ := filepath.Rel(libDir, path)
+ rel = filepath.ToSlash(rel)
+
+ dir := filepath.ToSlash(filepath.Dir(rel))
+ base := strings.TrimSuffix(filepath.Base(rel), ".js")
+
+ var importName string
+ switch {
+ case dir == ".":
+ importName = base
+ case base == "index":
+ importName = dir
+ default:
+ importName = dir + "/" + base
+ }
+
+ imports[importName] = "/lib/" + rel
+ return nil
+ })
+ if err != nil {
+ return "", err
+ }
+
+ b, err := json.MarshalIndent(ImportMap{Imports: imports}, "", " ")
+ return string(b), err
+}
diff --git a/internal/builder/markdown.go b/internal/builder/markdown.go
new file mode 100644
index 0000000..4e00ca3
--- /dev/null
+++ b/internal/builder/markdown.go
@@ -0,0 +1,49 @@
+package builder
+
+import (
+ "bytes"
+ "regexp"
+ "strings"
+
+ "github.com/yuin/goldmark"
+ "github.com/yuin/goldmark/extension"
+ "github.com/yuin/goldmark/parser"
+ "github.com/yuin/goldmark/renderer/html"
+)
+
// md is the shared goldmark instance, built once at package init so the
// parser/renderer setup cost is not paid per file.
// NOTE(review): per goldmark's docs extension.GFM already bundles Table,
// Strikethrough and TaskList, so the explicit entries look redundant but
// harmless — confirm before simplifying.
var md = goldmark.New(
	goldmark.WithExtensions(
		extension.GFM,
		extension.Table,
		extension.Strikethrough,
		extension.TaskList,
	),
	goldmark.WithParserOptions(
		parser.WithAutoHeadingID(),
	),
	goldmark.WithRendererOptions(
		// Allow raw HTML pass-through so component tags survive round-trip.
		html.WithUnsafe(),
	),
)
+
// MarkdownToHTML converts a markdown string to an HTML fragment using the
// shared goldmark instance. The result has no <html>/<body> wrapper;
// templates provide the page shell.
func MarkdownToHTML(body string) (string, error) {
	var buf bytes.Buffer
	if err := md.Convert([]byte(body), &buf); err != nil {
		return "", err
	}
	return buf.String(), nil
}
+
var (
	htmlTagRe    = regexp.MustCompile(`<[^>]+>`)
	multiSpaceRe = regexp.MustCompile(`\s+`)
)

// StripHTML removes HTML tags and normalises whitespace for search
// indexing. Tags become single spaces (so adjacent words don't merge),
// whitespace runs collapse to one space, and the result is trimmed.
func StripHTML(h string) string {
	text := htmlTagRe.ReplaceAllString(h, " ")
	return strings.TrimSpace(multiSpaceRe.ReplaceAllString(text, " "))
}
diff --git a/internal/db/meta.go b/internal/db/meta.go
new file mode 100644
index 0000000..4857234
--- /dev/null
+++ b/internal/db/meta.go
@@ -0,0 +1,117 @@
+package db
+
+import (
+ "database/sql"
+ "encoding/json"
+ "strings"
+ "time"
+
+ _ "nebbet.no/internal/sqlitedrv"
+)
+
// MetaDB wraps the page-index database (meta.db).
type MetaDB struct {
	db *sql.DB
}

// PageMeta is one row of the pages table.
type PageMeta struct {
	Path      string    // clean URL path, unique key
	HTMLPath  string    // filesystem path of the rendered HTML file
	Title     string    // page title from front matter
	Date      string    // front-matter date string (TEXT column)
	Tags      []string  // stored as a JSON array in the tags column
	UpdatedAt time.Time // last build time; stored in UTC
}
+
// OpenMeta opens (creating if needed) the page index at path and ensures
// the schema exists. The multi-statement DDL relies on the driver's
// Execer running arg-free queries through sqlite3_exec.
func OpenMeta(path string) (*MetaDB, error) {
	db, err := sql.Open("sqlite", path)
	if err != nil {
		return nil, err
	}
	_, err = db.Exec(`
	CREATE TABLE IF NOT EXISTS pages (
		id INTEGER PRIMARY KEY AUTOINCREMENT,
		path TEXT NOT NULL UNIQUE,
		html_path TEXT NOT NULL,
		title TEXT NOT NULL DEFAULT '',
		date TEXT DEFAULT '',
		tags TEXT DEFAULT '[]',
		updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
	);
	CREATE INDEX IF NOT EXISTS idx_pages_path ON pages(path);
	CREATE INDEX IF NOT EXISTS idx_pages_date ON pages(date);
	`)
	if err != nil {
		return nil, err
	}
	return &MetaDB{db: db}, nil
}
+
// Close releases the underlying database handle.
func (m *MetaDB) Close() error { return m.db.Close() }

// UpsertPage inserts or updates the row keyed by p.Path. Tags are
// JSON-encoded (the marshal error is ignored: a []string cannot fail to
// encode) and UpdatedAt is normalized to UTC.
func (m *MetaDB) UpsertPage(p PageMeta) error {
	tags, _ := json.Marshal(p.Tags)
	_, err := m.db.Exec(`
	INSERT INTO pages (path, html_path, title, date, tags, updated_at)
	VALUES (?, ?, ?, ?, ?, ?)
	ON CONFLICT(path) DO UPDATE SET
		html_path = excluded.html_path,
		title = excluded.title,
		date = excluded.date,
		tags = excluded.tags,
		updated_at = excluded.updated_at
	`, p.Path, p.HTMLPath, p.Title, p.Date, string(tags), p.UpdatedAt.UTC())
	return err
}

// DeletePage removes the row for path; an unknown path is a silent no-op.
func (m *MetaDB) DeletePage(path string) error {
	_, err := m.db.Exec(`DELETE FROM pages WHERE path = ?`, path)
	return err
}
+
// GetPage fetches one page by URL path. Scan yields sql.ErrNoRows for an
// unknown path, which callers can test with errors.Is.
func (m *MetaDB) GetPage(path string) (*PageMeta, error) {
	row := m.db.QueryRow(
		`SELECT path, html_path, title, date, tags FROM pages WHERE path = ?`, path)
	var p PageMeta
	var tagsJSON string
	if err := row.Scan(&p.Path, &p.HTMLPath, &p.Title, &p.Date, &tagsJSON); err != nil {
		return nil, err
	}
	// Tags were written by UpsertPage and are always valid JSON;
	// decode best-effort.
	_ = json.Unmarshal([]byte(tagsJSON), &p.Tags)
	return &p, nil
}
+
// ListPages returns every page ordered by date (newest first), then path.
// Dates are TEXT, so ISO-8601 style dates sort correctly.
func (m *MetaDB) ListPages() ([]PageMeta, error) {
	rows, err := m.db.Query(
		`SELECT path, html_path, title, date, tags FROM pages ORDER BY date DESC, path`)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	return scanPages(rows)
}

// ListByTag returns pages whose stored JSON tags array contains tag.
func (m *MetaDB) ListByTag(tag string) ([]PageMeta, error) {
	// JSON array contains check via LIKE — sufficient for simple tag strings.
	// The \" escaping mirrors encoding/json's output for embedded quotes.
	// NOTE(review): LIKE metacharacters (% and _) in a tag are not escaped
	// and could over-match — confirm tags are restricted to plain words.
	needle := `%"` + strings.ReplaceAll(tag, `"`, `\"`) + `"%`
	rows, err := m.db.Query(
		`SELECT path, html_path, title, date, tags FROM pages WHERE tags LIKE ? ORDER BY date DESC`, needle)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	return scanPages(rows)
}
+
+func scanPages(rows *sql.Rows) ([]PageMeta, error) {
+ var pages []PageMeta
+ for rows.Next() {
+ var p PageMeta
+ var tagsJSON string
+ if err := rows.Scan(&p.Path, &p.HTMLPath, &p.Title, &p.Date, &tagsJSON); err != nil {
+ return nil, err
+ }
+ _ = json.Unmarshal([]byte(tagsJSON), &p.Tags)
+ pages = append(pages, p)
+ }
+ return pages, rows.Err()
+}
diff --git a/internal/db/search.go b/internal/db/search.go
new file mode 100644
index 0000000..b2c9b49
--- /dev/null
+++ b/internal/db/search.go
@@ -0,0 +1,113 @@
+package db
+
+import (
+ "database/sql"
+
+ _ "nebbet.no/internal/sqlitedrv"
+)
+
// SearchDB wraps the FTS5 full-text search database (search.db).
type SearchDB struct {
	db *sql.DB
}

// SearchPage is the unit of indexing.
type SearchPage struct {
	Path    string // clean URL path, key into the index
	Title   string // page title
	Content string // HTML-stripped page text
}

// SearchResult is one hit returned by Search.
type SearchResult struct {
	Path    string
	Title   string
	Snippet string // FTS5 snippet with <mark>…</mark> around matches
}
+
// OpenSearch opens (creating if needed) the full-text index at path.
// pages_fts is the FTS5 table; indexed_pages records when each path was
// last (re)indexed. Requires a libsqlite3 built with FTS5 support.
func OpenSearch(path string) (*SearchDB, error) {
	db, err := sql.Open("sqlite", path)
	if err != nil {
		return nil, err
	}
	_, err = db.Exec(`
	CREATE TABLE IF NOT EXISTS indexed_pages (
		path TEXT NOT NULL PRIMARY KEY,
		updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
	);
	CREATE VIRTUAL TABLE IF NOT EXISTS pages_fts USING fts5(
		path UNINDEXED,
		title,
		content,
		tokenize = 'porter unicode61'
	);
	`)
	if err != nil {
		return nil, err
	}
	return &SearchDB{db: db}, nil
}
+
// Close releases the underlying database handle.
func (s *SearchDB) Close() error { return s.db.Close() }

// IndexPage replaces the FTS row for p.Path inside one transaction
// (delete-then-insert, since FTS5 tables have no upsert) and records the
// index time in indexed_pages. The deferred Rollback is a no-op once
// Commit has succeeded.
func (s *SearchDB) IndexPage(p SearchPage) error {
	tx, err := s.db.Begin()
	if err != nil {
		return err
	}
	defer tx.Rollback()

	if _, err = tx.Exec(`DELETE FROM pages_fts WHERE path = ?`, p.Path); err != nil {
		return err
	}
	if _, err = tx.Exec(
		`INSERT INTO pages_fts (path, title, content) VALUES (?, ?, ?)`,
		p.Path, p.Title, p.Content,
	); err != nil {
		return err
	}
	if _, err = tx.Exec(`
	INSERT INTO indexed_pages (path, updated_at) VALUES (?, CURRENT_TIMESTAMP)
	ON CONFLICT(path) DO UPDATE SET updated_at = CURRENT_TIMESTAMP
	`, p.Path); err != nil {
		return err
	}
	return tx.Commit()
}
+
// DeletePage drops path from both the FTS table and the bookkeeping table
// in one transaction. Deleting a never-indexed path is a no-op.
func (s *SearchDB) DeletePage(path string) error {
	tx, err := s.db.Begin()
	if err != nil {
		return err
	}
	defer tx.Rollback()
	if _, err = tx.Exec(`DELETE FROM pages_fts WHERE path = ?`, path); err != nil {
		return err
	}
	if _, err = tx.Exec(`DELETE FROM indexed_pages WHERE path = ?`, path); err != nil {
		return err
	}
	return tx.Commit()
}
+
// Search runs a full-text query and returns up to 20 results with
// snippets, best-ranked first. snippet() argument 2 selects the content
// column; matches are wrapped in <mark> tags and clipped to ~20 tokens
// with "..." ellipses.
// NOTE(review): query is passed straight to MATCH, so FTS5 operators
// (AND, OR, quoting) work, but malformed input surfaces as a query
// error — confirm callers tolerate or sanitize that.
func (s *SearchDB) Search(query string) ([]SearchResult, error) {
	rows, err := s.db.Query(`
	SELECT path, title,
	       snippet(pages_fts, 2, '<mark>', '</mark>', '...', 20)
	FROM pages_fts
	WHERE pages_fts MATCH ?
	ORDER BY rank
	LIMIT 20
	`, query)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var results []SearchResult
	for rows.Next() {
		var r SearchResult
		if err := rows.Scan(&r.Path, &r.Title, &r.Snippet); err != nil {
			return nil, err
		}
		results = append(results, r)
	}
	return results, rows.Err()
}
diff --git a/internal/sqlitedrv/driver.go b/internal/sqlitedrv/driver.go
new file mode 100644
index 0000000..c9efff4
--- /dev/null
+++ b/internal/sqlitedrv/driver.go
@@ -0,0 +1,263 @@
+// Package sqlitedrv registers a minimal "sqlite" driver for database/sql
+// that wraps the system libsqlite3 via CGO.
+// Import with: _ "nebbet.no/internal/sqlitedrv"
+package sqlitedrv
+
+/*
+#cgo pkg-config: sqlite3
+#include <sqlite3.h>
+#include <stdlib.h>
+
+static int bind_text(sqlite3_stmt *s, int i, const char *v) {
+ return sqlite3_bind_text(s, i, v, -1, SQLITE_TRANSIENT);
+}
+static void enable_wal(sqlite3 *db) {
+ sqlite3_exec(db, "PRAGMA journal_mode=WAL", NULL, NULL, NULL);
+ sqlite3_exec(db, "PRAGMA synchronous=NORMAL", NULL, NULL, NULL);
+}
+*/
+import "C"
+
+import (
+ "database/sql"
+ "database/sql/driver"
+ "errors"
+ "fmt"
+ "io"
+ "time"
+ "unsafe"
+)
+
// init registers the driver under the name "sqlite" used by the db
// package's sql.Open calls.
func init() {
	sql.Register("sqlite", &sqliteDriver{})
}
+
+// ── Driver ────────────────────────────────────────────────────────────────────
+
// sqliteDriver implements database/sql/driver.Driver.
type sqliteDriver struct{}

// Open opens (creating if absent) the database file named by the DSN and
// switches it to WAL journaling with synchronous=NORMAL via the C shim.
// FULLMUTEX gives a serialized handle; database/sql additionally confines
// each driver.Conn to one goroutine at a time.
func (*sqliteDriver) Open(name string) (driver.Conn, error) {
	cname := C.CString(name)
	defer C.free(unsafe.Pointer(cname))

	var db *C.sqlite3
	flags := C.int(C.SQLITE_OPEN_READWRITE | C.SQLITE_OPEN_CREATE | C.SQLITE_OPEN_FULLMUTEX)
	if rc := C.sqlite3_open_v2(cname, &db, flags, nil); rc != C.SQLITE_OK {
		// Per the SQLite docs the handle must still be closed after a
		// failed open; errmsg remains valid until then.
		msg := C.GoString(C.sqlite3_errmsg(db))
		C.sqlite3_close(db)
		return nil, fmt.Errorf("sqlite open %s: %s", name, msg)
	}
	C.enable_wal(db)
	return &conn{db: db}, nil
}
+
+// ── Conn ─────────────────────────────────────────────────────────────────────
+
// conn is one SQLite connection; database/sql serializes access to it,
// so no locking is needed here.
type conn struct{ db *C.sqlite3 }

// Close closes the handle. The sqlite3_close return code is dropped; it
// only fails when statements are still unfinalized.
func (c *conn) Close() error {
	C.sqlite3_close(c.db)
	return nil
}

// Begin starts a transaction with a plain BEGIN statement.
func (c *conn) Begin() (driver.Tx, error) {
	if err := c.execRaw("BEGIN"); err != nil {
		return nil, err
	}
	return &tx{c}, nil
}
+
// Exec implements driver.Execer so multi-statement DDL (no args) works.
// database/sql calls this when args is empty before falling back to Prepare.
// The arg-free path uses sqlite3_exec, which runs every statement in
// query; the bound-args path prepares a single statement (anything after
// the first ';' is ignored there — see Prepare).
func (c *conn) Exec(query string, args []driver.Value) (driver.Result, error) {
	if len(args) == 0 {
		cq := C.CString(query)
		defer C.free(unsafe.Pointer(cq))
		var cerr *C.char
		if rc := C.sqlite3_exec(c.db, cq, nil, nil, &cerr); rc != C.SQLITE_OK {
			// cerr is allocated by SQLite; copy the text, then free it.
			msg := C.GoString(cerr)
			C.sqlite3_free(unsafe.Pointer(cerr))
			return nil, errors.New(msg)
		}
		return &result{
			lastID:   int64(C.sqlite3_last_insert_rowid(c.db)),
			affected: int64(C.sqlite3_changes(c.db)),
		}, nil
	}
	st, err := c.Prepare(query)
	if err != nil {
		return nil, err
	}
	defer st.Close()
	return st.Exec(args)
}
+
// Prepare compiles a single SQL statement. The pzTail argument is nil,
// so any trailing statements after the first complete one are silently
// dropped.
func (c *conn) Prepare(query string) (driver.Stmt, error) {
	cq := C.CString(query)
	defer C.free(unsafe.Pointer(cq))
	var s *C.sqlite3_stmt
	if rc := C.sqlite3_prepare_v2(c.db, cq, -1, &s, nil); rc != C.SQLITE_OK {
		return nil, fmt.Errorf("prepare: %s", C.GoString(C.sqlite3_errmsg(c.db)))
	}
	return &stmt{c: c, s: s}, nil
}

// execRaw runs bind-free SQL via sqlite3_exec (used for BEGIN/COMMIT/
// ROLLBACK), copying and freeing SQLite's error string on failure.
func (c *conn) execRaw(q string) error {
	cq := C.CString(q)
	defer C.free(unsafe.Pointer(cq))
	var cerr *C.char
	if rc := C.sqlite3_exec(c.db, cq, nil, nil, &cerr); rc != C.SQLITE_OK {
		msg := C.GoString(cerr)
		C.sqlite3_free(unsafe.Pointer(cerr))
		return errors.New(msg)
	}
	return nil
}
+
+// ── Tx ───────────────────────────────────────────────────────────────────────
+
// tx finishes a transaction started by conn.Begin.
type tx struct{ c *conn }

func (t *tx) Commit() error   { return t.c.execRaw("COMMIT") }
func (t *tx) Rollback() error { return t.c.execRaw("ROLLBACK") }
+
+// ── Stmt ─────────────────────────────────────────────────────────────────────
+
// stmt wraps a prepared sqlite3_stmt, valid until Close finalizes it.
type stmt struct {
	c *conn
	s *C.sqlite3_stmt
}

// Close finalizes the statement. The finalize return code is dropped
// deliberately: it echoes the last step's error, not a new failure.
func (st *stmt) Close() error {
	C.sqlite3_finalize(st.s)
	return nil
}

// NumInput reports the number of bind parameters so database/sql can
// validate argument counts before Exec/Query.
func (st *stmt) NumInput() int { return int(C.sqlite3_bind_parameter_count(st.s)) }
+
// Exec resets and re-binds the statement, then steps it once. SQLITE_ROW
// is tolerated so row-returning statements still succeed (extra rows are
// discarded). Rowid/change counters are captured immediately, before any
// later statement on this connection can clobber them.
func (st *stmt) Exec(args []driver.Value) (driver.Result, error) {
	C.sqlite3_reset(st.s)
	if err := st.bind(args); err != nil {
		return nil, err
	}
	rc := C.sqlite3_step(st.s)
	if rc != C.SQLITE_DONE && rc != C.SQLITE_ROW {
		return nil, fmt.Errorf("exec: %s", C.GoString(C.sqlite3_errmsg(st.c.db)))
	}
	return &result{
		lastID:   int64(C.sqlite3_last_insert_rowid(st.c.db)),
		affected: int64(C.sqlite3_changes(st.c.db)),
	}, nil
}

// Query resets and binds, captures column names eagerly, and returns a
// rows cursor that lazily steps this same statement in Next.
func (st *stmt) Query(args []driver.Value) (driver.Rows, error) {
	C.sqlite3_reset(st.s)
	if err := st.bind(args); err != nil {
		return nil, err
	}
	ncols := int(C.sqlite3_column_count(st.s))
	cols := make([]string, ncols)
	for i := range cols {
		cols[i] = C.GoString(C.sqlite3_column_name(st.s, C.int(i)))
	}
	return &rows{st: st, cols: cols}, nil
}
+
// bind assigns driver values to the statement's 1-based parameters.
// Strings and times go through the bind_text shim, which uses
// SQLITE_TRANSIENT so SQLite copies the buffer and the C string can be
// freed immediately. time.Time is stored as RFC 3339 UTC text. An empty
// []byte binds as NULL because there is no backing array to point at.
func (st *stmt) bind(args []driver.Value) error {
	for i, arg := range args {
		n := C.int(i + 1)
		var rc C.int
		switch v := arg.(type) {
		case nil:
			rc = C.sqlite3_bind_null(st.s, n)
		case int64:
			rc = C.sqlite3_bind_int64(st.s, n, C.sqlite3_int64(v))
		case float64:
			rc = C.sqlite3_bind_double(st.s, n, C.double(v))
		case bool:
			// SQLite has no boolean type; store 0/1.
			b := C.int(0)
			if v {
				b = 1
			}
			rc = C.sqlite3_bind_int(st.s, n, b)
		case string:
			cs := C.CString(v)
			rc = C.bind_text(st.s, n, cs)
			C.free(unsafe.Pointer(cs))
		case []byte:
			if len(v) == 0 {
				rc = C.sqlite3_bind_null(st.s, n)
			} else {
				rc = C.sqlite3_bind_blob(st.s, n,
					unsafe.Pointer(&v[0]), C.int(len(v)), C.SQLITE_TRANSIENT)
			}
		case time.Time:
			s := v.UTC().Format(time.RFC3339)
			cs := C.CString(s)
			rc = C.bind_text(st.s, n, cs)
			C.free(unsafe.Pointer(cs))
		default:
			return fmt.Errorf("unsupported bind type %T at index %d", arg, i)
		}
		if rc != C.SQLITE_OK {
			return fmt.Errorf("bind[%d]: %s", i, C.GoString(C.sqlite3_errmsg(st.c.db)))
		}
	}
	return nil
}
+
+// ── Rows ─────────────────────────────────────────────────────────────────────
+
// rows streams query results by stepping the owning statement.
type rows struct {
	st   *stmt
	cols []string // column names captured at Query time
}

func (r *rows) Columns() []string { return r.cols }

// Close resets the statement rather than finalizing it, so database/sql
// can reuse the prepared stmt for later queries.
func (r *rows) Close() error {
	C.sqlite3_reset(r.st.s)
	return nil
}
+
// Next steps the statement once and converts the current row's columns to
// Go values: INTEGER→int64, FLOAT→float64, TEXT→string (copied),
// BLOB→[]byte (copied), NULL→nil. Returns io.EOF when the result set is
// exhausted.
func (r *rows) Next(dest []driver.Value) error {
	rc := C.sqlite3_step(r.st.s)
	if rc == C.SQLITE_DONE {
		return io.EOF
	}
	if rc != C.SQLITE_ROW {
		return fmt.Errorf("next: %s", C.GoString(C.sqlite3_errmsg(r.st.c.db)))
	}
	for i := range dest {
		switch C.sqlite3_column_type(r.st.s, C.int(i)) {
		case C.SQLITE_INTEGER:
			dest[i] = int64(C.sqlite3_column_int64(r.st.s, C.int(i)))
		case C.SQLITE_FLOAT:
			dest[i] = float64(C.sqlite3_column_double(r.st.s, C.int(i)))
		case C.SQLITE_TEXT:
			dest[i] = C.GoString((*C.char)(unsafe.Pointer(
				C.sqlite3_column_text(r.st.s, C.int(i)))))
		case C.SQLITE_BLOB:
			// Copy out of SQLite-owned memory; the 1<<28 array type caps
			// blobs at 256 MiB (larger blobs would need C.GoBytes).
			sz := int(C.sqlite3_column_bytes(r.st.s, C.int(i)))
			b := make([]byte, sz)
			if sz > 0 {
				ptr := C.sqlite3_column_blob(r.st.s, C.int(i))
				copy(b, (*[1 << 28]byte)(ptr)[:sz:sz])
			}
			dest[i] = b
		default: // SQLITE_NULL
			dest[i] = nil
		}
	}
	return nil
}
+
+// ── Result ────────────────────────────────────────────────────────────────────
+
// result carries the rowid/change counters captured right after a
// statement ran; they are per-connection and would be clobbered by any
// later statement.
type result struct {
	lastID   int64
	affected int64
}

func (r *result) LastInsertId() (int64, error) { return r.lastID, nil }
func (r *result) RowsAffected() (int64, error) { return r.affected, nil }