From 7497e759be92c17941543f6ae2c389bd2178dc38 Mon Sep 17 00:00:00 2001 From: ssavutu Date: Thu, 12 Mar 2026 19:37:45 -0400 Subject: [PATCH 01/10] add types --- server/internal/models/types.go | 190 ++++++++++++++++++++++++++++++++ 1 file changed, 190 insertions(+) create mode 100644 server/internal/models/types.go diff --git a/server/internal/models/types.go b/server/internal/models/types.go new file mode 100644 index 0000000..4cbd694 --- /dev/null +++ b/server/internal/models/types.go @@ -0,0 +1,190 @@ +package models + +import "time" + +type SortDirection string + +const ( + SortDirectionAscending SortDirection = "asc" + SortDirectionDescending SortDirection = "desc" +) + +type AuthorSortBy string + +const ( + AuthorSortByDisplayName AuthorSortBy = "display_name" + AuthorSortByCreatedAt AuthorSortBy = "created_at" + AuthorSortByUpdatedAt AuthorSortBy = "updated_at" +) + +type ArticleSortBy string + +const ( + ArticleSortByTitle ArticleSortBy = "title" + ArticleSortBySlug ArticleSortBy = "slug" + ArticleSortByCreatedAt ArticleSortBy = "created_at" + ArticleSortByPublishedAt ArticleSortBy = "published_at" + ArticleSortByStatus ArticleSortBy = "status" +) + +type MediaSortBy string + +const ( + MediaSortByFileName MediaSortBy = "file_name" + MediaSortByCreatedAt MediaSortBy = "created_at" + MediaSortByUpdatedAt MediaSortBy = "updated_at" + MediaSortBySizeBytes MediaSortBy = "size_bytes" +) + +type ArticleStatus string + +const ( + ArticleStatusDraft ArticleStatus = "draft" + ArticleStatusPublished ArticleStatus = "published" +) + +type Author struct { + ID int64 `json:"id"` + DisplayName string `json:"display_name"` + FirstName string `json:"first_name,omitempty"` + LastName string `json:"last_name,omitempty"` + Email string `json:"email,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + UpdatedAt *time.Time `json:"updated_at,omitempty"` +} + +type AuthorOverview struct { + ID int64 `json:"id"` + DisplayName string `json:"display_name"` + FirstName 
string `json:"first_name,omitempty"` + LastName string `json:"last_name,omitempty"` +} + +type AuthorInput struct { + DisplayName string `json:"display_name"` + FirstName string `json:"first_name,omitempty"` + LastName string `json:"last_name,omitempty"` + Email string `json:"email,omitempty"` +} + +type AuthorPatch struct { + DisplayName *string `json:"display_name,omitempty"` + FirstName *string `json:"first_name,omitempty"` + LastName *string `json:"last_name,omitempty"` + Email *string `json:"email,omitempty"` +} + +type AuthorListParams struct { + Limit int + Offset int + SortBy AuthorSortBy + SortDirection SortDirection + ArticleID *int64 +} + +type Article struct { + Title string `json:"title"` + ID int64 `json:"id"` + Authors []AuthorOverview `json:"authors"` + Content string `json:"content"` + Categories []string `json:"categories"` + Excerpt string `json:"excerpt"` + Slug string `json:"slug"` + PhotoURL string `json:"photo_url"` + IsFeatured bool `json:"is_featured"` + Status ArticleStatus `json:"status"` + CreatedAt *time.Time `json:"created_at,omitempty"` + PublishedAt *time.Time `json:"published_at,omitempty"` +} + +type ArticleOverview struct { + Title string `json:"title"` + ID int64 `json:"id"` + Authors []AuthorOverview `json:"authors"` + PublishedAt *time.Time `json:"published_at,omitempty"` + Categories []string `json:"categories"` + Excerpt string `json:"excerpt"` + Slug string `json:"slug"` + Status ArticleStatus `json:"status"` + PhotoURL string `json:"photo_url"` + IsFeatured bool `json:"is_featured"` +} + +type ArticleInput struct { + Title string `json:"title"` + Authors []int64 `json:"authors"` + Content string `json:"content"` + Categories []string `json:"categories"` + PhotoURL string `json:"photo_url"` + IsFeatured bool `json:"is_featured"` + Status ArticleStatus `json:"status"` +} + +type ArticlePatch struct { + Title *string `json:"title,omitempty"` + Authors *[]int64 `json:"authors,omitempty"` + Content *string 
`json:"content,omitempty"` + Categories *[]string `json:"categories,omitempty"` + Excerpt *string `json:"excerpt,omitempty"` + PhotoURL *string `json:"photo_url,omitempty"` + IsFeatured *bool `json:"is_featured,omitempty"` + Status *ArticleStatus `json:"status,omitempty"` +} + +type ArticleListParams struct { + Limit int + Offset int + Categories []string + SortBy ArticleSortBy + SortDirection SortDirection + AuthorID *int64 + Status ArticleStatus +} + +type Media struct { + ID int64 `json:"id"` + FileName string `json:"file_name"` + URL string `json:"url"` + MimeType string `json:"mime_type"` + SizeBytes int64 `json:"size_bytes"` + Width *int `json:"width,omitempty"` + Height *int `json:"height,omitempty"` + AltText string `json:"alt_text,omitempty"` + Caption string `json:"caption,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + UpdatedAt *time.Time `json:"updated_at,omitempty"` +} + +type MediaOverview struct { + ID int64 `json:"id"` + FileName string `json:"file_name"` + URL string `json:"url"` + MimeType string `json:"mime_type"` + SizeBytes int64 `json:"size_bytes"` + Width *int `json:"width,omitempty"` + Height *int `json:"height,omitempty"` + AltText string `json:"alt_text,omitempty"` +} + +type MediaInput struct { + FileName string `json:"file_name"` + URL string `json:"url"` + MimeType string `json:"mime_type"` + AltText string `json:"alt_text,omitempty"` + Caption string `json:"caption,omitempty"` +} + +type MediaPatch struct { + FileName *string `json:"file_name,omitempty"` + AltText *string `json:"alt_text,omitempty"` + Caption *string `json:"caption,omitempty"` +} + +type MediaListParams struct { + Limit int + Offset int + Query string + MimeType string + SortBy MediaSortBy + SortDirection SortDirection +} From e7ff1f56919edb0036949db53366a77d76f69b98 Mon Sep 17 00:00:00 2001 From: ssavutu Date: Thu, 19 Mar 2026 03:10:33 -0400 Subject: [PATCH 02/10] Add Loki logging capability, http models, routes, setup scripts --- 
docker-compose.yml | 82 +++++ .../grafana/provisioning/datasources/loki.yml | 9 + observability/loki-config.yml | 44 +++ observability/promtail-config.yml | 27 ++ scripts/generate_wordpress_sql.sh | 78 +++++ scripts/setup-containers.sh | 42 +++ server/Dockerfile | 17 + server/internal/database/crud.go | 69 +++++ server/internal/database/http_models.go | 197 ++++++++++++ server/internal/handlers/handlers.go | 290 ++++++++---------- server/internal/routes/routes.go | 35 ++- server/internal/routes/routes_test.go | 2 +- server/main.go | 76 +++-- server/main_test.go | 10 +- 14 files changed, 786 insertions(+), 192 deletions(-) create mode 100644 docker-compose.yml create mode 100644 observability/grafana/provisioning/datasources/loki.yml create mode 100644 observability/loki-config.yml create mode 100644 observability/promtail-config.yml create mode 100755 scripts/generate_wordpress_sql.sh create mode 100755 scripts/setup-containers.sh create mode 100644 server/Dockerfile create mode 100644 server/internal/database/crud.go create mode 100644 server/internal/database/http_models.go diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..e1a13b3 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,82 @@ +services: + mariadb: + image: mariadb:11.7 + restart: unless-stopped + environment: + MARIADB_ROOT_PASSWORD: ${MARIADB_ROOT_PASSWORD:-rootpassword} + MARIADB_DATABASE: ${MARIADB_DATABASE:-triangle} + MARIADB_USER: ${MARIADB_USER:-triangle_user} + MARIADB_PASSWORD: ${MARIADB_PASSWORD:-triangle_password} + volumes: + - mariadb_data:/var/lib/mysql + - ./server/internal/database/wordpress_etl/01-authors.sql:/docker-entrypoint-initdb.d/01-authors.sql:ro,z + - ./server/internal/database/wordpress_etl/02-articles.sql:/docker-entrypoint-initdb.d/02-articles.sql:ro,z + - ./server/internal/database/wordpress_etl/03-articles-authors.sql:/docker-entrypoint-initdb.d/03-articles-authors.sql:ro,z + - 
./server/internal/database/wordpress_etl/04-seo.sql:/docker-entrypoint-initdb.d/04-seo.sql:ro,z + healthcheck: + test: ["CMD", "healthcheck.sh", "--connect", "--innodb_initialized"] + interval: 10s + timeout: 5s + retries: 10 + start_period: 20s + + cms: + build: + context: ./server + dockerfile: Dockerfile + restart: unless-stopped + environment: + DB_NAME: ${MARIADB_DATABASE:-triangle} + DB_USER: ${MARIADB_USER:-triangle_user} + DB_PASSWORD: ${MARIADB_PASSWORD:-triangle_password} + DB_HOST: mariadb + DB_PORT: 3306 + TLS_CERT_FILE: /app/certs/localhost.crt + TLS_KEY_FILE: /app/certs/localhost.key + depends_on: + mariadb: + condition: service_healthy + ports: + - "8080:8080" + + loki: + image: grafana/loki:3.5.6 + restart: unless-stopped + command: ["-config.file=/etc/loki/config.yaml"] + volumes: + - ./observability/loki-config.yml:/etc/loki/config.yaml:ro,z + - loki_data:/loki + ports: + - "3100:3100" + + promtail: + image: grafana/promtail:3.5.6 + restart: unless-stopped + user: "0:0" + command: ["-config.file=/etc/promtail/config.yml"] + depends_on: + - loki + volumes: + - ./observability/promtail-config.yml:/etc/promtail/config.yml:ro,z + - promtail_positions:/tmp + - /var/lib/docker/containers:/var/lib/docker/containers:ro,z + + grafana: + image: grafana/grafana:12.2.0 + restart: unless-stopped + depends_on: + - loki + environment: + GF_SECURITY_ADMIN_USER: ${GRAFANA_ADMIN_USER:-admin} + GF_SECURITY_ADMIN_PASSWORD: ${GRAFANA_ADMIN_PASSWORD:-admin} + ports: + - "3000:3000" + volumes: + - grafana_data:/var/lib/grafana + - ./observability/grafana/provisioning/datasources/loki.yml:/etc/grafana/provisioning/datasources/loki.yml:ro,z + +volumes: + mariadb_data: + loki_data: + promtail_positions: + grafana_data: diff --git a/observability/grafana/provisioning/datasources/loki.yml b/observability/grafana/provisioning/datasources/loki.yml new file mode 100644 index 0000000..050b3c4 --- /dev/null +++ b/observability/grafana/provisioning/datasources/loki.yml @@ -0,0 
+1,9 @@ +apiVersion: 1 + +datasources: + - name: Loki + type: loki + access: proxy + url: http://loki:3100 + isDefault: true + editable: false diff --git a/observability/loki-config.yml b/observability/loki-config.yml new file mode 100644 index 0000000..69c73ea --- /dev/null +++ b/observability/loki-config.yml @@ -0,0 +1,44 @@ +auth_enabled: false + +server: + http_listen_port: 3100 + grpc_listen_port: 9096 + +common: + instance_addr: 127.0.0.1 + path_prefix: /loki + storage: + filesystem: + chunks_directory: /loki/chunks + rules_directory: /loki/rules + replication_factor: 1 + ring: + kvstore: + store: inmemory + +schema_config: + configs: + - from: 2024-01-01 + store: tsdb + object_store: filesystem + schema: v13 + index: + prefix: index_ + period: 24h + +ruler: + alertmanager_url: http://localhost:9093 + +limits_config: + allow_structured_metadata: true + volume_enabled: true + +pattern_ingester: + enabled: true + +query_range: + results_cache: + cache: + embedded_cache: + enabled: true + max_size_mb: 100 diff --git a/observability/promtail-config.yml b/observability/promtail-config.yml new file mode 100644 index 0000000..81ab3ec --- /dev/null +++ b/observability/promtail-config.yml @@ -0,0 +1,27 @@ +server: + http_listen_port: 9080 + grpc_listen_port: 0 + +positions: + filename: /tmp/positions.yaml + +clients: + - url: http://loki:3100/loki/api/v1/push + +scrape_configs: + - job_name: docker + static_configs: + - targets: [localhost] + labels: + job: docker + __path__: /var/lib/docker/containers/*/*-json.log + pipeline_stages: + - docker: {} + - regex: + expression: ".*(?:service=|\\\"service\\\":\\\")(?P<service>[a-zA-Z0-9_-]+).*" + - labels: + service: + relabel_configs: + - source_labels: [__path__] + regex: /var/lib/docker/containers/([a-f0-9]{64})/.*-json.log + target_label: container diff --git a/scripts/generate_wordpress_sql.sh b/scripts/generate_wordpress_sql.sh new file mode 100755 index 0000000..b90f7d1 --- /dev/null +++ b/scripts/generate_wordpress_sql.sh @@
-0,0 +1,78 @@ +#!/usr/bin/env bash +set -euo pipefail + +SRC_DIR="${1:-/home/sachin/Documents/Coding/wordpress-etl/logs/sql}" +OUT_DIR="${2:-server/internal/database/wordpress_etl}" + +mkdir -p "$OUT_DIR" + +# Authors: source CREATE statement from ETL intent, but skip malformed INSERT rows. +{ + cat <<'SQL' +DROP TABLE IF EXISTS authors; +CREATE TABLE authors ( + id BIGINT PRIMARY KEY, + display_name VARCHAR(255) NOT NULL, + first_name VARCHAR(255), + last_name VARCHAR(255), + email VARCHAR(255), + login VARCHAR(255) +); +SQL +} > "$OUT_DIR/01-authors.sql" + +# Articles: normalize column names expected by the CMS handlers. +{ + cat <<'SQL' +DROP TABLE IF EXISTS articles; +CREATE TABLE articles ( + id BIGINT PRIMARY KEY, + author_ids LONGTEXT, + authors LONGTEXT, + breaking_news BOOL, + comment_status VARCHAR(255), + description LONGTEXT, + featured_img_id BIGINT, + priority BOOL, + mod_date DATETIME, + photo_url LONGTEXT, + pub_date DATETIME, + tags LONGTEXT, + categories LONGTEXT, + metadata LONGTEXT, + `text` LONGTEXT, + title LONGTEXT +); +SQL + + grep '^INSERT INTO articles ' "$SRC_DIR/articles.sql" \ + | perl -pe 's/`authorIDs`/`author_ids`/g; s/`breakingNews`/`breaking_news`/g; s/`commentStatus`/`comment_status`/g; s/`featuredImgID`/`featured_img_id`/g; s/`modDate`/`mod_date`/g; s/`photoURL`/`photo_url`/g; s/`pubDate`/`pub_date`/g; s/'\''0000-00-00 00:00:00'\''/NULL/g' +} > "$OUT_DIR/02-articles.sql" + +# Article-author joins: fix the broken CREATE TABLE statement name. +{ + cat <<'SQL' +DROP TABLE IF EXISTS articles_authors; +CREATE TABLE articles_authors ( + id BIGINT PRIMARY KEY, + author_id BIGINT NOT NULL, + articles_id BIGINT NOT NULL +); +SQL + + grep '^INSERT INTO articles_authors ' "$SRC_DIR/articles_authors.sql" +} > "$OUT_DIR/03-articles-authors.sql" + +# SEO: source CREATE statement from ETL intent, but skip malformed INSERT rows. 
+{ + cat <<'SQL' +DROP TABLE IF EXISTS seo; +CREATE TABLE seo ( + id BIGINT PRIMARY KEY, + article_id BIGINT NOT NULL, + yoast_tag_data LONGTEXT +); +SQL +} > "$OUT_DIR/04-seo.sql" + +echo "Generated SQL in: $OUT_DIR" diff --git a/scripts/setup-containers.sh b/scripts/setup-containers.sh new file mode 100755 index 0000000..92b9248 --- /dev/null +++ b/scripts/setup-containers.sh @@ -0,0 +1,42 @@ +#!/usr/bin/env bash +set -euo pipefail + +ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" +cd "$ROOT_DIR" + +RESET_DATA=0 +if [[ "${1:-}" == "--reset-data" ]]; then + RESET_DATA=1 +fi + +if command -v docker >/dev/null 2>&1; then + : +else + echo "docker is not installed or not on PATH" >&2 + exit 1 +fi + +if docker compose version >/dev/null 2>&1; then + COMPOSE_CMD=(docker compose) +elif command -v docker-compose >/dev/null 2>&1; then + COMPOSE_CMD=(docker-compose) +else + echo "docker compose plugin (or docker-compose) is required" >&2 + exit 1 +fi + +if [[ "$RESET_DATA" -eq 1 ]]; then + echo "Resetting compose services and volumes..." + "${COMPOSE_CMD[@]}" down -v --remove-orphans +fi + +echo "Starting mariadb, cms, loki, and promtail..." +"${COMPOSE_CMD[@]}" up -d --build --remove-orphans + +echo +echo "Stack is up. Useful commands:" +echo " ${COMPOSE_CMD[*]} ps" +echo " ${COMPOSE_CMD[*]} logs -f cms" +echo " ${COMPOSE_CMD[*]} logs -f promtail" +echo " ${COMPOSE_CMD[*]} down" +echo " ${COMPOSE_CMD[*]} down -v # remove volumes (DB/Loki data)" diff --git a/server/Dockerfile b/server/Dockerfile new file mode 100644 index 0000000..2a26073 --- /dev/null +++ b/server/Dockerfile @@ -0,0 +1,17 @@ +FROM golang:1.24-alpine AS builder +WORKDIR /app + +COPY go.mod go.sum ./ +RUN go mod download + +COPY . . 
+RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o /bin/cms ./main.go + +FROM alpine:3.21 +WORKDIR /app + +COPY --from=builder /bin/cms /app/cms +COPY certs /app/certs + +EXPOSE 8080 +ENTRYPOINT ["/app/cms"] diff --git a/server/internal/database/crud.go b/server/internal/database/crud.go new file mode 100644 index 0000000..dd3d7d0 --- /dev/null +++ b/server/internal/database/crud.go @@ -0,0 +1,69 @@ +package database + +import ( + "context" + "database/sql" + "fmt" + "strings" +) + +func Select(ctx context.Context, conn *sql.DB, table string, cols []string, where string, args ...any) (*sql.Rows, error) { + if len(cols) == 0 { + return nil, fmt.Errorf("select requires at least one column") + } + + query := "SELECT " + strings.Join(cols, ", ") + " FROM `" + table + "`" + if strings.TrimSpace(where) != "" { + query += " WHERE " + where + } + + return conn.QueryContext(ctx, query, args...) +} + +func Insert(ctx context.Context, conn *sql.DB, table string, cols []string, values ...any) (sql.Result, error) { + if len(cols) == 0 { + return nil, fmt.Errorf("insert requires at least one column") + } + if len(cols) != len(values) { + return nil, fmt.Errorf("insert column/value count mismatch") + } + + quotedCols := make([]string, len(cols)) + placeholders := make([]string, len(cols)) + for i, col := range cols { + quotedCols[i] = "`" + col + "`" + placeholders[i] = "?" + } + + query := "INSERT INTO `" + table + "` (" + strings.Join(quotedCols, ", ") + ") VALUES (" + strings.Join(placeholders, ", ") + ")" + return conn.ExecContext(ctx, query, values...) 
+} + +func Update(ctx context.Context, conn *sql.DB, table string, cols []string, where string, args ...any) (sql.Result, error) { + if len(cols) == 0 { + return nil, fmt.Errorf("update requires at least one column") + } + if strings.TrimSpace(where) == "" { + return nil, fmt.Errorf("update requires a WHERE clause") + } + if len(args) < len(cols) { + return nil, fmt.Errorf("update requires values for all columns") + } + + sets := make([]string, len(cols)) + for i, col := range cols { + sets[i] = "`" + col + "` = ?" + } + + query := "UPDATE `" + table + "` SET " + strings.Join(sets, ", ") + " WHERE " + where + return conn.ExecContext(ctx, query, args...) +} + +func Delete(ctx context.Context, conn *sql.DB, table string, where string, args ...any) (sql.Result, error) { + if strings.TrimSpace(where) == "" { + return nil, fmt.Errorf("delete requires a WHERE clause") + } + + query := "DELETE FROM `" + table + "` WHERE " + where + return conn.ExecContext(ctx, query, args...) +} diff --git a/server/internal/database/http_models.go b/server/internal/database/http_models.go new file mode 100644 index 0000000..470e675 --- /dev/null +++ b/server/internal/database/http_models.go @@ -0,0 +1,197 @@ +package database + +import ( + "database/sql" + "encoding/json" + "strconv" + "strings" + "time" + + "server/internal/models" +) + +var AuthorSortByColumn = map[string]string{ + string(models.AuthorSortByDisplayName): "display_name", + string(models.AuthorSortByCreatedAt): "created_at", + string(models.AuthorSortByUpdatedAt): "updated_at", +} + +var ArticleSortByColumn = map[string]string{ + string(models.ArticleSortByTitle): "title", + string(models.ArticleSortBySlug): "slug", + string(models.ArticleSortByCreatedAt): "created_at", + string(models.ArticleSortByPublishedAt): "pub_date", + string(models.ArticleSortByStatus): "comment_status", +} + +var AuthorColumns = []string{"id", "display_name", "first_name", "last_name", "email"} + +var ArticleColumns = []string{ + "id", "title", 
"description", "text", "tags", + "pub_date", "mod_date", "priority", "breaking_news", + "comment_status", "photo_url", +} + +func BuildOrderLimit(query, sortBy, sortDir string, sortColumnMap map[string]string, limit, offset int) string { + if col, ok := sortColumnMap[sortBy]; ok && sortBy != "" { + dir := "ASC" + if strings.EqualFold(sortDir, string(models.SortDirectionDescending)) { + dir = "DESC" + } + query += " ORDER BY `" + col + "` " + dir + } + if limit > 0 { + query += " LIMIT " + strconv.Itoa(limit) + } + if offset > 0 { + query += " OFFSET " + strconv.Itoa(offset) + } + return query +} + +func ScanAuthor(rows *sql.Rows) (models.Author, error) { + var a models.Author + var firstName sql.NullString + var lastName sql.NullString + var email sql.NullString + err := rows.Scan(&a.ID, &a.DisplayName, &firstName, &lastName, &email) + if err != nil { + return models.Author{}, err + } + if firstName.Valid { + a.FirstName = firstName.String + } + if lastName.Valid { + a.LastName = lastName.String + } + if email.Valid { + a.Email = email.String + } + return a, nil +} + +func ScanArticle(rows *sql.Rows) (models.Article, error) { + var ( + a models.Article + description sql.NullString + text sql.NullString + tags sql.NullString + pubDate sql.NullTime + priority sql.NullBool + commentStatus sql.NullString + photoURL sql.NullString + ignoredMod sql.NullTime + ignoredBreak sql.NullBool + ) + err := rows.Scan( + &a.ID, &a.Title, &description, &text, &tags, + &pubDate, &ignoredMod, &priority, &ignoredBreak, + &commentStatus, &photoURL, + ) + if err != nil { + return models.Article{}, err + } + if text.Valid { + a.Content = text.String + } + if description.Valid { + a.Excerpt = description.String + } + if tags.Valid && strings.TrimSpace(tags.String) != "" { + if err := json.Unmarshal([]byte(tags.String), &a.Categories); err != nil { + a.Categories = strings.Split(tags.String, ",") + } + } + if pubDate.Valid { + t := pubDate.Time + a.PublishedAt = &t + } + if priority.Valid { 
+ a.IsFeatured = priority.Bool + } + if commentStatus.Valid { + a.Status = models.ArticleStatus(commentStatus.String) + } + if photoURL.Valid { + a.PhotoURL = photoURL.String + } + return a, nil +} + +func CollectArticles(rows *sql.Rows) ([]models.Article, error) { + var articles []models.Article + for rows.Next() { + a, err := ScanArticle(rows) + if err != nil { + return nil, err + } + articles = append(articles, a) + } + return articles, rows.Err() +} + +func ParsePublishedAt(value string) *time.Time { + if strings.TrimSpace(value) == "" { + return nil + } + layouts := []string{ + time.RFC3339, + "2006-01-02 15:04:05", + "2006-01-02", + } + for _, layout := range layouts { + if t, err := time.Parse(layout, value); err == nil { + return &t + } + } + return nil +} + +func FormatTags(categories []string) string { + if len(categories) == 0 { + return "" + } + buf, err := json.Marshal(categories) + if err != nil { + return strings.Join(categories, ",") + } + return string(buf) +} + +func statusToCommentStatus(status models.ArticleStatus) string { + return strings.TrimSpace(string(status)) +} + +func ArticleInputToDBFields(body models.ArticleInput) []any { + return []any{ + body.Title, + nil, + body.Content, + FormatTags(body.Categories), + nil, + nil, + body.IsFeatured, + false, + statusToCommentStatus(body.Status), + body.PhotoURL, + } +} + +func ArticleToDBFields(body models.Article) []any { + var publishedAt any + if body.PublishedAt != nil { + publishedAt = body.PublishedAt.UTC().Format("2006-01-02 15:04:05") + } + return []any{ + body.Title, + body.Excerpt, + body.Content, + FormatTags(body.Categories), + publishedAt, + nil, + body.IsFeatured, + false, + statusToCommentStatus(body.Status), + body.PhotoURL, + } +} diff --git a/server/internal/handlers/handlers.go b/server/internal/handlers/handlers.go index 560add9..62de35c 100644 --- a/server/internal/handlers/handlers.go +++ b/server/internal/handlers/handlers.go @@ -1,4 +1,4 @@ -package api +package handlers 
import ( "database/sql" @@ -7,41 +7,26 @@ import ( "strconv" "strings" - db "database" + db "server/internal/database" + "server/internal/models" ) -type Author struct { - ID int `json:"id"` - DisplayName string `json:"display_name"` - FirstName string `json:"first_name"` - LastName string `json:"last_name"` - Email string `json:"email"` - Login string `json:"login"` -} - -type Article struct { - ID int `json:"id"` - Title string `json:"title"` - Description string `json:"description"` - Text string `json:"text"` - Tags string `json:"tags"` - PubDate string `json:"pub_date"` - ModDate string `json:"mod_date"` - Priority bool `json:"priority"` - BreakingNews bool `json:"breaking_news"` - CommentStatus string `json:"comment_status"` - PhotoURL string `json:"photo_url"` -} +func Users(w http.ResponseWriter, _ *http.Request) { + w.Header().Set("Content-Type", "application/json") -var validAuthorSortBy = map[string]bool{ - "display_name": true, - "last_name": true, -} + resp := struct { + Status string `json:"status"` + Message string `json:"message"` + Code int `json:"code"` + }{ + Status: "OK", + Message: "Users endpoint hit", + Code: http.StatusOK, + } -var validArticleSortBy = map[string]bool{ - "pub_date": true, - "mod_date": true, - "title": true, + if err := json.NewEncoder(w).Encode(resp); err != nil { + http.Error(w, "failed to encode response", http.StatusInternalServerError) + } } func intParam(r *http.Request, key string, fallback int) int { @@ -66,49 +51,8 @@ func writeError(w http.ResponseWriter, status int, msg string) { writeJSON(w, status, map[string]string{"error": msg}) } -func buildOrderLimit(query, sortBy, sortDir string, validBy map[string]bool, limit, offset int) string { - if sortBy != "" && validBy[sortBy] { - dir := "ASC" - if sortDir == "desc" { - dir = "DESC" - } - query += " ORDER BY `" + sortBy + "` " + dir - } - if limit > 0 { - query += " LIMIT " + strconv.Itoa(limit) - } - if offset > 0 { - query += " OFFSET " + strconv.Itoa(offset) - } 
- return query -} - -var authorCols = []string{"id", "display_name", "first_name", "last_name", "email", "login"} - -func scanAuthor(rows *sql.Rows) (Author, error) { - var a Author - err := rows.Scan(&a.ID, &a.DisplayName, &a.FirstName, &a.LastName, &a.Email, &a.Login) - return a, err -} - -var articleCols = []string{ - "id", "title", "description", "text", "tags", - "pub_date", "mod_date", "priority", "breaking_news", - "comment_status", "photo_url", -} - -func scanArticle(rows *sql.Rows) (Article, error) { - var a Article - err := rows.Scan( - &a.ID, &a.Title, &a.Description, &a.Text, &a.Tags, - &a.PubDate, &a.ModDate, &a.Priority, &a.BreakingNews, - &a.CommentStatus, &a.PhotoURL, - ) - return a, err -} - // GET /v1/authors -func ListAuthors(conn *sql.DB) http.HandlerFunc { +func GetAuthors(conn *sql.DB) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { q := r.URL.Query() limit := intParam(r, "limit", 20) @@ -123,11 +67,14 @@ func ListAuthors(conn *sql.DB) http.HandlerFunc { args = append(args, articleID) } - query := "SELECT `id`, `display_name`, `first_name`, `last_name`, `email`, `login` FROM `authors`" + query := "SELECT `id`, `display_name`, `first_name`, `last_name`, `email` FROM `authors`" if len(conditions) > 0 { query += " WHERE " + strings.Join(conditions, " AND ") } - query = buildOrderLimit(query, q.Get("sort_by"), q.Get("sort_direction"), validAuthorSortBy, limit, offset) + if q.Get("sort_by") == "" { + query += " ORDER BY `id` DESC" + } + query = db.BuildOrderLimit(query, q.Get("sort_by"), q.Get("sort_direction"), db.AuthorSortByColumn, limit, offset) rows, err := conn.QueryContext(r.Context(), query, args...) 
if err != nil { @@ -136,9 +83,9 @@ func ListAuthors(conn *sql.DB) http.HandlerFunc { } defer rows.Close() - var authors []Author + var authors []models.Author for rows.Next() { - a, err := scanAuthor(rows) + a, err := db.ScanAuthor(rows) if err != nil { writeError(w, http.StatusInternalServerError, err.Error()) return @@ -154,16 +101,16 @@ func ListAuthors(conn *sql.DB) http.HandlerFunc { } // POST /v1/authors -func CreateAuthor(conn *sql.DB) http.HandlerFunc { +func PostAuthors(conn *sql.DB) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { - var body Author + var body models.AuthorInput if err := json.NewDecoder(r.Body).Decode(&body); err != nil { writeError(w, http.StatusBadRequest, "invalid JSON") return } _, err := db.Insert(r.Context(), conn, "authors", - []string{"display_name", "first_name", "last_name", "email", "login"}, - body.DisplayName, body.FirstName, body.LastName, body.Email, body.Login, + []string{"display_name", "first_name", "last_name", "email"}, + body.DisplayName, body.FirstName, body.LastName, body.Email, ) if err != nil { writeError(w, http.StatusInternalServerError, err.Error()) @@ -177,7 +124,7 @@ func CreateAuthor(conn *sql.DB) http.HandlerFunc { func GetAuthor(conn *sql.DB) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { id := r.PathValue("id") - rows, err := db.Select(r.Context(), conn, "authors", authorCols, "`id` = ?", id) + rows, err := db.Select(r.Context(), conn, "authors", db.AuthorColumns, "`id` = ?", id) if err != nil { writeError(w, http.StatusInternalServerError, err.Error()) return @@ -187,7 +134,7 @@ func GetAuthor(conn *sql.DB) http.HandlerFunc { writeError(w, http.StatusNotFound, "author not found") return } - a, err := scanAuthor(rows) + a, err := db.ScanAuthor(rows) if err != nil { writeError(w, http.StatusInternalServerError, err.Error()) return @@ -197,18 +144,18 @@ func GetAuthor(conn *sql.DB) http.HandlerFunc { } // PUT /v1/authors/{id} -func ReplaceAuthor(conn 
*sql.DB) http.HandlerFunc { +func PutAuthor(conn *sql.DB) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { id := r.PathValue("id") - var body Author + var body models.AuthorInput if err := json.NewDecoder(r.Body).Decode(&body); err != nil { writeError(w, http.StatusBadRequest, "invalid JSON") return } _, err := db.Update(r.Context(), conn, "authors", - []string{"display_name", "first_name", "last_name", "email", "login"}, + []string{"display_name", "first_name", "last_name", "email"}, "`id` = ?", - body.DisplayName, body.FirstName, body.LastName, body.Email, body.Login, id, + body.DisplayName, body.FirstName, body.LastName, body.Email, id, ) if err != nil { writeError(w, http.StatusInternalServerError, err.Error()) @@ -219,7 +166,7 @@ func ReplaceAuthor(conn *sql.DB) http.HandlerFunc { } // PATCH /v1/authors/{id} -func UpdateAuthor(conn *sql.DB) http.HandlerFunc { +func PatchAuthor(conn *sql.DB) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { id := r.PathValue("id") var body map[string]any @@ -229,7 +176,7 @@ func UpdateAuthor(conn *sql.DB) http.HandlerFunc { } var setCols []string var setArgs []any - for _, col := range []string{"display_name", "first_name", "last_name", "email", "login"} { + for _, col := range []string{"display_name", "first_name", "last_name", "email"} { if v, ok := body[col]; ok { setCols = append(setCols, col) setArgs = append(setArgs, v) @@ -262,7 +209,7 @@ func DeleteAuthor(conn *sql.DB) http.HandlerFunc { } // GET /v1/authors/{id}/articles -func ListAuthorArticles(conn *sql.DB) http.HandlerFunc { +func GetAuthorArticles(conn *sql.DB) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { id := r.PathValue("id") rows, err := queryArticles(r, conn, id) @@ -271,7 +218,7 @@ func ListAuthorArticles(conn *sql.DB) http.HandlerFunc { return } defer rows.Close() - articles, err := collectArticles(rows) + articles, err := db.CollectArticles(rows) if err != nil { writeError(w, 
http.StatusInternalServerError, err.Error()) return @@ -283,7 +230,7 @@ func ListAuthorArticles(conn *sql.DB) http.HandlerFunc { // ---- Article Handlers ------------------------------------------------------ // GET /v1/articles -func ListArticles(conn *sql.DB) http.HandlerFunc { +func GetArticles(conn *sql.DB) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { authorID := r.URL.Query().Get("author_id") rows, err := queryArticles(r, conn, authorID) @@ -292,7 +239,7 @@ func ListArticles(conn *sql.DB) http.HandlerFunc { return } defer rows.Close() - articles, err := collectArticles(rows) + articles, err := db.CollectArticles(rows) if err != nil { writeError(w, http.StatusInternalServerError, err.Error()) return @@ -301,45 +248,33 @@ func ListArticles(conn *sql.DB) http.HandlerFunc { } } -// queryArticles is shared by ListArticles and ListAuthorArticles. +// queryArticles is shared by GetArticles and GetAuthorArticles. func queryArticles(r *http.Request, conn *sql.DB, authorID string) (*sql.Rows, error) { q := r.URL.Query() limit := intParam(r, "limit", 20) offset := intParam(r, "offset", 0) - var conditions []string - var args []any - - if authorID != "" { - conditions = append(conditions, "`id` IN (SELECT `articles_id` FROM `articles_authors` WHERE `author_id` = ?)") - args = append(args, authorID) - } - - query := "SELECT `id`, `title`, `description`, `text`, `tags`, `pub_date`, `mod_date`, `priority`, `breaking_news`, `comment_status`, `photo_url` FROM `articles`" - if len(conditions) > 0 { - query += " WHERE " + strings.Join(conditions, " AND ") - } - query = buildOrderLimit(query, q.Get("sort_by"), q.Get("sort_direction"), validArticleSortBy, limit, offset) + var conditions []string + var args []any - return conn.QueryContext(r.Context(), query, args...) 
-} + if authorID != "" { + conditions = append(conditions, "`id` IN (SELECT `articles_id` FROM `articles_authors` WHERE `author_id` = ?)") + args = append(args, authorID) + } -func collectArticles(rows *sql.Rows) ([]Article, error) { - var articles []Article - for rows.Next() { - a, err := scanArticle(rows) - if err != nil { - return nil, err - } - articles = append(articles, a) + query := "SELECT `id`, `title`, `description`, `text`, `tags`, `pub_date`, `mod_date`, `priority`, `breaking_news`, `comment_status`, `photo_url` FROM `articles`" + if len(conditions) > 0 { + query += " WHERE " + strings.Join(conditions, " AND ") } - return articles, rows.Err() + query = db.BuildOrderLimit(query, q.Get("sort_by"), q.Get("sort_direction"), db.ArticleSortByColumn, limit, offset) + + return conn.QueryContext(r.Context(), query, args...) } // GET /v1/articles/{id} func GetArticle(conn *sql.DB) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { id := r.PathValue("id") - rows, err := db.Select(r.Context(), conn, "articles", articleCols, "`id` = ?", id) + rows, err := db.Select(r.Context(), conn, "articles", db.ArticleColumns, "`id` = ?", id) if err != nil { writeError(w, http.StatusInternalServerError, err.Error()) return @@ -349,7 +284,7 @@ func GetArticle(conn *sql.DB) http.HandlerFunc { writeError(w, http.StatusNotFound, "article not found") return } - a, err := scanArticle(rows) + a, err := db.ScanArticle(rows) if err != nil { writeError(w, http.StatusInternalServerError, err.Error()) return @@ -359,17 +294,18 @@ func GetArticle(conn *sql.DB) http.HandlerFunc { } // POST /v1/articles -func CreateArticle(conn *sql.DB) http.HandlerFunc { +func PostArticles(conn *sql.DB) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { - var body Article + var body models.ArticleInput if err := json.NewDecoder(r.Body).Decode(&body); err != nil { writeError(w, http.StatusBadRequest, "invalid JSON") return } - _, err := db.Insert(r.Context(), conn, 
"articles", - []string{"title", "description", "text", "tags", "pub_date", "mod_date", "priority", "breaking_news", "comment_status", "photo_url"}, - body.Title, body.Description, body.Text, body.Tags, body.PubDate, body.ModDate, body.Priority, body.BreakingNews, body.CommentStatus, body.PhotoURL, - ) + fields := db.ArticleInputToDBFields(body) + _, err := db.Insert(r.Context(), conn, "articles", + []string{"title", "description", "text", "tags", "pub_date", "mod_date", "priority", "breaking_news", "comment_status", "photo_url"}, + fields..., + ) if err != nil { writeError(w, http.StatusInternalServerError, err.Error()) return @@ -379,19 +315,21 @@ func CreateArticle(conn *sql.DB) http.HandlerFunc { } // PUT /v1/articles/{id} -func ReplaceArticle(conn *sql.DB) http.HandlerFunc { +func PutArticle(conn *sql.DB) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { id := r.PathValue("id") - var body Article + var body models.Article if err := json.NewDecoder(r.Body).Decode(&body); err != nil { writeError(w, http.StatusBadRequest, "invalid JSON") return } - _, err := db.Update(r.Context(), conn, "articles", - []string{"title", "description", "text", "tags", "pub_date", "mod_date", "priority", "breaking_news", "comment_status", "photo_url"}, - "`id` = ?", - body.Title, body.Description, body.Text, body.Tags, body.PubDate, body.ModDate, body.Priority, body.BreakingNews, body.CommentStatus, body.PhotoURL, id, - ) + fields := db.ArticleToDBFields(body) + fields = append(fields, id) + _, err := db.Update(r.Context(), conn, "articles", + []string{"title", "description", "text", "tags", "pub_date", "mod_date", "priority", "breaking_news", "comment_status", "photo_url"}, + "`id` = ?", + fields..., + ) if err != nil { writeError(w, http.StatusInternalServerError, err.Error()) return @@ -401,7 +339,7 @@ func ReplaceArticle(conn *sql.DB) http.HandlerFunc { } // PATCH /v1/articles/{id} -func UpdateArticle(conn *sql.DB) http.HandlerFunc { +func PatchArticle(conn 
*sql.DB) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { id := r.PathValue("id") var body map[string]any @@ -411,9 +349,62 @@ func UpdateArticle(conn *sql.DB) http.HandlerFunc { } var setCols []string var setArgs []any - for _, col := range []string{"title", "description", "text", "tags", "pub_date", "mod_date", "priority", "breaking_news", "comment_status", "photo_url"} { - if v, ok := body[col]; ok { - setCols = append(setCols, col) + columnByJSONField := map[string]string{ + "title": "title", + "excerpt": "description", + "content": "text", + "categories": "tags", + "published_at": "pub_date", + "is_featured": "priority", + "status": "comment_status", + "photo_url": "photo_url", + } + for jsonField, column := range columnByJSONField { + v, ok := body[jsonField] + if !ok { + continue + } + switch jsonField { + case "categories": + arr, ok := v.([]any) + if !ok { + writeError(w, http.StatusBadRequest, "categories must be an array of strings") + return + } + categories := make([]string, 0, len(arr)) + for _, raw := range arr { + s, ok := raw.(string) + if !ok { + writeError(w, http.StatusBadRequest, "categories must be an array of strings") + return + } + categories = append(categories, s) + } + setCols = append(setCols, column) + setArgs = append(setArgs, db.FormatTags(categories)) + case "published_at": + s, ok := v.(string) + if !ok { + writeError(w, http.StatusBadRequest, "published_at must be an RFC3339 string") + return + } + t := db.ParsePublishedAt(s) + if t == nil { + writeError(w, http.StatusBadRequest, "published_at has invalid format") + return + } + setCols = append(setCols, column) + setArgs = append(setArgs, t.UTC().Format("2006-01-02 15:04:05")) + case "status": + s, ok := v.(string) + if !ok { + writeError(w, http.StatusBadRequest, "status must be a string") + return + } + setCols = append(setCols, column) + setArgs = append(setArgs, strings.TrimSpace(s)) + default: + setCols = append(setCols, column) setArgs = 
append(setArgs, v) } } @@ -442,26 +433,3 @@ func DeleteArticle(conn *sql.DB) http.HandlerFunc { w.WriteHeader(http.StatusNoContent) } } - -// ---- Router ---------------------------------------------------------------- - -func NewRouter(conn *sql.DB) http.Handler { - mux := http.NewServeMux() - - mux.HandleFunc("GET /v1/authors", ListAuthors(conn)) - mux.HandleFunc("POST /v1/authors", CreateAuthor(conn)) - mux.HandleFunc("GET /v1/authors/{id}", GetAuthor(conn)) - mux.HandleFunc("PUT /v1/authors/{id}", ReplaceAuthor(conn)) - mux.HandleFunc("PATCH /v1/authors/{id}", UpdateAuthor(conn)) - mux.HandleFunc("DELETE /v1/authors/{id}", DeleteAuthor(conn)) - mux.HandleFunc("GET /v1/authors/{id}/articles", ListAuthorArticles(conn)) - - mux.HandleFunc("GET /v1/articles", ListArticles(conn)) - mux.HandleFunc("GET /v1/articles/{id}", GetArticle(conn)) - mux.HandleFunc("POST /v1/articles", CreateArticle(conn)) - mux.HandleFunc("PUT /v1/articles/{id}", ReplaceArticle(conn)) - mux.HandleFunc("PATCH /v1/articles/{id}", UpdateArticle(conn)) - mux.HandleFunc("DELETE /v1/articles/{id}", DeleteArticle(conn)) - - return mux -} \ No newline at end of file diff --git a/server/internal/routes/routes.go b/server/internal/routes/routes.go index acfaf48..b6dc32c 100644 --- a/server/internal/routes/routes.go +++ b/server/internal/routes/routes.go @@ -2,9 +2,42 @@ package routes import ( "net/http" + "database/sql" "server/internal/handlers" ) -func Register(mux *http.ServeMux) { +func Register(mux *http.ServeMux, conn *sql.DB) { + // Authors mux.HandleFunc("GET /users", handlers.Users) + mux.HandleFunc("GET /v1/authors", handlers.GetAuthors(conn)) + mux.HandleFunc("POST /v1/authors", handlers.PostAuthors(conn)) + mux.HandleFunc("GET /v1/authors/{id}", handlers.GetAuthor(conn)) + mux.HandleFunc("PUT /v1/authors/{id}", handlers.PutAuthor(conn)) + mux.HandleFunc("PATCH /v1/authors/{id}", handlers.PatchAuthor(conn)) + mux.HandleFunc("DELETE /v1/authors/{id}", handlers.DeleteAuthor(conn)) + 
mux.HandleFunc("GET /v1/authors/{id}/articles", handlers.GetAuthorArticles(conn)) + + // Articles + mux.HandleFunc("GET /v1/articles", handlers.GetArticles(conn)) + mux.HandleFunc("GET /v1/articles/{id}", handlers.GetArticle(conn)) + mux.HandleFunc("POST /v1/articles", handlers.PostArticles(conn)) + mux.HandleFunc("PUT /v1/articles/{id}", handlers.PutArticle(conn)) + mux.HandleFunc("PATCH /v1/articles/{id}", handlers.PatchArticle(conn)) + mux.HandleFunc("DELETE /v1/articles/{id}", handlers.DeleteArticle(conn)) + + // Sections + mux.HandleFunc("GET /v1/sections/{section_slug}/articles", handlers.Users) + mux.HandleFunc("GET /v1/subsections/{subsection_slug}/articles", handlers.Users) + + // Media + mux.HandleFunc("GET /v1/media", handlers.Users) + mux.HandleFunc("POST /v1/media", handlers.Users) + mux.HandleFunc("GET /v1/media/{id}", handlers.Users) + mux.HandleFunc("PUT /v1/media/{id}", handlers.Users) + mux.HandleFunc("PATCH /v1/media/{id}", handlers.Users) + mux.HandleFunc("DELETE /v1/media/{id}", handlers.Users) + mux.HandleFunc("GET /v1/media/gallery", handlers.Users) + + // Homepage + mux.HandleFunc("GET /v1/homepage", handlers.Users) } diff --git a/server/internal/routes/routes_test.go b/server/internal/routes/routes_test.go index aef3848..b9231d9 100644 --- a/server/internal/routes/routes_test.go +++ b/server/internal/routes/routes_test.go @@ -8,7 +8,7 @@ import ( func TestRegister_UsersRoute(t *testing.T) { mux := http.NewServeMux() - Register(mux) + Register(mux, nil) tests := []struct { name string diff --git a/server/main.go b/server/main.go index ec00ebe..eb35113 100644 --- a/server/main.go +++ b/server/main.go @@ -3,19 +3,20 @@ package main import ( "context" "crypto/tls" + "database/sql" "errors" "fmt" "log/slog" "net/http" "os" "os/signal" - + "server/internal/database" "server/internal/middleware" "server/internal/routes" - "server/internal/database" - "syscall" "strconv" + "strings" + "syscall" "time" "github.com/joho/godotenv" @@ -54,38 +55,65 @@ 
type runDeps struct { } func main() { - godotenv.Load() - - dbName := os.Getenv("DB_NAME") - user := os.Getenv("DB_USER") - password := os.Getenv("DB_PASSWORD") - host := os.Getenv("DB_HOST") - portStr := os.Getenv("DB_PORT") - port, _ := strconv.Atoi(portStr) - - db, err := database.InitializeConnection(context.Background(), dbName, user, password, host, port) - if err != nil { - panic(err) - } + godotenv.Load(".env") + + logger := slog.New(slog.NewJSONHandler(os.Stdout, nil)).With("service", "cms") + slog.SetDefault(logger) + + dbName, user, password, host, port, err := dbConfigFromEnv() + if err != nil { + slog.Error("invalid database configuration", "error", err) + os.Exit(1) + } + + db, err := database.InitializeConnection(context.Background(), dbName, user, password, host, port) + if err != nil { + slog.Error("database initialization failed", "error", err, "host", host, "port", port, "db_name", dbName) + os.Exit(1) + } // Just for testing the database row := db.QueryRow("SELECT COUNT(*) FROM information_schema.tables WHERE table_schema = ?", dbName) var tableCount int if err := row.Scan(&tableCount); err != nil { - fmt.Println("Error querying table count:", err) + slog.Error("cms startup", "event", "startup", "stage", "db_table_count", "error", err) } else { - fmt.Println("Number of tables in database:", tableCount) + slog.Info("cms startup", "event", "startup", "stage", "db_table_count", "table_count", tableCount) } - logger := slog.New(slog.NewTextHandler(os.Stdout, nil)) - slog.SetDefault(logger) - - if err := run(defaultRunDeps()); err != nil { + if err := run(defaultRunDeps(), db); err != nil { slog.Error("server terminated", "error", err) os.Exit(1) } } +func dbConfigFromEnv() (dbName, user, password, host string, port int, err error) { + dbName = strings.TrimSpace(os.Getenv("DB_NAME")) + user = strings.TrimSpace(os.Getenv("DB_USER")) + password = os.Getenv("DB_PASSWORD") + host = strings.TrimSpace(os.Getenv("DB_HOST")) + portStr := 
strings.TrimSpace(os.Getenv("DB_PORT")) + + if dbName == "" { + return "", "", "", "", 0, fmt.Errorf("DB_NAME is required") + } + if user == "" { + return "", "", "", "", 0, fmt.Errorf("DB_USER is required") + } + if host == "" { + host = "127.0.0.1" + } + if portStr == "" { + port = 8080 + return dbName, user, password, host, port, nil + } + port, err = strconv.Atoi(portStr) + if err != nil || port <= 0 || port > 65535 { + return "", "", "", "", 0, fmt.Errorf("DB_PORT must be a valid TCP port, got %q", portStr) + } + return dbName, user, password, host, port, nil +} + func defaultRunDeps() runDeps { return runDeps{ loadX509KeyPair: tls.LoadX509KeyPair, @@ -109,7 +137,7 @@ func newDefaultServer(cert tls.Certificate, mux *http.ServeMux, logger *slog.Log } } -func run(deps runDeps) error { +func run(deps runDeps, conn *sql.DB) error { if deps.loadX509KeyPair == nil { deps.loadX509KeyPair = tls.LoadX509KeyPair } @@ -135,7 +163,7 @@ func run(deps runDeps) error { } mux := http.NewServeMux() - routes.Register(mux) + routes.Register(mux, conn) server := deps.newServer(cert, mux, slog.Default()) serverErr := make(chan error, 1) diff --git a/server/main_test.go b/server/main_test.go index be8ae35..d2f85b5 100644 --- a/server/main_test.go +++ b/server/main_test.go @@ -60,7 +60,7 @@ func TestRun_TLSLoadFailure(t *testing.T) { return &fakeServer{} }, signalCh: make(chan os.Signal, 1), - }) + }, nil) if err == nil { t.Fatal("expected an error, got nil") @@ -86,7 +86,7 @@ func TestRun_TLSLoadPathsFromEnv(t *testing.T) { return tls.Certificate{}, errors.New("bad certificate") }, signalCh: make(chan os.Signal, 1), - }) + }, nil) if err == nil { t.Fatal("expected an error, got nil") @@ -115,7 +115,7 @@ func TestRun_ServerExitError(t *testing.T) { return srv }, signalCh: make(chan os.Signal, 1), - }) + }, nil) if err == nil { t.Fatal("expected an error, got nil") @@ -154,7 +154,7 @@ func TestRun_GracefulShutdownOnSignal(t *testing.T) { }, signalCh: sigCh, shutdownTimeout: 
time.Second, - }) + }, nil) if err != nil { t.Fatalf("expected nil error, got %v", err) @@ -196,7 +196,7 @@ func TestRun_ShutdownFailureCallsClose(t *testing.T) { }, signalCh: sigCh, shutdownTimeout: time.Second, - }) + }, nil) if err == nil { t.Fatal("expected an error, got nil") From b1b1176c8ce26bc036934a61d9ea0170f48dda81 Mon Sep 17 00:00:00 2001 From: ssavutu Date: Thu, 19 Mar 2026 03:11:51 -0400 Subject: [PATCH 03/10] Fix header issue --- frontend/src/components/{header.tsx => Header.tsx} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename frontend/src/components/{header.tsx => Header.tsx} (100%) diff --git a/frontend/src/components/header.tsx b/frontend/src/components/Header.tsx similarity index 100% rename from frontend/src/components/header.tsx rename to frontend/src/components/Header.tsx From c068fabcb593dc4fb03032586ff78576115330b5 Mon Sep 17 00:00:00 2001 From: ssavutu Date: Thu, 19 Mar 2026 03:14:02 -0400 Subject: [PATCH 04/10] Update git ignore for sql database files --- server/.gitignore | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/server/.gitignore b/server/.gitignore index 0cfa848..68ac35e 100644 --- a/server/.gitignore +++ b/server/.gitignore @@ -1,4 +1,5 @@ cover.out certs/*.crt certs/*.key -.env \ No newline at end of file +.env +*.sql From e0b00b41c8fb4acbbf5ba22084724528472e1c5e Mon Sep 17 00:00:00 2001 From: ssavutu Date: Thu, 19 Mar 2026 06:00:12 -0400 Subject: [PATCH 05/10] Update README, adjust minor pathing errors --- README.md | 193 +++++++++++++++++++++++++++++- docker-compose.yml | 2 + scripts/generate_wordpress_sql.sh | 93 +++++++++++++- server/main.go | 2 +- 4 files changed, 286 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 7dfae82..32b27a8 100644 --- a/README.md +++ b/README.md @@ -1 +1,192 @@ -# triangle-cms +# Triangle CMS (Delta) + +## Foreword + +Triangle CMS (Delta) is the Drexel Triangle's solution for a minimal WordPress replacement. 
Its core is a headless CMS (Content Management System) that serves and edits content from a local database. WordPress content can be migrated into Delta via the `wordpress-etl` tool. Delta also offers a minimal frontend similar to WordPress, and because the core CMS is headless, users are welcome to build their own. Delta also includes built-in logging, accessible through a Grafana dashboard, to enable easy monitoring of API calls and internal errors. + +## Developer Guide + +### Project Structure + +Triangle CMS is split into: +- `server/`: Go backend +- `frontend/`: React frontend +- `observability/`: Loki + Promtail + Grafana config for logging purposes +- `scripts/`: setup scripts + +### API Specification and Data Models + +API specification and response/data model documentation live in the project wiki: +- https://github.com/DrexelTriangle/triangle-cms/wiki + - Endpoints: https://github.com/DrexelTriangle/triangle-cms/wiki/Endpoints + - Response shapes/models: https://github.com/DrexelTriangle/triangle-cms/wiki/Response-Shapes + +### Prerequisites + +- Docker and Docker Compose +- Go 1.24+ +- Node.js 20+ and npm +- Local clone of `wordpress-etl`: https://github.com/DrexelTriangle/wordpress-etl + +### Quick Start + +1. Run the WordPress ETL pipeline locally in `wordpress-etl` to generate source SQL files. +2. From `triangle-cms` repo root, generate CMS SQL files: + +```bash +./scripts/generate_wordpress_sql.sh +``` + +By default, the script auto-detects ETL SQL if the `wordpress-etl` repository is in the same directory or the parent directory of `triangle-cms`: +- `../wordpress-etl/logs/sql` +- `./wordpress-etl/logs/sql` + +3. 
Start only MariaDB in Docker (recommended for local backend development): + +```bash +docker compose up -d mariadb +``` + +This starts: +- MariaDB (`mariadb`) on `localhost:${MARIADB_PORT_FORWARD:-3306}` (container port `3306`), populated from the ETL pipeline output + +If you want the full Docker stack (CMS + observability), run: + +```bash +./scripts/setup-containers.sh +``` + +This starts: +- MariaDB (`mariadb`) on `localhost:${MARIADB_PORT_FORWARD:-3306}` (container port `3306`), populated from the ETL pipeline output +- CMS backend (`cms`) on `https://localhost:8080` which exposes the API +- Promtail (`promtail`) which collects logs from Docker containers +- Loki (`loki`) on port `3100`, which indexes logs from Promtail +- Grafana (`http://localhost:3000`, default `User:admin, Password:admin`) which allows you to explore and query logs from Loki + +Important: +- Logging/observability (Promtail, Loki, Grafana) is only available when running the full Docker stack. +- If you run the backend locally via `go run` (instead of the `cms` Docker container), those Docker logging pipelines do not apply to your local process. + +Reset all compose volumes and rebuild: + +> [!WARNING] +> Running this command will erase all logs and database entries, equivalent to a fresh install + +```bash +./scripts/setup-containers.sh --reset-data +``` + +### Local Development + +#### Backend (Go API) + +1. Ensure MariaDB is running (via Docker or local instance). +2. If using Docker MariaDB and running the backend via `go run`, configure `server/.env` with the same values used by your compose `MARIADB_DATABASE`, `MARIADB_USER`, and `MARIADB_PASSWORD`: + +```env +DB_NAME=triangle +DB_USER=triangle_user +DB_PASSWORD=triangle_password +DB_HOST=127.0.0.1 +DB_PORT=3306 +``` + +If using a separate local MariaDB instance, configure `server/.env` for that instance instead. + +3. 
If the Docker `cms` service is running, stop it first to avoid port conflict on `:8080`: + +```bash +docker compose stop cms +``` + +4. Run backend: + +```bash +cd server +go run ./main.go +``` + +The backend serves HTTPS on `https://localhost:8080` using (the certs are just there to keep Postman happy): +- `server/certs/localhost.crt` +- `server/certs/localhost.key` + +#### Apply Backend Changes to Docker CMS + +When you change Go backend code under `server/` and want those changes reflected in the Docker `cms` container, rebuild and restart that service image: + +```bash +docker compose up -d --build cms +``` + +If your change is to MariaDB bootstrap SQL files in `server/internal/database/wordpress_etl/`, those are only applied on fresh DB initialization. Recreate volumes for those to take effect: + +> [!WARNING] +> Running this command will erase all logs and database entries, equivalent to a fresh install + +```bash +./scripts/setup-containers.sh --reset-data +``` + +#### Frontend (Vite) + +```bash +cd frontend +npm install +npm run dev +``` + +Vite dev server starts on `http://localhost:5173`. 
+ +### WordPress SQL ETL Flow + +SQL files for bootstrap imports live in: +- `server/internal/database/wordpress_etl/` + +Before running this step, you must have already run the ETL pipeline in: +- https://github.com/DrexelTriangle/wordpress-etl + +Generate CMS SQL files from the ETL pipeline output: + +```bash +./scripts/generate_wordpress_sql.sh [source_sql_dir] [output_dir] +``` + +Defaults: +- source: auto-detected (`../wordpress-etl`, `../wordpress-etl/logs/sql`, `./wordpress-etl`, `./wordpress-etl/logs/sql`) +- output: default (`server/internal/database/wordpress_etl`) + +Overrides: +- Pass explicit args: + `./scripts/generate_wordpress_sql.sh ../wordpress-etl/logs/sql server/internal/database/wordpress_etl` +- Or use env vars: + `WP_ETL_SQL_DIR=../wordpress-etl ./scripts/generate_wordpress_sql.sh` + `WP_ETL_OUT_DIR=server/internal/database/wordpress_etl ./scripts/generate_wordpress_sql.sh` + `WP_ETL_SQL_DIR=../wordpress-etl WP_ETL_OUT_DIR=server/internal/database/wordpress_etl ./scripts/generate_wordpress_sql.sh` + +Generated files: +- `01-authors.sql` +- `02-articles.sql` +- `03-articles-authors.sql` +- `04-seo.sql` + +These are mounted into MariaDB init at container startup through `docker-compose.yml`. + +### Testing + +Backend tests: + +```bash +cd server +go test ./... +``` + +Test coverage: + +Run to see the percentage of code each test covers + +```bash +cd server +go test -coverprofile=cover.out ./... +``` + +This also generates the `cover.out` file, showing exactly which lines are run during tests. 
diff --git a/docker-compose.yml b/docker-compose.yml index e1a13b3..c96c8af 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -13,6 +13,8 @@ services: - ./server/internal/database/wordpress_etl/02-articles.sql:/docker-entrypoint-initdb.d/02-articles.sql:ro,z - ./server/internal/database/wordpress_etl/03-articles-authors.sql:/docker-entrypoint-initdb.d/03-articles-authors.sql:ro,z - ./server/internal/database/wordpress_etl/04-seo.sql:/docker-entrypoint-initdb.d/04-seo.sql:ro,z + ports: + - "${MARIADB_PORT_FORWARD:-3306}:3306" healthcheck: test: ["CMD", "healthcheck.sh", "--connect", "--innodb_initialized"] interval: 10s diff --git a/scripts/generate_wordpress_sql.sh b/scripts/generate_wordpress_sql.sh index b90f7d1..1c5f3cd 100755 --- a/scripts/generate_wordpress_sql.sh +++ b/scripts/generate_wordpress_sql.sh @@ -1,8 +1,97 @@ #!/usr/bin/env bash set -euo pipefail -SRC_DIR="${1:-/home/sachin/Documents/Coding/wordpress-etl/logs/sql}" -OUT_DIR="${2:-server/internal/database/wordpress_etl}" +ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" + +is_valid_source_dir() { + local dir="$1" + [[ -f "$dir/articles.sql" && -f "$dir/articles_authors.sql" ]] +} + +resolve_source_dir() { + local base="$1" + local candidates=("$base" "$base/logs/sql" "$base/sql") + local candidate + for candidate in "${candidates[@]}"; do + if [[ "$candidate" != /* ]]; then + candidate="$ROOT_DIR/$candidate" + fi + if is_valid_source_dir "$candidate"; then + echo "$candidate" + return 0 + fi + done + return 1 +} + +usage_error() { + cat >&2 <<'ERR' +Could not determine WordPress ETL SQL source directory. 
+ +Expected one of: + - first script argument + - WP_ETL_SQL_DIR environment variable + - ../wordpress-etl or ../wordpress-etl/logs/sql (relative to triangle-cms) + - ./wordpress-etl or ./wordpress-etl/logs/sql (inside triangle-cms) + +The resolved source must contain: + - articles.sql + - articles_authors.sql + +Example: + ./scripts/generate_wordpress_sql.sh ../wordpress-etl + ./scripts/generate_wordpress_sql.sh ../wordpress-etl/logs/sql + WP_ETL_SQL_DIR=../wordpress-etl ./scripts/generate_wordpress_sql.sh +ERR +} + +# Source selection precedence: +# 1) first CLI arg (repo root or sql dir) +# 2) WP_ETL_SQL_DIR env var (repo root or sql dir) +# 3) common relative locations near this repo +SRC_HINTS=() +if [[ -n "${1:-}" ]]; then + SRC_HINTS+=("$1") +fi +if [[ -n "${WP_ETL_SQL_DIR:-}" ]]; then + SRC_HINTS+=("$WP_ETL_SQL_DIR") +fi +SRC_HINTS+=( + "$ROOT_DIR/../wordpress-etl" + "$ROOT_DIR/../wordpress-etl/logs/sql" + "$ROOT_DIR/wordpress-etl" + "$ROOT_DIR/wordpress-etl/logs/sql" +) + +SRC_DIR="" +for hint in "${SRC_HINTS[@]}"; do + if resolved="$(resolve_source_dir "$hint")"; then + SRC_DIR="$resolved" + break + fi +done + +if [[ -z "$SRC_DIR" ]]; then + usage_error + exit 1 +fi + +# Output selection precedence: +# 1) second CLI arg +# 2) WP_ETL_OUT_DIR env var +# 3) repo default +if [[ -n "${2:-}" ]]; then + OUT_DIR="$2" +elif [[ -n "${WP_ETL_OUT_DIR:-}" ]]; then + OUT_DIR="$WP_ETL_OUT_DIR" +else + OUT_DIR="$ROOT_DIR/server/internal/database/wordpress_etl" +fi + +# Normalize relative paths from repo root for consistent behavior. 
+if [[ "$OUT_DIR" != /* ]]; then + OUT_DIR="$ROOT_DIR/$OUT_DIR" +fi mkdir -p "$OUT_DIR" diff --git a/server/main.go b/server/main.go index eb35113..3eb1aca 100644 --- a/server/main.go +++ b/server/main.go @@ -104,7 +104,7 @@ func dbConfigFromEnv() (dbName, user, password, host string, port int, err error host = "127.0.0.1" } if portStr == "" { - port = 8080 + port = 3306 return dbName, user, password, host, port, nil } port, err = strconv.Atoi(portStr) From 0d7cd45dcfe7448bcab0ba27321ff7465739f2ba Mon Sep 17 00:00:00 2001 From: ssavutu Date: Mon, 23 Mar 2026 03:07:18 -0400 Subject: [PATCH 06/10] Update go mod --- server/go.mod | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/go.mod b/server/go.mod index 87af45e..26d9c41 100644 --- a/server/go.mod +++ b/server/go.mod @@ -6,5 +6,5 @@ require github.com/go-sql-driver/mysql v1.9.3 require ( filippo.io/edwards25519 v1.1.0 // indirect - github.com/joho/godotenv v1.5.1 // indirect + github.com/joho/godotenv v1.5.1 ) From 23270f12b7f1d006cb3c271101ad538b1bb25d33 Mon Sep 17 00:00:00 2001 From: ssavutu Date: Mon, 23 Mar 2026 03:33:02 -0400 Subject: [PATCH 07/10] Update package version, git ignore --- server/.gitignore | 1 + server/go.mod | 2 +- server/go.sum | 2 ++ 3 files changed, 4 insertions(+), 1 deletion(-) diff --git a/server/.gitignore b/server/.gitignore index 68ac35e..7fd6897 100644 --- a/server/.gitignore +++ b/server/.gitignore @@ -3,3 +3,4 @@ certs/*.crt certs/*.key .env *.sql +report.json \ No newline at end of file diff --git a/server/go.mod b/server/go.mod index 26d9c41..1784f7e 100644 --- a/server/go.mod +++ b/server/go.mod @@ -5,6 +5,6 @@ go 1.24.13 require github.com/go-sql-driver/mysql v1.9.3 require ( - filippo.io/edwards25519 v1.1.0 // indirect + filippo.io/edwards25519 v1.2.0 // indirect github.com/joho/godotenv v1.5.1 ) diff --git a/server/go.sum b/server/go.sum index b380070..ed83ad9 100644 --- a/server/go.sum +++ b/server/go.sum @@ -1,5 +1,7 @@ filippo.io/edwards25519 v1.1.0 
h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= +filippo.io/edwards25519 v1.2.0 h1:crnVqOiS4jqYleHd9vaKZ+HKtHfllngJIiOpNpoJsjo= +filippo.io/edwards25519 v1.2.0/go.mod h1:xzAOLCNug/yB62zG1bQ8uziwrIqIuxhctzJT18Q77mc= github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1aweo= github.com/go-sql-driver/mysql v1.9.3/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU= github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= From b2521774bd93e7498bbfb8ed79b0d45c027f9365 Mon Sep 17 00:00:00 2001 From: ssavutu Date: Mon, 23 Mar 2026 03:38:42 -0400 Subject: [PATCH 08/10] Update git ignore --- .gitignore | 6 ++++++ server/.gitignore | 6 ------ 2 files changed, 6 insertions(+), 6 deletions(-) create mode 100644 .gitignore delete mode 100644 server/.gitignore diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..e15744f --- /dev/null +++ b/.gitignore @@ -0,0 +1,6 @@ +/server/cover.out +/server/certs/*.crt +/server/certs/*.key +/server/.env +/server/**/*.sql +/report.json diff --git a/server/.gitignore b/server/.gitignore deleted file mode 100644 index 7fd6897..0000000 --- a/server/.gitignore +++ /dev/null @@ -1,6 +0,0 @@ -cover.out -certs/*.crt -certs/*.key -.env -*.sql -report.json \ No newline at end of file From 2dccc330c039fac1de158f4976e67522e2d974f0 Mon Sep 17 00:00:00 2001 From: ssavutu Date: Wed, 1 Apr 2026 13:37:48 -0400 Subject: [PATCH 09/10] Misc, fixed subsections --- .gitignore | 1 + server/internal/database/http_models.go | 40 +++-- server/internal/handlers/article_params.go | 81 ++++++++++ server/internal/handlers/handlers.go | 168 +++++++++++++++++++-- server/internal/models/types.go | 89 +++++------ server/internal/routes/routes.go | 34 ++--- 6 files changed, 329 insertions(+), 84 deletions(-) create mode 100644 server/internal/handlers/article_params.go diff --git a/.gitignore b/.gitignore index 
e15744f..5ab8dc0 100644 --- a/.gitignore +++ b/.gitignore @@ -4,3 +4,4 @@ /server/.env /server/**/*.sql /report.json +.vscode/settings.json diff --git a/server/internal/database/http_models.go b/server/internal/database/http_models.go index 470e675..e58e3fe 100644 --- a/server/internal/database/http_models.go +++ b/server/internal/database/http_models.go @@ -17,11 +17,12 @@ var AuthorSortByColumn = map[string]string{ } var ArticleSortByColumn = map[string]string{ - string(models.ArticleSortByTitle): "title", - string(models.ArticleSortBySlug): "slug", - string(models.ArticleSortByCreatedAt): "created_at", - string(models.ArticleSortByPublishedAt): "pub_date", - string(models.ArticleSortByStatus): "comment_status", + string(models.ArticleSortByTitle): "title", + string(models.ArticleSortBySlug): "slug", + string(models.ArticleSortByCreatedAt): "created_at", + string(models.ArticleSortByPublishedAt): "pub_date", + string(models.ArticleSortByStatus): "pub_date", + string(models.ArticleSortByCommentStatus): "comment_status", } var AuthorColumns = []string{"id", "display_name", "first_name", "last_name", "email"} @@ -105,12 +106,15 @@ func ScanArticle(rows *sql.Rows) (models.Article, error) { if pubDate.Valid { t := pubDate.Time a.PublishedAt = &t + a.Status = models.ArticleStatusPublished + } else { + a.Status = models.ArticleStatusDraft } if priority.Valid { a.IsFeatured = priority.Bool } if commentStatus.Valid { - a.Status = models.ArticleStatus(commentStatus.String) + a.CommentStatus = strings.TrimSpace(commentStatus.String) } if photoURL.Valid { a.PhotoURL = photoURL.String @@ -158,21 +162,33 @@ func FormatTags(categories []string) string { return string(buf) } -func statusToCommentStatus(status models.ArticleStatus) string { - return strings.TrimSpace(string(status)) +func defaultCommentStatus() string { + return "open" +} + +func normalizeCommentStatus(commentStatus string) string { + if v := strings.TrimSpace(commentStatus); v != "" { + return v + } + return 
defaultCommentStatus() } func ArticleInputToDBFields(body models.ArticleInput) []any { + var publishedAt any + if body.Status == models.ArticleStatusPublished { + publishedAt = time.Now().UTC().Format("2006-01-02 15:04:05") + } + return []any{ body.Title, nil, body.Content, FormatTags(body.Categories), - nil, + publishedAt, nil, body.IsFeatured, false, - statusToCommentStatus(body.Status), + normalizeCommentStatus(body.CommentStatus), body.PhotoURL, } } @@ -181,6 +197,8 @@ func ArticleToDBFields(body models.Article) []any { var publishedAt any if body.PublishedAt != nil { publishedAt = body.PublishedAt.UTC().Format("2006-01-02 15:04:05") + } else if body.Status == models.ArticleStatusPublished { + publishedAt = time.Now().UTC().Format("2006-01-02 15:04:05") } return []any{ body.Title, @@ -191,7 +209,7 @@ func ArticleToDBFields(body models.Article) []any { nil, body.IsFeatured, false, - statusToCommentStatus(body.Status), + normalizeCommentStatus(body.CommentStatus), body.PhotoURL, } } diff --git a/server/internal/handlers/article_params.go b/server/internal/handlers/article_params.go new file mode 100644 index 0000000..9957a62 --- /dev/null +++ b/server/internal/handlers/article_params.go @@ -0,0 +1,81 @@ +package handlers + +import ( + "fmt" + "strings" +) + +var allowedSubsectionsBySection = map[string]map[string]struct{}{ + "news": { + "academic-transformation": {}, + "politics": {}, + "transit": {}, + "crime-policy-violations": {}, + }, + "sports": { + "mens-basketball": {}, + "womens-basketball": {}, + "big-5": {}, + "philly-sports": {}, + "field-hockey": {}, + "mens-soccer": {}, + "womens-soccer": {}, + }, + "opinion": { + "science-tech": {}, + "from-the-editor": {}, + }, + "columns": { + "the-love-triangle": {}, + "tri-this-sweet-treat": {}, + }, + "entertainment": { + "movies": {}, + "music": {}, + "happening-in-philly": {}, + "cooking": {}, + "books": {}, + "gaming": {}, + "listicles": {}, + }, + "comics-puzzles": { + "political-cartoons": {}, + 
"crossword": {}, + "sudoku": {}, + }, +} + +func normalizeAndValidateArticleParams(params ArticleParams) (ArticleParams, error) { + params.AuthorID = strings.TrimSpace(params.AuthorID) + params.Section = strings.ToLower(strings.TrimSpace(params.Section)) + params.Subsection = strings.ToLower(strings.TrimSpace(params.Subsection)) + + if params.Section != "" { + if _, ok := allowedSubsectionsBySection[params.Section]; !ok { + return ArticleParams{}, fmt.Errorf("invalid section_slug") + } + } + + if params.Subsection != "" { + parentSection, ok := sectionForSubsection(params.Subsection) + if !ok { + return ArticleParams{}, fmt.Errorf("invalid subsection_slug") + } + if params.Section == "" { + params.Section = parentSection + } else if params.Section != parentSection { + return ArticleParams{}, fmt.Errorf("subsection_slug does not belong to section_slug") + } + } + + return params, nil +} + +func sectionForSubsection(subsection string) (string, bool) { + for section, subsections := range allowedSubsectionsBySection { + if _, ok := subsections[subsection]; ok { + return section, true + } + } + return "", false +} diff --git a/server/internal/handlers/handlers.go b/server/internal/handlers/handlers.go index 62de35c..50df972 100644 --- a/server/internal/handlers/handlers.go +++ b/server/internal/handlers/handlers.go @@ -6,6 +6,7 @@ import ( "net/http" "strconv" "strings" + "time" db "server/internal/database" "server/internal/models" @@ -212,7 +213,16 @@ func DeleteAuthor(conn *sql.DB) http.HandlerFunc { func GetAuthorArticles(conn *sql.DB) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { id := r.PathValue("id") - rows, err := queryArticles(r, conn, id) + params, err := normalizeAndValidateArticleParams(ArticleParams{ + AuthorID: id, + Section: r.URL.Query().Get("section_slug"), + Subsection: r.URL.Query().Get("subsection_slug"), + }) + if err != nil { + writeError(w, http.StatusBadRequest, err.Error()) + return + } + rows, err := queryArticles(r, 
conn, params) if err != nil { writeError(w, http.StatusInternalServerError, err.Error()) return @@ -232,8 +242,70 @@ func GetAuthorArticles(conn *sql.DB) http.HandlerFunc { // GET /v1/articles func GetArticles(conn *sql.DB) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { - authorID := r.URL.Query().Get("author_id") - rows, err := queryArticles(r, conn, authorID) + params, err := normalizeAndValidateArticleParams(ArticleParams{ + AuthorID: r.URL.Query().Get("author_id"), + Section: r.URL.Query().Get("section_slug"), + Subsection: r.URL.Query().Get("subsection_slug"), + }) + if err != nil { + writeError(w, http.StatusBadRequest, err.Error()) + return + } + rows, err := queryArticles(r, conn, params) + if err != nil { + writeError(w, http.StatusInternalServerError, err.Error()) + return + } + defer rows.Close() + articles, err := db.CollectArticles(rows) + if err != nil { + writeError(w, http.StatusInternalServerError, err.Error()) + return + } + writeJSON(w, http.StatusOK, articles) + } +} + +// GET /v1/sections/{section_slug}/articles +func GetSectionArticles(conn *sql.DB) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + params, err := normalizeAndValidateArticleParams(ArticleParams{ + AuthorID: r.URL.Query().Get("author_id"), + Section: r.PathValue("section_slug"), + Subsection: r.URL.Query().Get("subsection_slug"), + }) + if err != nil { + writeError(w, http.StatusBadRequest, err.Error()) + return + } + rows, err := queryArticles(r, conn, params) + if err != nil { + writeError(w, http.StatusInternalServerError, err.Error()) + return + } + defer rows.Close() + articles, err := db.CollectArticles(rows) + if err != nil { + writeError(w, http.StatusInternalServerError, err.Error()) + return + } + writeJSON(w, http.StatusOK, articles) + } +} + +// GET /v1/subsections/{subsection_slug}/articles +func GetSubsectionArticles(conn *sql.DB) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + 
params, err := normalizeAndValidateArticleParams(ArticleParams{ + AuthorID: r.URL.Query().Get("author_id"), + Section: r.URL.Query().Get("section_slug"), + Subsection: r.PathValue("subsection_slug"), + }) + if err != nil { + writeError(w, http.StatusBadRequest, err.Error()) + return + } + rows, err := queryArticles(r, conn, params) if err != nil { writeError(w, http.StatusInternalServerError, err.Error()) return @@ -249,22 +321,71 @@ func GetArticles(conn *sql.DB) http.HandlerFunc { } // queryArticles is shared by GetArticles and GetAuthorArticles. -func queryArticles(r *http.Request, conn *sql.DB, authorID string) (*sql.Rows, error) { +type ArticleParams struct { + AuthorID string + Section string + Subsection string +} + +// queryArticles is shared by GetArticles and GetAuthorArticles. +func queryArticles(r *http.Request, conn *sql.DB, params ArticleParams) (*sql.Rows, error) { q := r.URL.Query() limit := intParam(r, "limit", 20) offset := intParam(r, "offset", 0) var conditions []string var args []any - if authorID != "" { + if params.AuthorID != "" { conditions = append(conditions, "`id` IN (SELECT `articles_id` FROM `articles_authors` WHERE `author_id` = ?)") - args = append(args, authorID) + args = append(args, params.AuthorID) + } + + if params.Section != "" { + conditions = append(conditions, "LOWER(`tags`) LIKE ?") + args = append(args, "%"+strings.ToLower(params.Section)+"%") + } + + if params.Subsection != "" { + conditions = append(conditions, "LOWER(`tags`) LIKE ?") + args = append(args, "%"+strings.ToLower(params.Subsection)+"%") + } + + if status := strings.TrimSpace(q.Get("status")); status != "" { + switch strings.ToLower(status) { + case string(models.ArticleStatusDraft): + conditions = append(conditions, "`pub_date` IS NULL") + case string(models.ArticleStatusPublished): + conditions = append(conditions, "`pub_date` IS NOT NULL") + } + } + + if title := strings.TrimSpace(q.Get("title")); title != "" { + conditions = append(conditions, "`title` 
LIKE ?") + args = append(args, "%"+title+"%") + } + + if pub_date := db.ParsePublishedAt(q.Get("published_at")); pub_date != nil { + conditions = append(conditions, "`pub_date` >= ?") + args = append(args, pub_date.UTC().Format("2026-03-23 15:04:05")) + } + + if creation_date := db.ParsePublishedAt(q.Get("created_at")); creation_date != nil { + conditions = append(conditions, "`creation_date` >= ?") + args = append(args, creation_date.UTC().Format("2026-03-23 15:04:05")) + } + + if slug := strings.TrimSpace(q.Get("slug")); slug != "" { + conditions = append(conditions, "`slug` LIKE ?") + args = append(args, "%"+slug+"%") } query := "SELECT `id`, `title`, `description`, `text`, `tags`, `pub_date`, `mod_date`, `priority`, `breaking_news`, `comment_status`, `photo_url` FROM `articles`" if len(conditions) > 0 { query += " WHERE " + strings.Join(conditions, " AND ") } + if q.Get("sort_by") == "" { + query += " ORDER BY `id` DESC" + } query = db.BuildOrderLimit(query, q.Get("sort_by"), q.Get("sort_direction"), db.ArticleSortByColumn, limit, offset) return conn.QueryContext(r.Context(), query, args...) 
@@ -350,14 +471,15 @@ func PatchArticle(conn *sql.DB) http.HandlerFunc { var setCols []string var setArgs []any columnByJSONField := map[string]string{ - "title": "title", - "excerpt": "description", - "content": "text", - "categories": "tags", - "published_at": "pub_date", - "is_featured": "priority", - "status": "comment_status", - "photo_url": "photo_url", + "title": "title", + "excerpt": "description", + "content": "text", + "categories": "tags", + "published_at": "pub_date", + "is_featured": "priority", + "status": "pub_date", + "comment_status": "comment_status", + "photo_url": "photo_url", } for jsonField, column := range columnByJSONField { v, ok := body[jsonField] @@ -401,6 +523,24 @@ func PatchArticle(conn *sql.DB) http.HandlerFunc { writeError(w, http.StatusBadRequest, "status must be a string") return } + status := models.ArticleStatus(strings.TrimSpace(s)) + switch status { + case models.ArticleStatusDraft: + setCols = append(setCols, column) + setArgs = append(setArgs, nil) + case models.ArticleStatusPublished: + setCols = append(setCols, column) + setArgs = append(setArgs, time.Now().UTC().Format("2006-01-02 15:04:05")) + default: + writeError(w, http.StatusBadRequest, "status must be draft or published") + return + } + case "comment_status": + s, ok := v.(string) + if !ok { + writeError(w, http.StatusBadRequest, "comment_status must be a string") + return + } setCols = append(setCols, column) setArgs = append(setArgs, strings.TrimSpace(s)) default: diff --git a/server/internal/models/types.go b/server/internal/models/types.go index 4cbd694..b9311b6 100644 --- a/server/internal/models/types.go +++ b/server/internal/models/types.go @@ -20,11 +20,12 @@ const ( type ArticleSortBy string const ( - ArticleSortByTitle ArticleSortBy = "title" - ArticleSortBySlug ArticleSortBy = "slug" - ArticleSortByCreatedAt ArticleSortBy = "created_at" - ArticleSortByPublishedAt ArticleSortBy = "published_at" - ArticleSortByStatus ArticleSortBy = "status" + 
ArticleSortByTitle ArticleSortBy = "title" + ArticleSortBySlug ArticleSortBy = "slug" + ArticleSortByCreatedAt ArticleSortBy = "created_at" + ArticleSortByPublishedAt ArticleSortBy = "published_at" + ArticleSortByStatus ArticleSortBy = "status" + ArticleSortByCommentStatus ArticleSortBy = "comment_status" ) type MediaSortBy string @@ -83,52 +84,56 @@ type AuthorListParams struct { } type Article struct { - Title string `json:"title"` - ID int64 `json:"id"` - Authors []AuthorOverview `json:"authors"` - Content string `json:"content"` - Categories []string `json:"categories"` - Excerpt string `json:"excerpt"` - Slug string `json:"slug"` - PhotoURL string `json:"photo_url"` - IsFeatured bool `json:"is_featured"` - Status ArticleStatus `json:"status"` - CreatedAt *time.Time `json:"created_at,omitempty"` - PublishedAt *time.Time `json:"published_at,omitempty"` + Title string `json:"title"` + ID int64 `json:"id"` + Authors []AuthorOverview `json:"authors"` + Content string `json:"content"` + Categories []string `json:"categories"` + Excerpt string `json:"excerpt"` + Slug string `json:"slug"` + PhotoURL string `json:"photo_url"` + IsFeatured bool `json:"is_featured"` + Status ArticleStatus `json:"status"` + CommentStatus string `json:"comment_status"` + CreatedAt *time.Time `json:"created_at,omitempty"` + PublishedAt *time.Time `json:"published_at,omitempty"` } type ArticleOverview struct { - Title string `json:"title"` - ID int64 `json:"id"` - Authors []AuthorOverview `json:"authors"` - PublishedAt *time.Time `json:"published_at,omitempty"` - Categories []string `json:"categories"` - Excerpt string `json:"excerpt"` - Slug string `json:"slug"` - Status ArticleStatus `json:"status"` - PhotoURL string `json:"photo_url"` - IsFeatured bool `json:"is_featured"` + Title string `json:"title"` + ID int64 `json:"id"` + Authors []AuthorOverview `json:"authors"` + PublishedAt *time.Time `json:"published_at,omitempty"` + Categories []string `json:"categories"` + Excerpt string 
`json:"excerpt"` + Slug string `json:"slug"` + Status ArticleStatus `json:"status"` + CommentStatus string `json:"comment_status"` + PhotoURL string `json:"photo_url"` + IsFeatured bool `json:"is_featured"` } type ArticleInput struct { - Title string `json:"title"` - Authors []int64 `json:"authors"` - Content string `json:"content"` - Categories []string `json:"categories"` - PhotoURL string `json:"photo_url"` - IsFeatured bool `json:"is_featured"` - Status ArticleStatus `json:"status"` + Title string `json:"title"` + Authors []int64 `json:"authors"` + Content string `json:"content"` + Categories []string `json:"categories"` + PhotoURL string `json:"photo_url"` + IsFeatured bool `json:"is_featured"` + Status ArticleStatus `json:"status"` + CommentStatus string `json:"comment_status,omitempty"` } type ArticlePatch struct { - Title *string `json:"title,omitempty"` - Authors *[]int64 `json:"authors,omitempty"` - Content *string `json:"content,omitempty"` - Categories *[]string `json:"categories,omitempty"` - Excerpt *string `json:"excerpt,omitempty"` - PhotoURL *string `json:"photo_url,omitempty"` - IsFeatured *bool `json:"is_featured,omitempty"` - Status *ArticleStatus `json:"status,omitempty"` + Title *string `json:"title,omitempty"` + Authors *[]int64 `json:"authors,omitempty"` + Content *string `json:"content,omitempty"` + Categories *[]string `json:"categories,omitempty"` + Excerpt *string `json:"excerpt,omitempty"` + PhotoURL *string `json:"photo_url,omitempty"` + IsFeatured *bool `json:"is_featured,omitempty"` + Status *ArticleStatus `json:"status,omitempty"` + CommentStatus *string `json:"comment_status,omitempty"` } type ArticleListParams struct { diff --git a/server/internal/routes/routes.go b/server/internal/routes/routes.go index b6dc32c..f5e9de5 100644 --- a/server/internal/routes/routes.go +++ b/server/internal/routes/routes.go @@ -1,8 +1,8 @@ package routes import ( - "net/http" "database/sql" + "net/http" "server/internal/handlers" ) @@ -18,26 +18,26 
@@ func Register(mux *http.ServeMux, conn *sql.DB) { mux.HandleFunc("GET /v1/authors/{id}/articles", handlers.GetAuthorArticles(conn)) // Articles - mux.HandleFunc("GET /v1/articles", handlers.GetArticles(conn)) - mux.HandleFunc("GET /v1/articles/{id}", handlers.GetArticle(conn)) - mux.HandleFunc("POST /v1/articles", handlers.PostArticles(conn)) - mux.HandleFunc("PUT /v1/articles/{id}", handlers.PutArticle(conn)) - mux.HandleFunc("PATCH /v1/articles/{id}", handlers.PatchArticle(conn)) - mux.HandleFunc("DELETE /v1/articles/{id}", handlers.DeleteArticle(conn)) + mux.HandleFunc("GET /v1/articles", handlers.GetArticles(conn)) + mux.HandleFunc("GET /v1/articles/{id}", handlers.GetArticle(conn)) + mux.HandleFunc("POST /v1/articles", handlers.PostArticles(conn)) + mux.HandleFunc("PUT /v1/articles/{id}", handlers.PutArticle(conn)) + mux.HandleFunc("PATCH /v1/articles/{id}", handlers.PatchArticle(conn)) + mux.HandleFunc("DELETE /v1/articles/{id}", handlers.DeleteArticle(conn)) // Sections - mux.HandleFunc("GET /v1/sections/{section_slug}/articles", handlers.Users) - mux.HandleFunc("GET /v1/subsections/{subsection_slug}/articles", handlers.Users) + mux.HandleFunc("GET /v1/sections/{section_slug}/articles", handlers.GetSectionArticles(conn)) + mux.HandleFunc("GET /v1/subsections/{subsection_slug}/articles", handlers.GetSubsectionArticles(conn)) // Media - mux.HandleFunc("GET /v1/media", handlers.Users) - mux.HandleFunc("POST /v1/media", handlers.Users) - mux.HandleFunc("GET /v1/media/{id}", handlers.Users) - mux.HandleFunc("PUT /v1/media/{id}", handlers.Users) - mux.HandleFunc("PATCH /v1/media/{id}", handlers.Users) - mux.HandleFunc("DELETE /v1/media/{id}", handlers.Users) - mux.HandleFunc("GET /v1/media/gallery", handlers.Users) + mux.HandleFunc("GET /v1/media", handlers.Users) + mux.HandleFunc("POST /v1/media", handlers.Users) + mux.HandleFunc("GET /v1/media/{id}", handlers.Users) + mux.HandleFunc("PUT /v1/media/{id}", handlers.Users) + mux.HandleFunc("PATCH 
/v1/media/{id}", handlers.Users) + mux.HandleFunc("DELETE /v1/media/{id}", handlers.Users) + mux.HandleFunc("GET /v1/media/gallery", handlers.Users) // Homepage - mux.HandleFunc("GET /v1/homepage", handlers.Users) + mux.HandleFunc("GET /v1/homepage", handlers.Users) } From b78196135bba7217321197c53768da8dae8132de Mon Sep 17 00:00:00 2001 From: ssavutu Date: Fri, 3 Apr 2026 01:32:31 -0400 Subject: [PATCH 10/10] revise author overview, add author-article functionality, revise schema, add postman routes --- .../triangle-cms.postman_collection.v2.0.json | 538 ++++++++++++++++++ scripts/generate_wordpress_sql.sh | 109 +++- server/internal/database/http_models.go | 147 ++++- server/internal/database/http_models_test.go | 32 ++ server/internal/handlers/handlers.go | 124 +++- server/internal/models/types.go | 2 - 6 files changed, 925 insertions(+), 27 deletions(-) create mode 100644 postman/triangle-cms.postman_collection.v2.0.json create mode 100644 server/internal/database/http_models_test.go diff --git a/postman/triangle-cms.postman_collection.v2.0.json b/postman/triangle-cms.postman_collection.v2.0.json new file mode 100644 index 0000000..43cad66 --- /dev/null +++ b/postman/triangle-cms.postman_collection.v2.0.json @@ -0,0 +1,538 @@ +{ + "info": { + "_postman_id": "9fbc9d24-79f7-4f16-8037-c6e5f02ad4b8", + "name": "Triangle CMS API", + "description": "Routes from server/internal/routes/routes.go", + "schema": "https://schema.getpostman.com/json/collection/v2.0.0/collection.json" + }, + "item": [ + { + "name": "Users", + "item": [ + { + "name": "GET /users", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "https://localhost:8080/users", + "protocol": "https", + "host": [ + "localhost" + ], + "port": "8080", + "path": [ + "users" + ] + } + }, + "response": [] + } + ] + }, + { + "name": "Authors", + "item": [ + { + "name": "GET /v1/authors", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "https://localhost:8080/v1/authors", + 
"protocol": "https", + "host": [ + "localhost" + ], + "port": "8080", + "path": [ + "v1", + "authors" + ] + } + }, + "response": [] + }, + { + "name": "POST /v1/authors", + "request": { + "method": "POST", + "header": [], + "url": { + "raw": "https://localhost:8080/v1/authors", + "protocol": "https", + "host": [ + "localhost" + ], + "port": "8080", + "path": [ + "v1", + "authors" + ] + } + }, + "response": [] + }, + { + "name": "GET /v1/authors/1", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "https://localhost:8080/v1/authors/1", + "protocol": "https", + "host": [ + "localhost" + ], + "port": "8080", + "path": [ + "v1", + "authors", + "1" + ] + } + }, + "response": [] + }, + { + "name": "PUT /v1/authors/1", + "request": { + "method": "PUT", + "header": [], + "url": { + "raw": "https://localhost:8080/v1/authors/1", + "protocol": "https", + "host": [ + "localhost" + ], + "port": "8080", + "path": [ + "v1", + "authors", + "1" + ] + } + }, + "response": [] + }, + { + "name": "PATCH /v1/authors/1", + "request": { + "method": "PATCH", + "header": [], + "url": { + "raw": "https://localhost:8080/v1/authors/1", + "protocol": "https", + "host": [ + "localhost" + ], + "port": "8080", + "path": [ + "v1", + "authors", + "1" + ] + } + }, + "response": [] + }, + { + "name": "DELETE /v1/authors/1", + "request": { + "method": "DELETE", + "header": [], + "url": { + "raw": "https://localhost:8080/v1/authors/1", + "protocol": "https", + "host": [ + "localhost" + ], + "port": "8080", + "path": [ + "v1", + "authors", + "1" + ] + } + }, + "response": [] + }, + { + "name": "GET /v1/authors/1/articles", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "https://localhost:8080/v1/authors/1/articles", + "protocol": "https", + "host": [ + "localhost" + ], + "port": "8080", + "path": [ + "v1", + "authors", + "1", + "articles" + ] + } + }, + "response": [] + } + ] + }, + { + "name": "Articles", + "item": [ + { + "name": "GET /v1/articles", + 
"request": { + "method": "GET", + "header": [], + "url": { + "raw": "https://localhost:8080/v1/articles", + "protocol": "https", + "host": [ + "localhost" + ], + "port": "8080", + "path": [ + "v1", + "articles" + ] + } + }, + "response": [] + }, + { + "name": "GET /v1/articles/1", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "https://localhost:8080/v1/articles/1", + "protocol": "https", + "host": [ + "localhost" + ], + "port": "8080", + "path": [ + "v1", + "articles", + "1" + ] + } + }, + "response": [] + }, + { + "name": "POST /v1/articles", + "request": { + "method": "POST", + "header": [], + "url": { + "raw": "https://localhost:8080/v1/articles", + "protocol": "https", + "host": [ + "localhost" + ], + "port": "8080", + "path": [ + "v1", + "articles" + ] + } + }, + "response": [] + }, + { + "name": "PUT /v1/articles/1", + "request": { + "method": "PUT", + "header": [], + "url": { + "raw": "https://localhost:8080/v1/articles/1", + "protocol": "https", + "host": [ + "localhost" + ], + "port": "8080", + "path": [ + "v1", + "articles", + "1" + ] + } + }, + "response": [] + }, + { + "name": "PATCH /v1/articles/1", + "request": { + "method": "PATCH", + "header": [], + "url": { + "raw": "https://localhost:8080/v1/articles/1", + "protocol": "https", + "host": [ + "localhost" + ], + "port": "8080", + "path": [ + "v1", + "articles", + "1" + ] + } + }, + "response": [] + }, + { + "name": "DELETE /v1/articles/1", + "request": { + "method": "DELETE", + "header": [], + "url": { + "raw": "https://localhost:8080/v1/articles/1", + "protocol": "https", + "host": [ + "localhost" + ], + "port": "8080", + "path": [ + "v1", + "articles", + "1" + ] + } + }, + "response": [] + } + ] + }, + { + "name": "Sections", + "item": [ + { + "name": "GET /v1/sections/news/articles", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "https://localhost:8080/v1/sections/news/articles", + "protocol": "https", + "host": [ + "localhost" + ], + "port": "8080", 
+ "path": [ + "v1", + "sections", + "news", + "articles" + ] + } + }, + "response": [] + }, + { + "name": "GET /v1/subsections/politics/articles", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "https://localhost:8080/v1/subsections/politics/articles", + "protocol": "https", + "host": [ + "localhost" + ], + "port": "8080", + "path": [ + "v1", + "subsections", + "politics", + "articles" + ] + } + }, + "response": [] + } + ] + }, + { + "name": "Media", + "item": [ + { + "name": "GET /v1/media", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "https://localhost:8080/v1/media", + "protocol": "https", + "host": [ + "localhost" + ], + "port": "8080", + "path": [ + "v1", + "media" + ] + } + }, + "response": [] + }, + { + "name": "POST /v1/media", + "request": { + "method": "POST", + "header": [], + "url": { + "raw": "https://localhost:8080/v1/media", + "protocol": "https", + "host": [ + "localhost" + ], + "port": "8080", + "path": [ + "v1", + "media" + ] + } + }, + "response": [] + }, + { + "name": "GET /v1/media/1", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "https://localhost:8080/v1/media/1", + "protocol": "https", + "host": [ + "localhost" + ], + "port": "8080", + "path": [ + "v1", + "media", + "1" + ] + } + }, + "response": [] + }, + { + "name": "PUT /v1/media/1", + "request": { + "method": "PUT", + "header": [], + "url": { + "raw": "https://localhost:8080/v1/media/1", + "protocol": "https", + "host": [ + "localhost" + ], + "port": "8080", + "path": [ + "v1", + "media", + "1" + ] + } + }, + "response": [] + }, + { + "name": "PATCH /v1/media/1", + "request": { + "method": "PATCH", + "header": [], + "url": { + "raw": "https://localhost:8080/v1/media/1", + "protocol": "https", + "host": [ + "localhost" + ], + "port": "8080", + "path": [ + "v1", + "media", + "1" + ] + } + }, + "response": [] + }, + { + "name": "DELETE /v1/media/1", + "request": { + "method": "DELETE", + "header": [], + "url": { + 
"raw": "https://localhost:8080/v1/media/1", + "protocol": "https", + "host": [ + "localhost" + ], + "port": "8080", + "path": [ + "v1", + "media", + "1" + ] + } + }, + "response": [] + }, + { + "name": "GET /v1/media/gallery", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "https://localhost:8080/v1/media/gallery", + "protocol": "https", + "host": [ + "localhost" + ], + "port": "8080", + "path": [ + "v1", + "media", + "gallery" + ] + } + }, + "response": [] + } + ] + }, + { + "name": "Homepage", + "item": [ + { + "name": "GET /v1/homepage", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "https://localhost:8080/v1/homepage", + "protocol": "https", + "host": [ + "localhost" + ], + "port": "8080", + "path": [ + "v1", + "homepage" + ] + } + }, + "response": [] + } + ] + } + ] +} diff --git a/scripts/generate_wordpress_sql.sh b/scripts/generate_wordpress_sql.sh index 1c5f3cd..5de0895 100755 --- a/scripts/generate_wordpress_sql.sh +++ b/scripts/generate_wordpress_sql.sh @@ -5,7 +5,11 @@ ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" is_valid_source_dir() { local dir="$1" - [[ -f "$dir/articles.sql" && -f "$dir/articles_authors.sql" ]] + [[ -f "$dir/articles.sql" && -f "$dir/articles_authors.sql" ]] \ + || [[ -f "$dir/article_output.json" ]] \ + || [[ -f "$dir/merged_auth_output.json" ]] \ + || [[ -f "$dir/logs/article_output.json" ]] \ + || [[ -f "$dir/logs/merged_auth_output.json" ]] } resolve_source_dir() { @@ -31,15 +35,16 @@ Could not determine WordPress ETL SQL source directory. 
Expected one of: - first script argument - WP_ETL_SQL_DIR environment variable - - ../wordpress-etl or ../wordpress-etl/logs/sql (relative to triangle-cms) - - ./wordpress-etl or ./wordpress-etl/logs/sql (inside triangle-cms) + - ../wordpress-etl, ../wordpress-etl/logs, or ../wordpress-etl/logs/sql (relative to triangle-cms) + - ./wordpress-etl, ./wordpress-etl/logs, or ./wordpress-etl/logs/sql (inside triangle-cms) -The resolved source must contain: - - articles.sql - - articles_authors.sql +The resolved source must contain either: + - legacy SQL files: articles.sql and articles_authors.sql + - ETL JSON logs: article_output.json and/or merged_auth_output.json Example: ./scripts/generate_wordpress_sql.sh ../wordpress-etl + ./scripts/generate_wordpress_sql.sh ../wordpress-etl/logs ./scripts/generate_wordpress_sql.sh ../wordpress-etl/logs/sql WP_ETL_SQL_DIR=../wordpress-etl ./scripts/generate_wordpress_sql.sh ERR @@ -58,8 +63,10 @@ if [[ -n "${WP_ETL_SQL_DIR:-}" ]]; then fi SRC_HINTS+=( "$ROOT_DIR/../wordpress-etl" + "$ROOT_DIR/../wordpress-etl/logs" "$ROOT_DIR/../wordpress-etl/logs/sql" "$ROOT_DIR/wordpress-etl" + "$ROOT_DIR/wordpress-etl/logs" "$ROOT_DIR/wordpress-etl/logs/sql" ) @@ -95,19 +102,59 @@ fi mkdir -p "$OUT_DIR" -# Authors: source CREATE statement from ETL intent, but skip malformed INSERT rows. +first_existing_file() { + local candidate + for candidate in "$@"; do + if [[ -n "$candidate" && -f "$candidate" ]]; then + echo "$candidate" + return 0 + fi + done + return 1 +} + +SRC_PARENT="$(cd "$SRC_DIR/.." && pwd)" +SRC_GRANDPARENT="$(cd "$SRC_DIR/../.." 
&& pwd)" + +ARTICLES_SQL_FILE="$(first_existing_file "$SRC_DIR/articles.sql" "$SRC_PARENT/articles.sql" "$SRC_GRANDPARENT/articles.sql" || true)" +ARTICLE_AUTHORS_SQL_FILE="$(first_existing_file "$SRC_DIR/articles_authors.sql" "$SRC_PARENT/articles_authors.sql" "$SRC_GRANDPARENT/articles_authors.sql" || true)" +ARTICLES_JSON_FILE="$(first_existing_file "$SRC_DIR/article_output.json" "$SRC_DIR/logs/article_output.json" "$SRC_PARENT/article_output.json" "$SRC_PARENT/logs/article_output.json" "$SRC_GRANDPARENT/article_output.json" "$SRC_GRANDPARENT/logs/article_output.json" || true)" +AUTHORS_JSON_FILE="$(first_existing_file "$SRC_DIR/auth_output.json" "$SRC_DIR/merged_auth_output.json" "$SRC_DIR/gauth_output.json" "$SRC_DIR/logs/auth_output.json" "$SRC_DIR/logs/merged_auth_output.json" "$SRC_DIR/logs/gauth_output.json" "$SRC_PARENT/auth_output.json" "$SRC_PARENT/merged_auth_output.json" "$SRC_PARENT/gauth_output.json" "$SRC_PARENT/logs/auth_output.json" "$SRC_PARENT/logs/merged_auth_output.json" "$SRC_PARENT/logs/gauth_output.json" "$SRC_GRANDPARENT/auth_output.json" "$SRC_GRANDPARENT/merged_auth_output.json" "$SRC_GRANDPARENT/gauth_output.json" "$SRC_GRANDPARENT/logs/auth_output.json" "$SRC_GRANDPARENT/logs/merged_auth_output.json" "$SRC_GRANDPARENT/logs/gauth_output.json" || true)" + +# Authors: source CREATE statement from ETL intent. { cat <<'SQL' DROP TABLE IF EXISTS authors; CREATE TABLE authors ( id BIGINT PRIMARY KEY, - display_name VARCHAR(255) NOT NULL, + display_name VARCHAR(255), first_name VARCHAR(255), last_name VARCHAR(255), email VARCHAR(255), login VARCHAR(255) ); SQL + + if [[ -n "$AUTHORS_JSON_FILE" ]]; then + jq -r ' + def sqlq: + if . 
== null then + "NULL" + else + "'"'"'" + (tostring | gsub("\\\\";"\\\\\\\\") | gsub("'"'"'";"'"'"''"'"'")) + "'"'"'" + end; + to_entries + | map(.value) + | map(select(.id != null)) + | sort_by(.id) + | .[] + | "INSERT INTO authors (id, display_name, first_name, last_name, email, login) VALUES (\(.id), \(.display_name | sqlq), \(.first_name | sqlq), \(.last_name | sqlq), \(.email | sqlq), \(.login | sqlq));" + ' "$AUTHORS_JSON_FILE" + elif [[ -n "$ARTICLES_SQL_FILE" && -f "$(dirname "$ARTICLES_SQL_FILE")/authors.sql" ]]; then + grep '^INSERT INTO authors ' "$(dirname "$ARTICLES_SQL_FILE")/authors.sql" + else + echo "-- No author source found; generated schema only." + fi } > "$OUT_DIR/01-authors.sql" # Articles: normalize column names expected by the CMS handlers. @@ -134,8 +181,32 @@ CREATE TABLE articles ( ); SQL - grep '^INSERT INTO articles ' "$SRC_DIR/articles.sql" \ - | perl -pe 's/`authorIDs`/`author_ids`/g; s/`breakingNews`/`breaking_news`/g; s/`commentStatus`/`comment_status`/g; s/`featuredImgID`/`featured_img_id`/g; s/`modDate`/`mod_date`/g; s/`photoURL`/`photo_url`/g; s/`pubDate`/`pub_date`/g; s/'\''0000-00-00 00:00:00'\''/NULL/g' + if [[ -n "$ARTICLES_SQL_FILE" ]]; then + grep '^INSERT INTO articles ' "$ARTICLES_SQL_FILE" \ + | perl -pe 's/`authorIDs`/`author_ids`/g; s/`breakingNews`/`breaking_news`/g; s/`commentStatus`/`comment_status`/g; s/`featuredImgID`/`featured_img_id`/g; s/`modDate`/`mod_date`/g; s/`photoURL`/`photo_url`/g; s/`pubDate`/`pub_date`/g; s/'\''0000-00-00 00:00:00'\''/NULL/g' + elif [[ -n "$ARTICLES_JSON_FILE" ]]; then + jq -r ' + def sqlq: + if . == null then + "NULL" + else + "'"'"'" + (tostring | gsub("\\\\";"\\\\\\\\") | gsub("'"'"'";"'"'"''"'"'")) + "'"'"'" + end; + def dt: + if . == null or . == "" or . == "0000-00-00 00:00:00" then "NULL" else sqlq end; + def jarr: + if . 
== null then "NULL" else (tojson | sqlq) end; + + to_entries + | map(.value) + | map(select(.id != null)) + | sort_by(.id) + | .[] + | "INSERT INTO articles (id, author_ids, authors, breaking_news, comment_status, description, featured_img_id, priority, mod_date, photo_url, pub_date, tags, categories, metadata, `text`, title) VALUES (\(.id), \((.authorIDs // []) | jarr), \((.authors // []) | jarr), \(if .breakingNews then 1 else 0 end), \(.commentStatus | sqlq), \(.description | sqlq), \(.featuredImgID | sqlq), \(if .priority then 1 else 0 end), \(.modDate | dt), \(.photoURL | sqlq), \(.pubDate | dt), \((.tags // []) | jarr), \((.categories // []) | jarr), \((.metadata // {}) | jarr), \(.text | sqlq), \(.title | sqlq));" + ' "$ARTICLES_JSON_FILE" + else + echo "-- No articles source found; generated schema only." + fi } > "$OUT_DIR/02-articles.sql" # Article-author joins: fix the broken CREATE TABLE statement name. @@ -149,7 +220,23 @@ CREATE TABLE articles_authors ( ); SQL - grep '^INSERT INTO articles_authors ' "$SRC_DIR/articles_authors.sql" + if [[ -n "$ARTICLE_AUTHORS_SQL_FILE" ]]; then + grep '^INSERT INTO articles_authors ' "$ARTICLE_AUTHORS_SQL_FILE" + elif [[ -n "$ARTICLES_JSON_FILE" ]]; then + jq -r ' + [ + to_entries[] + | .value as $article + | ($article.authorIDs // [])[] + | select(type == "number") + | {author_id: ., articles_id: $article.id} + ] + | to_entries[] + | "INSERT INTO articles_authors (id, author_id, articles_id) VALUES (\(.key + 1), \(.value.author_id), \(.value.articles_id));" + ' "$ARTICLES_JSON_FILE" + else + echo "-- No article-author source found; generated schema only." + fi } > "$OUT_DIR/03-articles-authors.sql" # SEO: source CREATE statement from ETL intent, but skip malformed INSERT rows. 
diff --git a/server/internal/database/http_models.go b/server/internal/database/http_models.go index e58e3fe..f858baa 100644 --- a/server/internal/database/http_models.go +++ b/server/internal/database/http_models.go @@ -1,8 +1,10 @@ package database import ( + "context" "database/sql" "encoding/json" + "fmt" "strconv" "strings" "time" @@ -12,8 +14,6 @@ import ( var AuthorSortByColumn = map[string]string{ string(models.AuthorSortByDisplayName): "display_name", - string(models.AuthorSortByCreatedAt): "created_at", - string(models.AuthorSortByUpdatedAt): "updated_at", } var ArticleSortByColumn = map[string]string{ @@ -52,13 +52,17 @@ func BuildOrderLimit(query, sortBy, sortDir string, sortColumnMap map[string]str func ScanAuthor(rows *sql.Rows) (models.Author, error) { var a models.Author + var displayName sql.NullString var firstName sql.NullString var lastName sql.NullString var email sql.NullString - err := rows.Scan(&a.ID, &a.DisplayName, &firstName, &lastName, &email) + err := rows.Scan(&a.ID, &displayName, &firstName, &lastName, &email) if err != nil { return models.Author{}, err } + if displayName.Valid { + a.DisplayName = displayName.String + } if firstName.Valid { a.FirstName = firstName.String } @@ -71,6 +75,19 @@ func ScanAuthor(rows *sql.Rows) (models.Author, error) { return a, nil } +func ScanAuthorOverview(rows *sql.Rows) (models.AuthorOverview, error) { + var a models.AuthorOverview + var displayName sql.NullString + err := rows.Scan(&a.ID, &displayName) + if err != nil { + return models.AuthorOverview{}, err + } + if displayName.Valid { + a.DisplayName = displayName.String + } + return a, nil +} + func ScanArticle(rows *sql.Rows) (models.Article, error) { var ( a models.Article @@ -134,6 +151,130 @@ func CollectArticles(rows *sql.Rows) ([]models.Article, error) { return articles, rows.Err() } +func LoadAuthorsByArticleIDs(ctx context.Context, conn *sql.DB, articleIDs []int64) (map[int64][]models.AuthorOverview, error) { + authorsByArticle := 
make(map[int64][]models.AuthorOverview, len(articleIDs)) + if len(articleIDs) == 0 { + return authorsByArticle, nil + } + + for _, articleID := range articleIDs { + authorsByArticle[articleID] = []models.AuthorOverview{} + } + + placeholders := make([]string, len(articleIDs)) + args := make([]any, len(articleIDs)) + for i, id := range articleIDs { + placeholders[i] = "?" + args[i] = id + } + + query := "SELECT aa.articles_id, a.id, a.display_name " + + "FROM articles_authors aa " + + "JOIN authors a ON a.id = aa.author_id " + + "WHERE aa.articles_id IN (" + strings.Join(placeholders, ",") + ") " + + "ORDER BY aa.articles_id ASC, aa.id ASC" + + rows, err := conn.QueryContext(ctx, query, args...) + if err != nil { + return nil, err + } + defer rows.Close() + + for rows.Next() { + var articleID int64 + var author models.AuthorOverview + var displayName sql.NullString + + if err := rows.Scan(&articleID, &author.ID, &displayName); err != nil { + return nil, err + } + if displayName.Valid { + author.DisplayName = displayName.String + } + + authorsByArticle[articleID] = append(authorsByArticle[articleID], author) + } + + if err := rows.Err(); err != nil { + return nil, err + } + + return authorsByArticle, nil +} + +func PopulateArticleAuthors(ctx context.Context, conn *sql.DB, articles []models.Article) error { + if len(articles) == 0 { + return nil + } + + articleIDs := make([]int64, 0, len(articles)) + for _, article := range articles { + articleIDs = append(articleIDs, article.ID) + } + + authorsByArticle, err := LoadAuthorsByArticleIDs(ctx, conn, articleIDs) + if err != nil { + return err + } + + for i := range articles { + authors := authorsByArticle[articles[i].ID] + if authors == nil { + articles[i].Authors = []models.AuthorOverview{} + continue + } + articles[i].Authors = authors + } + + return nil +} + +func nextArticleAuthorLinkIDTx(ctx context.Context, tx *sql.Tx) (int64, error) { + var nextID int64 + if err := tx.QueryRowContext(ctx, "SELECT COALESCE(MAX(id), 
0) + 1 FROM articles_authors").Scan(&nextID); err != nil { + return 0, err + } + return nextID, nil +} + +func ReplaceArticleAuthors(ctx context.Context, conn *sql.DB, articleID int64, authorIDs []int64) error { + tx, err := conn.BeginTx(ctx, nil) + if err != nil { + return err + } + defer func() { + // Rollback after Commit is a no-op, so rolling back unconditionally + // guarantees cleanup even where a shadowed err would skip a guard. + _ = tx.Rollback() + }() + + if _, err = tx.ExecContext(ctx, "DELETE FROM articles_authors WHERE articles_id = ?", articleID); err != nil { + return err + } + + if len(authorIDs) > 0 { + nextID, err := nextArticleAuthorLinkIDTx(ctx, tx) + if err != nil { + return err + } + + for i, authorID := range authorIDs { + linkID := nextID + int64(i) + if _, err = tx.ExecContext( + ctx, + "INSERT INTO articles_authors (id, author_id, articles_id) VALUES (?, ?, ?)", + linkID, + authorID, + articleID, + ); err != nil { + return fmt.Errorf("insert article author relation %s: %w", strconv.FormatInt(authorID, 10), err) + } + } + } + + return tx.Commit() +} + func ParsePublishedAt(value string) *time.Time { if strings.TrimSpace(value) == "" { return nil diff --git a/server/internal/database/http_models_test.go b/server/internal/database/http_models_test.go new file mode 100644 index 0000000..12cccac --- /dev/null +++ b/server/internal/database/http_models_test.go @@ -0,0 +1,32 @@ +package database + +import ( + "strings" + "testing" +) + +func TestAuthorSortByColumn_UsesExistingColumns(t *testing.T) { + for key, column := range AuthorSortByColumn { + if column == "created_at" || column == "updated_at" { + t.Fatalf("author sort key %q maps to non-existent column %q", key, column) + } + } +} + +func TestBuildOrderLimit_UnsupportedAuthorSortByIgnored(t *testing.T) { + query := BuildOrderLimit( + "SELECT `id` FROM `authors` ORDER BY `id` DESC", + "created_at", + "desc", + AuthorSortByColumn, + 20, + 0, + ) + + if strings.Count(query, "ORDER BY") != 1 { + t.Fatalf("expected only default ORDER BY clause, got query: %s", query) + } + if strings.Contains(query,
"created_at") { + t.Fatalf("query should not include unsupported sort column: %s", query) + } +} diff --git a/server/internal/handlers/handlers.go b/server/internal/handlers/handlers.go index 50df972..b08b05a 100644 --- a/server/internal/handlers/handlers.go +++ b/server/internal/handlers/handlers.go @@ -52,6 +52,42 @@ func writeError(w http.ResponseWriter, status int, msg string) { writeJSON(w, status, map[string]string{"error": msg}) } +func parseArticleID(pathValue string) (int64, error) { + return strconv.ParseInt(strings.TrimSpace(pathValue), 10, 64) +} + +func authorIDsFromOverviews(authors []models.AuthorOverview) []int64 { + ids := make([]int64, 0, len(authors)) + for _, author := range authors { + ids = append(ids, author.ID) + } + return ids +} + +func parseAuthorIDs(raw any) ([]int64, error) { + arr, ok := raw.([]any) + if !ok { + return nil, strconv.ErrSyntax + } + + authorIDs := make([]int64, 0, len(arr)) + for _, item := range arr { + switch v := item.(type) { + case float64: + if v != float64(int64(v)) { + return nil, strconv.ErrSyntax + } + authorIDs = append(authorIDs, int64(v)) + case int64: + authorIDs = append(authorIDs, v) + default: + return nil, strconv.ErrSyntax + } + } + + return authorIDs, nil +} + // GET /v1/authors func GetAuthors(conn *sql.DB) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { @@ -59,6 +95,10 @@ func GetAuthors(conn *sql.DB) http.HandlerFunc { limit := intParam(r, "limit", 20) offset := intParam(r, "offset", 0) articleID := intParam(r, "article_id", 0) + sortBy := q.Get("sort_by") + if _, ok := db.AuthorSortByColumn[sortBy]; !ok { + sortBy = "" + } var conditions []string var args []any @@ -68,14 +108,14 @@ func GetAuthors(conn *sql.DB) http.HandlerFunc { args = append(args, articleID) } - query := "SELECT `id`, `display_name`, `first_name`, `last_name`, `email` FROM `authors`" + query := "SELECT `id`, `display_name` FROM `authors`" if len(conditions) > 0 { query += " WHERE " + 
strings.Join(conditions, " AND ") } - if q.Get("sort_by") == "" { + if sortBy == "" { query += " ORDER BY `id` DESC" } - query = db.BuildOrderLimit(query, q.Get("sort_by"), q.Get("sort_direction"), db.AuthorSortByColumn, limit, offset) + query = db.BuildOrderLimit(query, sortBy, q.Get("sort_direction"), db.AuthorSortByColumn, limit, offset) rows, err := conn.QueryContext(r.Context(), query, args...) if err != nil { @@ -84,9 +124,9 @@ func GetAuthors(conn *sql.DB) http.HandlerFunc { } defer rows.Close() - var authors []models.Author + authors := make([]models.AuthorOverview, 0) for rows.Next() { - a, err := db.ScanAuthor(rows) + a, err := db.ScanAuthorOverview(rows) if err != nil { writeError(w, http.StatusInternalServerError, err.Error()) return @@ -233,6 +273,10 @@ func GetAuthorArticles(conn *sql.DB) http.HandlerFunc { writeError(w, http.StatusInternalServerError, err.Error()) return } + if err := db.PopulateArticleAuthors(r.Context(), conn, articles); err != nil { + writeError(w, http.StatusInternalServerError, err.Error()) + return + } writeJSON(w, http.StatusOK, articles) } } @@ -262,6 +306,10 @@ func GetArticles(conn *sql.DB) http.HandlerFunc { writeError(w, http.StatusInternalServerError, err.Error()) return } + if err := db.PopulateArticleAuthors(r.Context(), conn, articles); err != nil { + writeError(w, http.StatusInternalServerError, err.Error()) + return + } writeJSON(w, http.StatusOK, articles) } } @@ -289,6 +337,10 @@ func GetSectionArticles(conn *sql.DB) http.HandlerFunc { writeError(w, http.StatusInternalServerError, err.Error()) return } + if err := db.PopulateArticleAuthors(r.Context(), conn, articles); err != nil { + writeError(w, http.StatusInternalServerError, err.Error()) + return + } writeJSON(w, http.StatusOK, articles) } } @@ -316,6 +368,10 @@ func GetSubsectionArticles(conn *sql.DB) http.HandlerFunc { writeError(w, http.StatusInternalServerError, err.Error()) return } + if err := db.PopulateArticleAuthors(r.Context(), conn, articles); err 
!= nil { + writeError(w, http.StatusInternalServerError, err.Error()) + return + } writeJSON(w, http.StatusOK, articles) } } @@ -410,6 +466,12 @@ func GetArticle(conn *sql.DB) http.HandlerFunc { writeError(w, http.StatusInternalServerError, err.Error()) return } + articles := []models.Article{a} + if err := db.PopulateArticleAuthors(r.Context(), conn, articles); err != nil { + writeError(w, http.StatusInternalServerError, err.Error()) + return + } + a = articles[0] writeJSON(w, http.StatusOK, a) } } @@ -423,7 +485,7 @@ func PostArticles(conn *sql.DB) http.HandlerFunc { return } fields := db.ArticleInputToDBFields(body) - _, err := db.Insert(r.Context(), conn, "articles", + result, err := db.Insert(r.Context(), conn, "articles", []string{"title", "description", "text", "tags", "pub_date", "mod_date", "priority", "breaking_news", "comment_status", "photo_url"}, fields..., ) @@ -431,6 +493,15 @@ func PostArticles(conn *sql.DB) http.HandlerFunc { writeError(w, http.StatusInternalServerError, err.Error()) return } + articleID, err := result.LastInsertId() + if err != nil { + writeError(w, http.StatusInternalServerError, err.Error()) + return + } + if err := db.ReplaceArticleAuthors(r.Context(), conn, articleID, body.Authors); err != nil { + writeError(w, http.StatusInternalServerError, err.Error()) + return + } w.WriteHeader(http.StatusCreated) } } @@ -455,6 +526,15 @@ func PutArticle(conn *sql.DB) http.HandlerFunc { writeError(w, http.StatusInternalServerError, err.Error()) return } + articleID, err := parseArticleID(id) + if err != nil { + writeError(w, http.StatusBadRequest, "invalid article id") + return + } + if err := db.ReplaceArticleAuthors(r.Context(), conn, articleID, authorIDsFromOverviews(body.Authors)); err != nil { + writeError(w, http.StatusInternalServerError, err.Error()) + return + } w.WriteHeader(http.StatusNoContent) } } @@ -468,6 +548,15 @@ func PatchArticle(conn *sql.DB) http.HandlerFunc { writeError(w, http.StatusBadRequest, "invalid JSON") return 
} + var authorIDs *[]int64 + if rawAuthors, ok := body["authors"]; ok { + parsedIDs, err := parseAuthorIDs(rawAuthors) + if err != nil { + writeError(w, http.StatusBadRequest, "authors must be an array of IDs") + return + } + authorIDs = &parsedIDs + } var setCols []string var setArgs []any columnByJSONField := map[string]string{ @@ -548,14 +637,27 @@ func PatchArticle(conn *sql.DB) http.HandlerFunc { setArgs = append(setArgs, v) } } - if len(setCols) == 0 { + if len(setCols) == 0 && authorIDs == nil { writeError(w, http.StatusBadRequest, "no valid fields to update") return } - _, err := db.Update(r.Context(), conn, "articles", setCols, "`id` = ?", append(setArgs, id)...) - if err != nil { - writeError(w, http.StatusInternalServerError, err.Error()) - return + if len(setCols) > 0 { + _, err := db.Update(r.Context(), conn, "articles", setCols, "`id` = ?", append(setArgs, id)...) + if err != nil { + writeError(w, http.StatusInternalServerError, err.Error()) + return + } + } + if authorIDs != nil { + articleID, err := parseArticleID(id) + if err != nil { + writeError(w, http.StatusBadRequest, "invalid article id") + return + } + if err := db.ReplaceArticleAuthors(r.Context(), conn, articleID, *authorIDs); err != nil { + writeError(w, http.StatusInternalServerError, err.Error()) + return + } } w.WriteHeader(http.StatusNoContent) } diff --git a/server/internal/models/types.go b/server/internal/models/types.go index b9311b6..d7052bf 100644 --- a/server/internal/models/types.go +++ b/server/internal/models/types.go @@ -57,8 +57,6 @@ type Author struct { type AuthorOverview struct { ID int64 `json:"id"` DisplayName string `json:"display_name"` - FirstName string `json:"first_name,omitempty"` - LastName string `json:"last_name,omitempty"` } type AuthorInput struct {