2019-11-27 00:54:02 +00:00
|
|
|
package models
|
|
|
|
|
|
|
|
import (
|
|
|
|
"errors"
|
|
|
|
"fmt"
|
|
|
|
"html/template"
|
|
|
|
"math"
|
|
|
|
"regexp"
|
2020-02-14 06:03:01 +00:00
|
|
|
"strconv"
|
2019-11-27 00:54:02 +00:00
|
|
|
"strings"
|
|
|
|
"time"
|
|
|
|
|
|
|
|
"git.kirsle.net/apps/gophertype/pkg/console"
|
|
|
|
"git.kirsle.net/apps/gophertype/pkg/markdown"
|
2020-04-10 02:16:41 +00:00
|
|
|
"git.kirsle.net/apps/gophertype/pkg/mogrify"
|
2020-02-18 05:21:09 +00:00
|
|
|
"git.kirsle.net/apps/gophertype/pkg/rng"
|
2019-11-27 00:54:02 +00:00
|
|
|
"github.com/albrow/forms"
|
|
|
|
)
|
|
|
|
|
|
|
|
// postMan is a stateless receiver type that namespaces Post model access
// methods behind the Posts singleton below.
type postMan struct{}

// Posts is a singleton manager class for Post model access.
var Posts = postMan{}
|
|
|
|
|
|
|
|
// Post represents a single blog entry.
type Post struct {
	BaseModel

	Title string
	Fragment string `gorm:"unique_index"` // URL slug; uniqueness is enforced in Post.Save
	ContentType string `gorm:"default:'html'"` // markup of Body: "html" or Markdown (see HTML/PreviewHTML)
	AuthorID int // foreign key to User.ID
	Thumbnail string // image thumbnail for the post; extracted from Body on Save
	Body string // raw post source; may contain a "<snip>" preview marker
	Privacy string // visibility setting, e.g. Public

	Sticky bool // sticky posts sort before others on index pages
	EnableComments bool
	Tags []TaggedPost
	Author User `gorm:"foreign_key:UserID"` // NOTE(review): tag names UserID but the FK field is AuthorID — confirm

	// Private fields not in DB.
	CommentCount int `gorm:"-"` // populated at query time by CountComments
}
|
|
|
|
|
|
|
|
// PagedPosts holds a paginated response of multiple posts.
type PagedPosts struct {
	Posts []Post // the posts on the current page only
	Page int // current page number (1-based)
	PerPage int // page size (query functions default this to 20 when <= 0)
	Pages int // total number of pages
	Total int // total number of matching posts across all pages
	NextPage int // next page number, or 0 when already on the last page
	PreviousPage int // previous page number, or 0 when on the first page
}
|
|
|
|
|
2020-02-18 02:10:35 +00:00
|
|
|
// PostArchive holds the posts for a single year/month for the archive page.
type PostArchive struct {
	Label string // month label in "2006-01" format
	Date time.Time // first day of the month, midnight UTC
	Posts []Post // posts created during that month
}
|
|
|
|
|
2020-02-17 23:50:04 +00:00
|
|
|
// ThumbnailImageRegexp matches the first quoted or parenthesized image URL
// (jpg/jpeg/png/gif) in a blog post body, used to pick a thumbnail image.
var ThumbnailImageRegexp = regexp.MustCompile(`['"(]([a-zA-Z0-9-_:/?.=&]+\.(?:jpe?g|png|gif))['")]`)
|
|
|
|
|
2019-11-27 00:54:02 +00:00
|
|
|
// New creates a new Post model.
|
|
|
|
func (m postMan) New() Post {
|
|
|
|
return Post{
|
|
|
|
ContentType: Markdown,
|
|
|
|
Privacy: Public,
|
|
|
|
EnableComments: true,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Load a post by ID.
|
|
|
|
func (m postMan) Load(id int) (Post, error) {
|
|
|
|
var post Post
|
|
|
|
r := DB.Preload("Author").Preload("Tags").First(&post, id)
|
|
|
|
return post, r.Error
|
|
|
|
}
|
|
|
|
|
|
|
|
// LoadFragment loads a blog post by its URL fragment.
|
|
|
|
func (m postMan) LoadFragment(fragment string) (Post, error) {
|
|
|
|
var post Post
|
|
|
|
r := DB.Preload("Author").Preload("Tags").Where("fragment = ?", strings.Trim(fragment, "/")).First(&post)
|
|
|
|
return post, r.Error
|
|
|
|
}
|
|
|
|
|
2020-02-18 05:21:09 +00:00
|
|
|
// LoadRandom gets a random post for a given privacy setting.
|
|
|
|
func (m postMan) LoadRandom(privacy string) (Post, error) {
|
|
|
|
// Find all the post IDs.
|
|
|
|
var pp []Post
|
2020-02-26 20:49:39 +00:00
|
|
|
r := DB.Select("id").Where("privacy = ?", privacy).Find(&pp)
|
2020-02-18 05:21:09 +00:00
|
|
|
if r.Error != nil || len(pp) == 0 {
|
|
|
|
return Post{}, r.Error
|
|
|
|
}
|
|
|
|
|
|
|
|
// Pick one at random.
|
|
|
|
randPost := pp[rng.Intn(len(pp))]
|
|
|
|
post, err := Posts.Load(randPost.ID)
|
|
|
|
return post, err
|
|
|
|
}
|
|
|
|
|
2019-11-27 00:54:02 +00:00
|
|
|
// GetIndex returns the index page of blog posts.
|
|
|
|
func (m postMan) GetIndexPosts(privacy string, page, perPage int) (PagedPosts, error) {
|
|
|
|
var pp = PagedPosts{
|
|
|
|
Page: page,
|
|
|
|
PerPage: perPage,
|
|
|
|
}
|
|
|
|
|
|
|
|
if pp.Page < 1 {
|
|
|
|
pp.Page = 1
|
|
|
|
}
|
|
|
|
if pp.PerPage <= 0 {
|
|
|
|
pp.PerPage = 20
|
|
|
|
}
|
|
|
|
|
2020-02-18 02:10:35 +00:00
|
|
|
query := DB.Preload("Author").Preload("Tags").
|
2019-11-27 00:54:02 +00:00
|
|
|
Where("privacy = ?", privacy).
|
|
|
|
Order("sticky desc, created_at desc")
|
|
|
|
|
|
|
|
// Count the total number of rows for paging purposes.
|
|
|
|
query.Model(&Post{}).Count(&pp.Total)
|
|
|
|
|
|
|
|
// Query the paginated slice of results.
|
|
|
|
r := query.
|
2020-02-16 03:43:08 +00:00
|
|
|
Offset((pp.Page - 1) * pp.PerPage).
|
|
|
|
Limit(pp.PerPage).
|
2019-11-27 00:54:02 +00:00
|
|
|
Find(&pp.Posts)
|
|
|
|
|
|
|
|
pp.Pages = int(math.Ceil(float64(pp.Total) / float64(pp.PerPage)))
|
|
|
|
if pp.Page < pp.Pages {
|
|
|
|
pp.NextPage = pp.Page + 1
|
|
|
|
}
|
|
|
|
if pp.Page > 1 {
|
|
|
|
pp.PreviousPage = pp.Page - 1
|
|
|
|
}
|
|
|
|
|
2020-02-14 06:03:01 +00:00
|
|
|
if err := pp.CountComments(); err != nil {
|
|
|
|
console.Error("PagedPosts.CountComments: %s", err)
|
|
|
|
}
|
|
|
|
|
2019-11-27 00:54:02 +00:00
|
|
|
return pp, r.Error
|
|
|
|
}
|
|
|
|
|
|
|
|
// GetPostsByTag gets posts by a certain tag.
|
|
|
|
func (m postMan) GetPostsByTag(tag, privacy string, page, perPage int) (PagedPosts, error) {
|
|
|
|
var pp = PagedPosts{
|
|
|
|
Page: page,
|
|
|
|
PerPage: perPage,
|
|
|
|
}
|
|
|
|
|
|
|
|
if pp.Page < 1 {
|
|
|
|
pp.Page = 1
|
|
|
|
}
|
|
|
|
if pp.PerPage <= 0 {
|
|
|
|
pp.PerPage = 20
|
|
|
|
}
|
|
|
|
|
2020-02-18 04:26:30 +00:00
|
|
|
// Multi-tag query.
|
|
|
|
whitelist, blacklist, err := ParseMultitag(tag)
|
|
|
|
if err != nil {
|
|
|
|
return pp, err
|
2019-11-27 00:54:02 +00:00
|
|
|
}
|
|
|
|
|
2020-02-18 04:26:30 +00:00
|
|
|
// Query the whitelist of post IDs which match the whitelist tags.
|
|
|
|
postIDs := getPostIDsByTag("tag IN (?)", whitelist)
|
|
|
|
notPostIDs := getPostIDsByTag("tag IN (?)", blacklist)
|
|
|
|
postIDs = narrowWhitelistByBlacklist(postIDs, notPostIDs)
|
2019-11-27 00:54:02 +00:00
|
|
|
if len(postIDs) == 0 {
|
|
|
|
return pp, errors.New("no posts found")
|
|
|
|
}
|
|
|
|
|
|
|
|
// Query this set of posts.
|
2020-02-18 02:10:35 +00:00
|
|
|
query := DB.Preload("Author").Preload("Tags").
|
2019-11-27 00:54:02 +00:00
|
|
|
Where("id IN (?) AND privacy = ?", postIDs, privacy).
|
|
|
|
Order("sticky desc, created_at desc")
|
|
|
|
|
|
|
|
// Count the total number of rows for paging purposes.
|
|
|
|
query.Model(&Post{}).Count(&pp.Total)
|
|
|
|
|
|
|
|
// Query the paginated slice of results.
|
2020-02-18 04:26:30 +00:00
|
|
|
r := query.
|
2019-11-27 00:54:02 +00:00
|
|
|
Offset((page - 1) * perPage).
|
|
|
|
Limit(perPage).
|
|
|
|
Find(&pp.Posts)
|
|
|
|
|
|
|
|
pp.Pages = int(math.Ceil(float64(pp.Total) / float64(pp.PerPage)))
|
|
|
|
if pp.Page < pp.Pages {
|
|
|
|
pp.NextPage = pp.Page + 1
|
|
|
|
}
|
|
|
|
if pp.Page > 1 {
|
|
|
|
pp.PreviousPage = pp.Page - 1
|
|
|
|
}
|
|
|
|
|
2020-02-14 06:03:01 +00:00
|
|
|
if err := pp.CountComments(); err != nil {
|
|
|
|
console.Error("PagedPosts.CountComments: %s", err)
|
|
|
|
}
|
|
|
|
|
2019-11-27 00:54:02 +00:00
|
|
|
return pp, r.Error
|
|
|
|
}
|
|
|
|
|
2020-02-18 04:26:30 +00:00
|
|
|
// getPostIDsByTag helper function returns the post IDs that either whitelist,
|
|
|
|
// or blacklist, a set of tags.
|
|
|
|
func getPostIDsByTag(query string, value []string) []int {
|
|
|
|
var tags []TaggedPost
|
|
|
|
var result []int
|
|
|
|
|
|
|
|
if len(value) == 0 {
|
|
|
|
return result
|
|
|
|
}
|
|
|
|
|
|
|
|
DB.Where(query, value).Find(&tags)
|
|
|
|
for _, tag := range tags {
|
|
|
|
result = append(result, tag.PostID)
|
|
|
|
}
|
|
|
|
|
|
|
|
return result
|
|
|
|
}
|
|
|
|
|
|
|
|
// narrowWhitelistByBlacklist removes IDs in whitelist that appear in blacklist.
func narrowWhitelistByBlacklist(wl []int, bl []int) []int {
	// Index the blacklist for O(1) membership tests.
	banned := map[int]interface{}{}
	for _, id := range bl {
		banned[id] = nil
	}

	// Keep only whitelist entries that are not banned.
	var keep []int
	for _, id := range wl {
		if _, bad := banned[id]; bad {
			continue
		}
		keep = append(keep, id)
	}

	return keep
}
|
|
|
|
|
2020-02-18 02:10:35 +00:00
|
|
|
// GetArchive queries the archive view of the blog.
|
|
|
|
// Set private=true to return private posts, false returns public only.
|
|
|
|
func (m postMan) GetArchive(private bool) ([]*PostArchive, error) {
|
|
|
|
var result = []*PostArchive{}
|
|
|
|
|
|
|
|
query := DB.Table("posts").
|
|
|
|
Select("title, fragment, thumbnail, created_at, privacy")
|
|
|
|
if !private {
|
|
|
|
query = query.Where("privacy=?", Public)
|
|
|
|
}
|
|
|
|
rows, err := query.
|
|
|
|
Order("created_at desc").
|
|
|
|
Rows()
|
|
|
|
if err != nil {
|
|
|
|
return result, err
|
|
|
|
}
|
|
|
|
|
|
|
|
// Group the posts by their month/year.
|
|
|
|
var months []string
|
|
|
|
var byMonth = map[string]*PostArchive{}
|
|
|
|
|
|
|
|
for rows.Next() {
|
|
|
|
var row Post
|
|
|
|
if err := rows.Scan(&row.Title, &row.Fragment, &row.Thumbnail, &row.CreatedAt, &row.Privacy); err != nil {
|
|
|
|
return result, err
|
|
|
|
}
|
|
|
|
|
|
|
|
label := row.CreatedAt.Format("2006-01")
|
|
|
|
if _, ok := byMonth[label]; !ok {
|
|
|
|
months = append(months, label)
|
|
|
|
byMonth[label] = &PostArchive{
|
|
|
|
Label: label,
|
|
|
|
Date: time.Date(
|
|
|
|
row.CreatedAt.Year(), row.CreatedAt.Month(), 1,
|
|
|
|
0, 0, 0, 0, time.UTC,
|
|
|
|
),
|
|
|
|
Posts: []Post{},
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
byMonth[label].Posts = append(byMonth[label].Posts, row)
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, month := range months {
|
|
|
|
result = append(result, byMonth[month])
|
|
|
|
}
|
|
|
|
|
|
|
|
return result, nil
|
|
|
|
}
|
|
|
|
|
2020-02-14 06:03:01 +00:00
|
|
|
// CountComments gets comment counts for one or more posts.
|
|
|
|
// Returns a map[uint]int mapping post ID to comment count.
|
2020-02-18 02:10:35 +00:00
|
|
|
func (m postMan) CountComments(posts ...Post) (map[int]int, error) {
|
|
|
|
var result = map[int]int{}
|
2020-02-14 06:03:01 +00:00
|
|
|
|
|
|
|
// Create the comment thread IDs.
|
|
|
|
var threadIDs = make([]string, len(posts))
|
|
|
|
for i, post := range posts {
|
|
|
|
threadIDs[i] = fmt.Sprintf("post-%d", post.ID)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Query comment counts for each thread.
|
|
|
|
if len(threadIDs) > 0 {
|
|
|
|
rows, err := DB.Table("comments").
|
|
|
|
Select("thread, count(*) as count").
|
|
|
|
Group("thread").
|
|
|
|
Rows()
|
|
|
|
if err != nil {
|
|
|
|
return result, err
|
|
|
|
}
|
|
|
|
|
|
|
|
for rows.Next() {
|
|
|
|
var thread string
|
|
|
|
var count int
|
|
|
|
if err := rows.Scan(&thread, &count); err != nil {
|
|
|
|
console.Error("CountComments: rows.Scan: %s", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
postID, err := strconv.Atoi(strings.TrimPrefix(thread, "post-"))
|
|
|
|
if err != nil {
|
|
|
|
console.Warn("CountComments: strconv.Atoi(%s): %s", thread, err)
|
|
|
|
}
|
2020-02-18 02:10:35 +00:00
|
|
|
result[postID] = count
|
2020-02-14 06:03:01 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return result, nil
|
|
|
|
}
|
|
|
|
|
2020-02-18 04:26:30 +00:00
|
|
|
// ParseMultitag parses a tag string to return arrays for IN and NOT IN queries from DB.
//
// Example input: "blog,updates,-photo,-ask"
// Returns: ["blog", "updates"], ["photo", "ask"]
//
// Tags are comma-separated; surrounding whitespace is trimmed and empty
// entries are skipped. A leading "-" negates a tag (blacklist). An error
// is returned when nothing parseable remains.
func ParseMultitag(tagline string) (whitelist, blacklist []string, err error) {
	for _, raw := range strings.Split(tagline, ",") {
		tag := strings.TrimSpace(raw)
		switch {
		case tag == "":
			// Skip empty entries (e.g. doubled or trailing commas).
		case strings.HasPrefix(tag, "-"):
			blacklist = append(blacklist, tag[1:])
		default:
			whitelist = append(whitelist, tag)
		}
	}

	// Nothing parsed at all: the tagline was empty or only separators.
	if len(whitelist)+len(blacklist) == 0 {
		err = errors.New("parsing error")
	}
	return
}
|
|
|
|
|
2020-02-14 06:03:01 +00:00
|
|
|
// CountComments on the posts in a PagedPosts list.
|
|
|
|
func (pp *PagedPosts) CountComments() error {
|
|
|
|
counts, err := Posts.CountComments(pp.Posts...)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
console.Info("counts: %+v", counts)
|
|
|
|
|
|
|
|
for i, post := range pp.Posts {
|
|
|
|
if count, ok := counts[post.ID]; ok {
|
|
|
|
pp.Posts[i].CommentCount = count
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2019-11-27 00:54:02 +00:00
|
|
|
// PreviewHTML returns the post's body as rendered HTML code, but only above
|
|
|
|
// the <snip> tag for index views.
|
|
|
|
func (p Post) PreviewHTML() template.HTML {
|
|
|
|
var (
|
|
|
|
parts = strings.Split(p.Body, "<snip>")
|
|
|
|
hasMore = len(parts) > 1
|
|
|
|
body = strings.TrimSpace(parts[0])
|
|
|
|
)
|
|
|
|
|
|
|
|
if p.ContentType == Markdown {
|
|
|
|
if hasMore {
|
|
|
|
body += fmt.Sprintf("\n\n[Read more...](/%s)", p.Fragment)
|
|
|
|
}
|
2020-04-10 02:16:41 +00:00
|
|
|
body = markdown.RenderTrustedMarkdown(body)
|
|
|
|
} else if hasMore {
|
2020-02-26 20:49:39 +00:00
|
|
|
body += fmt.Sprintf(`<p><a href="/%s">Read more...</a></p>`, p.Fragment)
|
|
|
|
}
|
2020-04-10 02:16:41 +00:00
|
|
|
|
|
|
|
// Make all images lazy loaded. TODO: make this configurable behavior?
|
|
|
|
body = mogrify.LazyLoadImages(body)
|
2019-11-27 00:54:02 +00:00
|
|
|
return template.HTML(body)
|
|
|
|
}
|
|
|
|
|
|
|
|
// HTML returns the post's body as rendered HTML code.
|
|
|
|
func (p Post) HTML() template.HTML {
|
|
|
|
body := strings.ReplaceAll(p.Body, "<snip>", "")
|
|
|
|
if p.ContentType == Markdown {
|
2020-04-10 02:16:41 +00:00
|
|
|
body = markdown.RenderTrustedMarkdown(body)
|
2019-11-27 00:54:02 +00:00
|
|
|
}
|
2020-04-10 02:16:41 +00:00
|
|
|
|
|
|
|
// Make all images lazy loaded. TODO: make this configurable behavior?
|
|
|
|
body = mogrify.LazyLoadImages(body)
|
2019-11-27 00:54:02 +00:00
|
|
|
return template.HTML(body)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Save a post.
|
|
|
|
// This method also makes sure a unique Fragment is set and links the Tags correctly.
|
|
|
|
func (p *Post) Save() error {
|
|
|
|
// Generate the default fragment from the post title.
|
|
|
|
if p.Fragment == "" {
|
|
|
|
fragment := strings.ToLower(p.Title)
|
|
|
|
fragment = regexp.MustCompile(`[^A-Za-z0-9]+`).ReplaceAllString(fragment, "-")
|
|
|
|
fragment = strings.ReplaceAll(fragment, "--", "-")
|
|
|
|
p.Fragment = strings.Trim(fragment, "-")
|
2021-05-31 23:11:01 +00:00
|
|
|
console.Error("frag: %s", p.Fragment)
|
2019-11-27 00:54:02 +00:00
|
|
|
|
|
|
|
// If still no fragment, make one up from the current time.
|
|
|
|
if p.Fragment == "" {
|
|
|
|
p.Fragment = time.Now().Format("2006-01-02-150405")
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Ensure the fragment is unique!
|
|
|
|
{
|
2021-05-31 23:11:01 +00:00
|
|
|
console.Debug("Ensuring fragment '%s' is unique", p.Fragment)
|
|
|
|
if exist, err := Posts.LoadFragment(p.Fragment); err == nil && exist.ID != p.ID {
|
2019-11-27 00:54:02 +00:00
|
|
|
console.Debug("Post.Save: fragment %s is not unique, trying to resolve", p.Fragment)
|
|
|
|
var resolved bool
|
|
|
|
for i := 2; i <= 100; i++ {
|
|
|
|
fragment := fmt.Sprintf("%s-%d", p.Fragment, i)
|
|
|
|
console.Debug("Post.Save: try fragment '%s'", fragment)
|
2021-05-31 23:11:01 +00:00
|
|
|
exist, err = Posts.LoadFragment(fragment)
|
|
|
|
if err == nil && exist.ID != p.ID {
|
2019-11-27 00:54:02 +00:00
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
p.Fragment = fragment
|
|
|
|
resolved = true
|
|
|
|
break
|
|
|
|
}
|
|
|
|
|
|
|
|
if !resolved {
|
|
|
|
return fmt.Errorf("failed to generate a unique URL fragment for '%s' after 100 attempts", p.Fragment)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-02-17 23:50:04 +00:00
|
|
|
// Cache the post thumbnail from the body.
|
|
|
|
if thumbnail, ok := p.ExtractThumbnail(); ok {
|
|
|
|
p.Thumbnail = thumbnail
|
|
|
|
}
|
|
|
|
|
2019-11-27 00:54:02 +00:00
|
|
|
// Empty tags list.
|
|
|
|
if len(p.Tags) == 1 && p.Tags[0].Tag == "" {
|
|
|
|
p.Tags = []TaggedPost{}
|
|
|
|
}
|
|
|
|
|
|
|
|
// TODO: tag relationships. For now just delete and re-add them all.
|
|
|
|
if p.ID != 0 {
|
|
|
|
DB.Where("post_id = ?", p.ID).Delete(TaggedPost{})
|
|
|
|
}
|
|
|
|
|
|
|
|
// Dedupe tags.
|
|
|
|
p.fixTags()
|
|
|
|
|
|
|
|
// Save the post.
|
|
|
|
if DB.NewRecord(p) {
|
|
|
|
return DB.Create(&p).Error
|
|
|
|
}
|
|
|
|
|
|
|
|
return DB.Save(&p).Error
|
|
|
|
}
|
|
|
|
|
2020-02-26 20:49:39 +00:00
|
|
|
// SetUpdated force sets the updated time of a post, i.e. to reset it to original.
|
|
|
|
func (p Post) SetUpdated(dt time.Time) error {
|
|
|
|
r := DB.Table("posts").Where("id = ?", p.ID).Updates(map[string]interface{}{
|
|
|
|
"updated_at": dt,
|
|
|
|
})
|
|
|
|
return r.Error
|
|
|
|
}
|
|
|
|
|
2019-11-27 00:54:02 +00:00
|
|
|
// ParseForm populates a Post from an HTTP form.
|
|
|
|
func (p *Post) ParseForm(form *forms.Data) {
|
|
|
|
p.Title = form.Get("title")
|
|
|
|
p.Fragment = form.Get("fragment")
|
|
|
|
p.ContentType = form.Get("content-type")
|
|
|
|
p.Body = form.Get("body")
|
|
|
|
p.Privacy = form.Get("privacy")
|
|
|
|
p.Sticky = form.GetBool("sticky")
|
|
|
|
p.EnableComments = form.GetBool("enable-comments")
|
|
|
|
|
|
|
|
// Parse the tags array. This replaces the post.Tags with an empty TaggedPost
|
|
|
|
// list containing only the string Tag values. The IDs and DB side will be
|
|
|
|
// patched up when the post gets saved.
|
|
|
|
p.Tags = []TaggedPost{}
|
|
|
|
tags := strings.Split(form.Get("tags"), ",")
|
|
|
|
for _, tag := range tags {
|
|
|
|
tag = strings.TrimSpace(tag)
|
|
|
|
if len(tag) == 0 {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
p.Tags = append(p.Tags, TaggedPost{
|
|
|
|
Tag: tag,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-02-17 23:50:04 +00:00
|
|
|
// ExtractThumbnail searches and returns a thumbnail image to represent the post.
|
|
|
|
// It will be the first image embedded in the post body, or nothing.
|
|
|
|
func (p Post) ExtractThumbnail() (string, bool) {
|
|
|
|
result := ThumbnailImageRegexp.FindStringSubmatch(p.Body)
|
|
|
|
if len(result) < 2 {
|
|
|
|
return "", false
|
|
|
|
}
|
|
|
|
return result[1], true
|
|
|
|
}
|
|
|
|
|
2019-11-27 00:54:02 +00:00
|
|
|
// TagsString turns the post tags into a comma separated string.
|
|
|
|
func (p Post) TagsString() string {
|
|
|
|
console.Error("TagsString: %+v", p.Tags)
|
|
|
|
var tags = make([]string, len(p.Tags))
|
|
|
|
for i, tag := range p.Tags {
|
|
|
|
tags[i] = tag.Tag
|
|
|
|
}
|
|
|
|
return strings.Join(tags, ", ")
|
|
|
|
}
|
|
|
|
|
|
|
|
// fixTags is a pre-Save function to fix up the Tags relationships.
|
|
|
|
// It checks that each tag has an ID, and if it doesn't have an ID yet, removes
|
|
|
|
// it if a duplicate tag does exist that has an ID.
|
|
|
|
func (p *Post) fixTags() {
|
|
|
|
// De-duplicate tag values.
|
|
|
|
var dedupe = map[string]interface{}{}
|
|
|
|
var finalTags []TaggedPost
|
|
|
|
for _, tag := range p.Tags {
|
|
|
|
if _, ok := dedupe[tag.Tag]; !ok {
|
|
|
|
finalTags = append(finalTags, tag)
|
|
|
|
dedupe[tag.Tag] = nil
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
p.Tags = finalTags
|
|
|
|
}
|