Add get posts by tag
This commit is contained in:
parent
c4b3a6bbc5
commit
9f01ec25da
1 changed file with 47 additions and 0 deletions
|
@ -313,6 +313,53 @@ func getBlogPostById(w http.ResponseWriter, r *http.Request) {
|
|||
}
|
||||
|
||||
func getBlogPostsByTag(w http.ResponseWriter, r *http.Request) {
|
||||
returnError := ReturnError{}
|
||||
referenceID := &ReferenceID{}
|
||||
err := json.NewDecoder(r.Body).Decode(referenceID)
|
||||
// hardcode 9001 for cool kid points
|
||||
if err != nil {
|
||||
referenceID.LastID = 9001
|
||||
}
|
||||
search_id := referenceID.LastID
|
||||
// if someone is lame and sends up a negative number...
|
||||
if search_id < 1 {
|
||||
search_id = 9001
|
||||
}
|
||||
tag := chi.URLParam(r, "tag")
|
||||
search := `
|
||||
SELECT posts.id, posts.title, posts.slug, posts.author, posts.content, posts.time_published, posts.modified, posts.last_modified
|
||||
FROM posts
|
||||
INNER JOIN tags on posts .id = tags.article_id
|
||||
WHERE posts.id < $1
|
||||
AND tags.tag = $2
|
||||
ORDER BY id DESC
|
||||
FETCH FIRST 10 ROWS ONLY
|
||||
`
|
||||
rows, err := DB.Query(search, search_id, tag)
|
||||
if err != nil {
|
||||
returnError.Message = "something is super broken..."
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
render.JSON(w, r, returnError)
|
||||
fmt.Println(err)
|
||||
return
|
||||
}
|
||||
defer rows.Close()
|
||||
post := BlogPost{}
|
||||
posts := make(BlogPosts, 0)
|
||||
for rows.Next() {
|
||||
if err := rows.Scan(&post.ID, &post.Title, &post.Slug, &post.Author, &post.Content, &post.TimePublished, &post.Modified, &post.TimeModified); err != nil {
|
||||
}
|
||||
posts = append(posts, post)
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
returnError.Message = "something is super broken..."
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
render.JSON(w, r, returnError)
|
||||
fmt.Println(err)
|
||||
return
|
||||
}
|
||||
w.WriteHeader(http.StatusOK)
|
||||
render.JSON(w, r, posts)
|
||||
return
|
||||
}
|
||||
|
||||
|
|
Reference in a new issue