chore: optimize code

henry.chen
2025-07-17 14:01:17 +08:00
parent 4abe528742
commit 91e1731909
13 changed files with 406 additions and 394 deletions

.gitignore

@@ -17,7 +17,7 @@
# Dependency directories (remove the comment below to include it)
# vendor/
bin
assets/*.xml
assets/*.txt
cmd/eiblog/etc/assets/*.xml
cmd/eiblog/etc/assets/*.txt
db.sqlite
cmd/*/backend


@@ -210,12 +210,14 @@ func handleAPIPostDelete(c *gin.Context) {
ids = append(ids, id)
}
// elasticsearch
err := internal.ESClient.ElasticDelIndex(ids)
if err != nil {
logrus.Error("handleAPIPostDelete.ElasticDelIndex: ", err)
if internal.ESClient != nil {
err := internal.ESClient.ElasticDelIndex(ids)
if err != nil {
logrus.Error("handleAPIPostDelete.ElasticDelIndex: ", err)
}
}
// TODO disqus delete
responseNotice(c, NoticeSuccess, "删除成功", "")
responseNotice(c, NoticeSuccess, "删除成功,已移入到回收箱", "")
}
// handleAPIPostCreate creates an article
@@ -291,7 +293,9 @@ func handleAPIPostCreate(c *gin.Context) {
// run asynchronously for speed
go func() {
// elastic
internal.ESClient.ElasticAddIndex(article)
if internal.ESClient != nil {
internal.ESClient.ElasticAddIndex(article)
}
// rss
internal.Pinger.PingFunc(internal.Ei.Blogger.BTitle, slug)
}()
@@ -332,7 +336,9 @@ func handleAPIPostCreate(c *gin.Context) {
// run asynchronously for speed
go func() {
// elastic
internal.ESClient.ElasticAddIndex(article)
if internal.ESClient != nil {
internal.ESClient.ElasticAddIndex(article)
}
// rss
internal.Pinger.PingFunc(internal.Ei.Blogger.BTitle, slug)
}()
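
The change above is a plain nil guard: internal.ESClient may now be nil when no ES host is configured, so every call site checks it before use. A minimal self-contained sketch of the same pattern (SearchClient, searchClient and indexArticle are hypothetical stand-ins, not names from this commit):

package main

import "log"

// SearchClient stands in for internal.ESClient; a nil pointer
// means full-text search is disabled (no ES host configured).
type SearchClient struct{ host string }

func (c *SearchClient) AddIndex(doc string) error {
	log.Println("indexing", doc, "to", c.host)
	return nil
}

var searchClient *SearchClient // left nil when search is disabled

// indexArticle guards every use of the optional client, so the call
// is skipped instead of touching a nil client and panicking.
func indexArticle(slug string) {
	if searchClient != nil {
		if err := searchClient.AddIndex(slug); err != nil {
			log.Println("indexArticle.AddIndex: ", err)
		}
	}
}

func main() {
	indexArticle("hello-world") // no-op while searchClient is nil
}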


@@ -2,47 +2,68 @@ package file
import (
"net/http"
"path/filepath"
"github.com/eiblog/eiblog/cmd/eiblog/config"
"github.com/gin-gonic/gin"
)
// RegisterRoutes register routes
func RegisterRoutes(e *gin.Engine) {
e.GET("/rss.html", handleFeed)
e.GET("/feed", handleFeed)
e.GET("/opensearch.xml", handleOpensearch)
e.GET("/sitemap.xml", handleSitemap)
e.GET("/robots.txt", handleRobots)
e.GET("/crossdomain.xml", handleCrossDomain)
e.GET("/favicon.ico", handleFavicon)
e.GET("/rss.html", handleFeed())
e.GET("/feed", handleFeed())
e.GET("/opensearch.xml", handleOpensearch())
e.GET("/sitemap.xml", handleSitemap())
e.GET("/robots.txt", handleRobots())
e.GET("/crossdomain.xml", handleCrossDomain())
e.GET("/favicon.ico", handleFavicon())
}
// handleFeed feed.xml
func handleFeed(c *gin.Context) {
http.ServeFile(c.Writer, c.Request, "assets/feed.xml")
func handleFeed() gin.HandlerFunc {
path := filepath.Join(config.EtcDir, "assets", "feed.xml")
return func(c *gin.Context) {
http.ServeFile(c.Writer, c.Request, path)
}
}
// handleOpensearch opensearch.xml
func handleOpensearch(c *gin.Context) {
http.ServeFile(c.Writer, c.Request, "assets/opensearch.xml")
func handleOpensearch() gin.HandlerFunc {
path := filepath.Join(config.EtcDir, "assets", "opensearch.xml")
return func(c *gin.Context) {
http.ServeFile(c.Writer, c.Request, path)
}
}
// handleRobots robots.txt
func handleRobots(c *gin.Context) {
http.ServeFile(c.Writer, c.Request, "assets/robots.txt")
func handleRobots() gin.HandlerFunc {
path := filepath.Join(config.EtcDir, "assets", "robots.txt")
return func(c *gin.Context) {
http.ServeFile(c.Writer, c.Request, path)
}
}
// handleSitemap sitemap.xml
func handleSitemap(c *gin.Context) {
http.ServeFile(c.Writer, c.Request, "assets/sitemap.xml")
func handleSitemap() gin.HandlerFunc {
path := filepath.Join(config.EtcDir, "assets", "sitemap.xml")
return func(c *gin.Context) {
http.ServeFile(c.Writer, c.Request, path)
}
}
// handleCrossDomain crossdomain.xml
func handleCrossDomain(c *gin.Context) {
http.ServeFile(c.Writer, c.Request, "assets/crossdomain.xml")
func handleCrossDomain() gin.HandlerFunc {
path := filepath.Join(config.EtcDir, "assets", "crossdomain.xml")
return func(c *gin.Context) {
http.ServeFile(c.Writer, c.Request, path)
}
}
// handleFavicon favicon.ico
func handleFavicon(c *gin.Context) {
http.ServeFile(c.Writer, c.Request, "assets/favicon.ico")
func handleFavicon() gin.HandlerFunc {
path := filepath.Join(config.EtcDir, "assets", "favicon.ico")
return func(c *gin.Context) {
http.ServeFile(c.Writer, c.Request, path)
}
}
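
Each handler here turns from a plain func(c *gin.Context) into a factory returning a gin.HandlerFunc, so the asset path is joined once at route-registration time and anchored at config.EtcDir instead of the process working directory. A runnable sketch of the factory shape; etcDir is a hypothetical stand-in for config.EtcDir, and a single generic serveAsset is used for brevity where the commit keeps one factory per file:

package main

import (
	"net/http"
	"path/filepath"

	"github.com/gin-gonic/gin"
)

var etcDir = "/opt/eiblog/etc" // hypothetical stand-in for config.EtcDir

// serveAsset resolves the file path once, when routes are
// registered, and returns a closure that only serves it.
func serveAsset(name string) gin.HandlerFunc {
	path := filepath.Join(etcDir, "assets", name)
	return func(c *gin.Context) {
		http.ServeFile(c.Writer, c.Request, path)
	}
}

func main() {
	e := gin.Default()
	e.GET("/robots.txt", serveAsset("robots.txt"))
	e.GET("/favicon.ico", serveAsset("favicon.ico"))
	_ = e.Run(":8080")
}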


@@ -1,163 +0,0 @@
package file
import (
"os"
"path/filepath"
"text/template"
"time"
"github.com/eiblog/eiblog/cmd/eiblog/config"
"github.com/eiblog/eiblog/cmd/eiblog/handler/internal"
"github.com/eiblog/eiblog/tools"
"github.com/sirupsen/logrus"
)
var xmlTmpl *template.Template
func init() {
root := filepath.Join(config.EtcDir, "template", "*.xml")
var err error
xmlTmpl, err = template.New("").Funcs(template.FuncMap{
"dateformat": tools.DateFormat,
"imgtonormal": tools.ImgToNormal,
}).ParseGlob(root)
if err != nil {
panic(err)
}
generateOpensearch()
generateRobots()
generateCrossdomain()
go timerFeed()
go timerSitemap()
}
// timerFeed periodically refreshes the feed
func timerFeed() {
tpl := xmlTmpl.Lookup("feedTpl.xml")
if tpl == nil {
logrus.Info("file: not found: feedTpl.xml")
return
}
now := time.Now()
_, _, articles := internal.Ei.PageArticleFE(1, 20)
params := map[string]interface{}{
"Title": internal.Ei.Blogger.BTitle,
"SubTitle": internal.Ei.Blogger.SubTitle,
"Host": config.Conf.Host,
"FeedrURL": config.Conf.FeedRPC.FeedrURL,
"BuildDate": now.Format(time.RFC1123Z),
"Articles": articles,
}
f, err := os.OpenFile("assets/feed.xml", os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666)
if err != nil {
logrus.Error("file: timerFeed.OpenFile: ", err)
return
}
defer f.Close()
err = tpl.Execute(f, params)
if err != nil {
logrus.Error("file: timerFeed.Execute: ", err)
return
}
time.AfterFunc(time.Hour*4, timerFeed)
}
// timerSitemap periodically refreshes the sitemap
func timerSitemap() {
tpl := xmlTmpl.Lookup("sitemapTpl.xml")
if tpl == nil {
logrus.Info("file: not found: sitemapTpl.xml")
return
}
params := map[string]interface{}{
"Articles": internal.Ei.Articles,
"Host": config.Conf.Host,
}
f, err := os.OpenFile("assets/sitemap.xml", os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666)
if err != nil {
logrus.Error("file: timerSitemap.OpenFile: ", err)
return
}
defer f.Close()
err = tpl.Execute(f, params)
if err != nil {
logrus.Error("file: timerSitemap.Execute: ", err)
return
}
time.AfterFunc(time.Hour*24, timerSitemap)
}
// generateOpensearch generates opensearch.xml
func generateOpensearch() {
tpl := xmlTmpl.Lookup("opensearchTpl.xml")
if tpl == nil {
logrus.Info("file: not found: opensearchTpl.xml")
return
}
params := map[string]string{
"BTitle": internal.Ei.Blogger.BTitle,
"SubTitle": internal.Ei.Blogger.SubTitle,
"Host": config.Conf.Host,
}
f, err := os.OpenFile("assets/opensearch.xml", os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666)
if err != nil {
logrus.Error("file: generateOpensearch.OpenFile: ", err)
return
}
defer f.Close()
err = tpl.Execute(f, params)
if err != nil {
logrus.Error("file: generateOpensearch.Execute: ", err)
return
}
}
// generateRobots generates robots.txt
func generateRobots() {
tpl := xmlTmpl.Lookup("robotsTpl.xml")
if tpl == nil {
logrus.Info("file: not found: robotsTpl.xml")
return
}
params := map[string]string{
"Host": config.Conf.Host,
}
f, err := os.OpenFile("assets/robots.txt", os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666)
if err != nil {
logrus.Error("file: generateRobots.OpenFile: ", err)
return
}
defer f.Close()
err = tpl.Execute(f, params)
if err != nil {
logrus.Error("file: generateRobots.Execute: ", err)
return
}
}
// generateCrossdomain generates crossdomain.xml
func generateCrossdomain() {
tpl := xmlTmpl.Lookup("crossdomainTpl.xml")
if tpl == nil {
logrus.Info("file: not found: crossdomainTpl.xml")
return
}
params := map[string]string{
"Host": config.Conf.Host,
}
f, err := os.OpenFile("assets/crossdomain.xml", os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666)
if err != nil {
logrus.Error("file: generateCrossdomain.OpenFile: ", err)
return
}
defer f.Close()
err = tpl.Execute(f, params)
if err != nil {
logrus.Error("file: generateCrossdomain.Execute: ", err)
return
}
}


@@ -15,14 +15,9 @@ import (
"github.com/eiblog/eiblog/cmd/eiblog/handler/internal/store"
"github.com/eiblog/eiblog/pkg/model"
"github.com/eiblog/eiblog/tools"
"github.com/sirupsen/logrus"
)
var (
// Ei eiblog cache
Ei *Cache
// PagesCh regenerate pages chan
PagesCh = make(chan string, 2)
// PageSeries the page series regenerate flag
@@ -32,32 +27,8 @@ var (
// ArticleStartID article start id
ArticleStartID = 11
// TrashArticleExp trash article timeout
TrashArticleExp = time.Duration(-48) * time.Hour
)
func init() {
// init timezone
var err error
tools.TimeLocation, err = time.LoadLocation(config.Conf.General.Timezone)
if err != nil {
panic(err)
}
// Ei init
Ei = &Cache{
lock: sync.Mutex{},
TagArticles: make(map[string]model.SortedArticles),
ArticlesMap: make(map[string]*model.Article),
}
err = Ei.loadOrInit()
if err != nil {
panic(err)
}
go Ei.regeneratePages()
go Ei.timerClean()
go Ei.timerDisqus()
}
// Cache site-wide blog cache
type Cache struct {
lock sync.Mutex
@@ -76,10 +47,27 @@ type Cache struct {
ArticlesMap map[string]*model.Article // slug:article
}
// NewCache caches the whole blog's data
func NewCache() (*Cache, error) {
// Ei init
cache := &Cache{
lock: sync.Mutex{},
TagArticles: make(map[string]model.SortedArticles),
ArticlesMap: make(map[string]*model.Article),
}
err := cache.loadOrInit()
if err != nil {
return nil, err
}
// asynchronously render the series and archive pages
go cache.regeneratePages()
return cache, nil
}
// AddArticle adds an article
func (c *Cache) AddArticle(article *model.Article) error {
c.lock.Lock()
defer c.lock.Unlock()
func (cache *Cache) AddArticle(article *model.Article) error {
cache.lock.Lock()
defer cache.lock.Unlock()
// store
err := Store.InsertArticle(context.Background(), article, ArticleStartID)
@@ -91,32 +79,32 @@ func (c *Cache) AddArticle(article *model.Article) error {
return nil
}
// formally publish the article
c.refreshCache(article, false)
cache.refreshCache(article, false)
return nil
}
// RepArticle replaces an article
func (c *Cache) RepArticle(oldArticle, newArticle *model.Article) {
c.lock.Lock()
defer c.lock.Unlock()
func (cache *Cache) RepArticle(oldArticle, newArticle *model.Article) {
cache.lock.Lock()
defer cache.lock.Unlock()
c.ArticlesMap[newArticle.Slug] = newArticle
cache.ArticlesMap[newArticle.Slug] = newArticle
GenerateExcerptMarkdown(newArticle)
if newArticle.ID < ArticleStartID {
return
}
if oldArticle != nil { // remove the old article
c.refreshCache(oldArticle, true)
cache.refreshCache(oldArticle, true)
}
c.refreshCache(newArticle, false)
cache.refreshCache(newArticle, false)
}
// DelArticle deletes an article
func (c *Cache) DelArticle(id int) error {
c.lock.Lock()
defer c.lock.Unlock()
func (cache *Cache) DelArticle(id int) error {
cache.lock.Lock()
defer cache.lock.Unlock()
article, _ := c.FindArticleByID(id)
article, _ := cache.FindArticleByID(id)
if article == nil {
return nil
}
@@ -128,30 +116,30 @@ func (c *Cache) DelArticle(id int) error {
return err
}
// drop from tags,series,archives
c.refreshCache(article, true)
cache.refreshCache(article, true)
return nil
}
// AddSerie adds a series
func (c *Cache) AddSerie(serie *model.Serie) error {
c.lock.Lock()
defer c.lock.Unlock()
func (cache *Cache) AddSerie(serie *model.Serie) error {
cache.lock.Lock()
defer cache.lock.Unlock()
err := Store.InsertSerie(context.Background(), serie)
if err != nil {
return err
}
c.Series = append(c.Series, serie)
cache.Series = append(cache.Series, serie)
PagesCh <- PageSeries
return nil
}
// DelSerie deletes a series
func (c *Cache) DelSerie(id int) error {
c.lock.Lock()
defer c.lock.Unlock()
func (cache *Cache) DelSerie(id int) error {
cache.lock.Lock()
defer cache.lock.Unlock()
for i, serie := range c.Series {
for i, serie := range cache.Series {
if serie.ID == id {
if len(serie.Articles) > 0 {
return errors.New("请删除该专题下的所有文章")
@@ -160,8 +148,8 @@ func (c *Cache) DelSerie(id int) error {
if err != nil {
return err
}
c.Series[i] = nil
c.Series = append(c.Series[:i], c.Series[i+1:]...)
cache.Series[i] = nil
cache.Series = append(cache.Series[:i], cache.Series[i+1:]...)
PagesCh <- PageSeries
break
}
@@ -170,12 +158,12 @@ func (c *Cache) DelSerie(id int) error {
}
// PageArticleFE paginates articles for the front end
func (c *Cache) PageArticleFE(page int, pageSize int) (prev,
func (cache *Cache) PageArticleFE(page int, pageSize int) (prev,
next int, articles []*model.Article) {
var l int
for l = len(c.Articles); l > 0; l-- {
if c.Articles[l-1].ID >= ArticleStartID {
for l = len(cache.Articles); l > 0; l-- {
if cache.Articles[l-1].ID >= ArticleStartID {
break
}
}
@@ -200,12 +188,12 @@ func (c *Cache) PageArticleFE(page int, pageSize int) (prev,
if e > l {
e = l
}
articles = c.Articles[s:e]
articles = cache.Articles[s:e]
return
}
// PageArticleBE paginates articles for the admin back end
func (c *Cache) PageArticleBE(se int, kw string, draft, del bool, p,
func (cache *Cache) PageArticleBE(se int, kw string, draft, del bool, p,
n int) ([]*model.Article, int) {
search := store.SearchArticles{
@@ -238,8 +226,8 @@ func (c *Cache) PageArticleBE(se int, kw string, draft, del bool, p,
}
// FindArticleByID finds an article by ID
func (c *Cache) FindArticleByID(id int) (*model.Article, int) {
for i, article := range c.Articles {
func (cache *Cache) FindArticleByID(id int) (*model.Article, int) {
for i, article := range cache.Articles {
if article.ID == id {
return article, i
}
@@ -248,32 +236,32 @@ func (c *Cache) FindArticleByID(id int) (*model.Article, int) {
}
// refreshCache refreshes the cache
func (c *Cache) refreshCache(article *model.Article, del bool) {
func (cache *Cache) refreshCache(article *model.Article, del bool) {
if del {
_, idx := c.FindArticleByID(article.ID)
_, idx := cache.FindArticleByID(article.ID)
delete(c.ArticlesMap, article.Slug)
c.Articles = append(c.Articles[:idx], c.Articles[idx+1:]...)
delete(cache.ArticlesMap, article.Slug)
cache.Articles = append(cache.Articles[:idx], cache.Articles[idx+1:]...)
// remove from the linked list
c.recalcLinkedList(article, true)
cache.recalcLinkedList(article, true)
// remove from tag, serie and archive
c.redelArticle(article)
cache.redelArticle(article)
return
}
// add the article
defer GenerateExcerptMarkdown(article)
c.ArticlesMap[article.Slug] = article
c.Articles = append([]*model.Article{article}, c.Articles...)
sort.Sort(c.Articles)
cache.ArticlesMap[article.Slug] = article
cache.Articles = append([]*model.Article{article}, cache.Articles...)
sort.Sort(cache.Articles)
// add to the linked list
c.recalcLinkedList(article, false)
cache.recalcLinkedList(article, false)
// add to tag, serie and archive
c.readdArticle(article, true)
cache.readdArticle(article, true)
}
// recalcLinkedList recalculates the article linked list
func (c *Cache) recalcLinkedList(article *model.Article, del bool) {
func (cache *Cache) recalcLinkedList(article *model.Article, del bool) {
// delete operation
if del {
if article.Prev == nil && article.Next != nil {
@@ -287,56 +275,56 @@ func (c *Cache) recalcLinkedList(article *model.Article, del bool) {
return
}
// add operation
_, idx := c.FindArticleByID(article.ID)
if idx == 0 && c.Articles[idx+1].ID >= ArticleStartID {
article.Next = c.Articles[idx+1]
c.Articles[idx+1].Prev = article
} else if idx > 0 && c.Articles[idx-1].ID >= ArticleStartID {
article.Prev = c.Articles[idx-1]
if c.Articles[idx-1].Next != nil {
article.Next = c.Articles[idx-1].Next
c.Articles[idx-1].Next.Prev = article
_, idx := cache.FindArticleByID(article.ID)
if idx == 0 && cache.Articles[idx+1].ID >= ArticleStartID {
article.Next = cache.Articles[idx+1]
cache.Articles[idx+1].Prev = article
} else if idx > 0 && cache.Articles[idx-1].ID >= ArticleStartID {
article.Prev = cache.Articles[idx-1]
if cache.Articles[idx-1].Next != nil {
article.Next = cache.Articles[idx-1].Next
cache.Articles[idx-1].Next.Prev = article
}
c.Articles[idx-1].Next = article
cache.Articles[idx-1].Next = article
}
}
// readdArticle adds an article to tag, series and archive
func (c *Cache) readdArticle(article *model.Article, needSort bool) {
func (cache *Cache) readdArticle(article *model.Article, needSort bool) {
// tag
for _, tag := range article.Tags {
c.TagArticles[tag] = append(c.TagArticles[tag], article)
cache.TagArticles[tag] = append(cache.TagArticles[tag], article)
if needSort {
sort.Sort(c.TagArticles[tag])
sort.Sort(cache.TagArticles[tag])
}
}
// series
for i, serie := range c.Series {
for i, serie := range cache.Series {
if serie.ID != article.SerieID {
continue
}
c.Series[i].Articles = append(c.Series[i].Articles, article)
cache.Series[i].Articles = append(cache.Series[i].Articles, article)
if needSort {
sort.Sort(c.Series[i].Articles)
sort.Sort(cache.Series[i].Articles)
PagesCh <- PageSeries // rebuild series page
}
}
// archive
y, m, _ := article.CreatedAt.Date()
for i, archive := range c.Archives {
for i, archive := range cache.Archives {
ay, am, _ := archive.Time.Date()
if y != ay || m != am {
continue
}
c.Archives[i].Articles = append(c.Archives[i].Articles, article)
cache.Archives[i].Articles = append(cache.Archives[i].Articles, article)
if needSort {
sort.Sort(c.Archives[i].Articles)
sort.Sort(cache.Archives[i].Articles)
PagesCh <- PageArchive // rebuild archive page
}
return
}
// create a new archive
c.Archives = append(c.Archives, &model.Archive{
cache.Archives = append(cache.Archives, &model.Archive{
Time: article.CreatedAt,
Articles: model.SortedArticles{article},
})
@@ -346,25 +334,25 @@ func (c *Cache) readdArticle(article *model.Article, needSort bool) {
}
// redelArticle removes an article from tag, series and archive
func (c *Cache) redelArticle(article *model.Article) {
func (cache *Cache) redelArticle(article *model.Article) {
// tag
for _, tag := range article.Tags {
for i, v := range c.TagArticles[tag] {
for i, v := range cache.TagArticles[tag] {
if v == article {
c.TagArticles[tag] = append(c.TagArticles[tag][0:i], c.TagArticles[tag][i+1:]...)
if len(c.TagArticles[tag]) == 0 {
delete(c.TagArticles, tag)
cache.TagArticles[tag] = append(cache.TagArticles[tag][0:i], cache.TagArticles[tag][i+1:]...)
if len(cache.TagArticles[tag]) == 0 {
delete(cache.TagArticles, tag)
}
}
}
}
// serie
for i, serie := range c.Series {
for i, serie := range cache.Series {
if serie.ID == article.SerieID {
for j, v := range serie.Articles {
if v == article {
c.Series[i].Articles = append(c.Series[i].Articles[0:j],
c.Series[i].Articles[j+1:]...)
cache.Series[i].Articles = append(cache.Series[i].Articles[0:j],
cache.Series[i].Articles[j+1:]...)
PagesCh <- PageSeries
break
}
@@ -372,15 +360,15 @@ func (c *Cache) redelArticle(article *model.Article) {
}
}
// archive
for i, archive := range c.Archives {
for i, archive := range cache.Archives {
ay, am, _ := archive.Time.Date()
if y, m, _ := article.CreatedAt.Date(); ay == y && am == m {
for j, v := range archive.Articles {
if v == article {
c.Archives[i].Articles = append(c.Archives[i].Articles[0:j],
c.Archives[i].Articles[j+1:]...)
if len(c.Archives[i].Articles) == 0 {
c.Archives = append(c.Archives[:i], c.Archives[i+1:]...)
cache.Archives[i].Articles = append(cache.Archives[i].Articles[0:j],
cache.Archives[i].Articles[j+1:]...)
if len(cache.Archives[i].Articles) == 0 {
cache.Archives = append(cache.Archives[:i], cache.Archives[i+1:]...)
}
PagesCh <- PageArchive
break
@@ -391,7 +379,7 @@ func (c *Cache) redelArticle(article *model.Article) {
}
// loadOrInit loads data or initializes it
func (c *Cache) loadOrInit() error {
func (cache *Cache) loadOrInit() error {
// blogger
blogger := &model.Blogger{
BlogName: strings.Title(config.Conf.Account.Username),
@@ -404,7 +392,7 @@ func (c *Cache) loadOrInit() error {
if err != nil {
return err
}
c.Blogger = blogger
cache.Blogger = blogger
if created { // init articles: about blogroll
about := &model.Article{
ID: 1, // 固定ID
@@ -432,8 +420,7 @@ func (c *Cache) loadOrInit() error {
}
}
// account
pwd := tools.EncryptPasswd(config.Conf.Account.Username,
config.Conf.Account.Password)
pwd := tools.EncryptPasswd(config.Conf.Account.Username, config.Conf.Account.Password)
account := &model.Account{
Username: config.Conf.Account.Username,
@@ -443,13 +430,13 @@ func (c *Cache) loadOrInit() error {
if err != nil {
return err
}
c.Account = account
cache.Account = account
// series
series, err := Store.LoadAllSerie(context.Background())
if err != nil {
return err
}
c.Series = series
cache.Series = series
// all articles
search := store.SearchArticles{
Page: 1,
@@ -464,7 +451,7 @@ func (c *Cache) loadOrInit() error {
// render the page
GenerateExcerptMarkdown(v)
c.ArticlesMap[v.Slug] = v
cache.ArticlesMap[v.Slug] = v
// analyze the article
if v.ID < ArticleStartID {
continue
@@ -476,9 +463,9 @@ func (c *Cache) loadOrInit() error {
articles[i+1].ID >= ArticleStartID {
v.Next = articles[i+1]
}
c.readdArticle(v, false)
cache.readdArticle(v, false)
}
Ei.Articles = articles
cache.Articles = articles
// rebuild series and archive pages
PagesCh <- PageSeries
PagesCh <- PageArchive
@@ -486,15 +473,15 @@ func (c *Cache) loadOrInit() error {
}
// regeneratePages regenerates the series and archive pages
func (c *Cache) regeneratePages() {
func (cache *Cache) regeneratePages() {
for {
switch page := <-PagesCh; page {
case PageSeries:
sort.Sort(c.Series)
sort.Sort(cache.Series)
buf := bytes.Buffer{}
buf.WriteString(c.Blogger.SeriesSay)
buf.WriteString(cache.Blogger.SeriesSay)
buf.WriteString("\n\n")
for _, series := range c.Series {
for _, series := range cache.Series {
buf.WriteString(fmt.Sprintf("### %s{#toc-%d}", series.Name, series.ID))
buf.WriteByte('\n')
buf.WriteString(series.Desc)
@@ -507,16 +494,16 @@ func (c *Cache) regeneratePages() {
}
buf.WriteString("\n")
}
c.PageSeries = string(PageRender(buf.Bytes()))
cache.PageSeries = string(PageRender(buf.Bytes()))
case PageArchive:
sort.Sort(c.Archives)
sort.Sort(cache.Archives)
buf := bytes.Buffer{}
buf.WriteString(c.Blogger.ArchivesSay + "\n")
buf.WriteString(cache.Blogger.ArchivesSay + "\n")
var (
currentYear string
gt12Month = len(c.Archives) > 12
gt12Month = len(cache.Archives) > 12
)
for _, archive := range c.Archives {
for _, archive := range cache.Archives {
t := archive.Time.In(tools.TimeLocation)
if gt12Month {
year := t.Format("2006 年")
@@ -540,32 +527,7 @@ func (c *Cache) regeneratePages() {
}
}
}
c.PageArchives = string(PageRender(buf.Bytes()))
}
}
}
// timerClean periodically cleans up articles
func (c *Cache) timerClean() {
ticker := time.NewTicker(time.Hour)
for now := range ticker.C {
exp := now.Add(TrashArticleExp)
err := Store.CleanArticles(context.Background(), exp)
if err != nil {
logrus.Error("cache.timerClean.CleanArticles: ", err)
}
}
}
// timerDisqus periodic disqus operations
func (c *Cache) timerDisqus() {
ticker := time.NewTicker(5 * time.Hour)
for range ticker.C {
err := DisqusClient.PostsCount(c.ArticlesMap)
if err != nil {
logrus.Error("cache.timerDisqus.PostsCount: ", err)
cache.PageArchives = string(PageRender(buf.Bytes()))
}
}
}
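
Beyond renaming the receiver from c to cache, the structural change in this file moves startup work out of a package init() into an explicit NewCache constructor that returns an error, so the caller decides failure handling and start order. A compact sketch of that refactor, with trimmed fields and a stubbed loadOrInit:

package cache

import "sync"

// Cache mirrors the shape of the blog cache; fields trimmed.
type Cache struct {
	lock     sync.Mutex
	articles map[string]string
}

// NewCache replaces a package-level init(): a failure becomes a
// returned error instead of a panic at import time, and tests can
// construct isolated instances.
func NewCache() (*Cache, error) {
	c := &Cache{articles: make(map[string]string)}
	if err := c.loadOrInit(); err != nil {
		return nil, err
	}
	return c, nil
}

// loadOrInit is a stub standing in for loading the blogger,
// account, series and articles from the store.
func (c *Cache) loadOrInit() error { return nil }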


@@ -1,29 +1,75 @@
package internal
import (
"io/fs"
"path/filepath"
"strings"
"text/template"
"time"
"github.com/eiblog/eiblog/cmd/eiblog/config"
"github.com/eiblog/eiblog/cmd/eiblog/handler/internal/store"
"github.com/eiblog/eiblog/pkg/third/disqus"
"github.com/eiblog/eiblog/pkg/third/es"
"github.com/eiblog/eiblog/pkg/third/pinger"
"github.com/eiblog/eiblog/pkg/third/qiniu"
"github.com/eiblog/eiblog/tools"
"github.com/sirupsen/logrus"
)
var (
ESClient *es.ESClient
DisqusClient *disqus.DisqusClient
QiniuClient *qiniu.QiniuClient
Pinger *pinger.Pinger
Store store.Store
XMLTemplate *template.Template // template/*.xml templates
HTMLTemplate *template.Template // website HTML templates
Store store.Store // database storage
Ei *Cache // blog data cache
ESClient *es.ESClient // es client
DisqusClient *disqus.DisqusClient // disqus client
QiniuClient *qiniu.QiniuClient // qiniu client
Pinger *pinger.Pinger // pinger client
)
func init() {
var err error
ESClient, err = es.NewESClient(config.Conf.ESHost)
tools.TimeLocation, err = time.LoadLocation(config.Conf.General.Timezone)
if err != nil {
logrus.Fatal("init es client: ", err)
logrus.Fatal("init timezone: ", err)
}
// initialize template parsing
root := filepath.Join(config.EtcDir, "template", "*.xml")
XMLTemplate, err = template.New("eiblog").Funcs(tools.TplFuncMap).ParseGlob(root)
if err != nil {
logrus.Fatal("init xml template: ", err)
}
root = filepath.Join(config.EtcDir, "website")
files := tools.ReadDirFiles(root, func(fi fs.DirEntry) bool {
// should not read dir & .DS_Store
return strings.HasPrefix(fi.Name(), ".")
})
HTMLTemplate, err = template.New("eiblog").Funcs(tools.TplFuncMap).ParseFiles(files...)
if err != nil {
logrus.Fatal("init html template: ", err)
}
// initialize the database store
logrus.Info("store drivers: ", store.Drivers())
Store, err = store.NewStore(config.Conf.Database)
if err != nil {
logrus.Fatal("init store: ", err)
}
Ei, err = NewCache()
if err != nil {
logrus.Fatal("init blog cache: ", err)
}
if config.Conf.ESHost != "" {
ESClient, err = es.NewESClient(config.Conf.ESHost)
if err != nil {
logrus.Fatal("init es client: ", err)
}
}
DisqusClient, err = disqus.NewDisqusClient(config.Conf.Host, config.Conf.Disqus)
@@ -41,9 +87,6 @@ func init() {
logrus.Fatal("init pinger: ", err)
}
logrus.Info("store drivers: ", store.Drivers())
Store, err = store.NewStore(config.Conf.Database.Driver, config.Conf.Database.Source)
if err != nil {
logrus.Fatal("init store: ", err)
}
// start the timers
go startTimer()
}


@@ -6,6 +6,7 @@ import (
"testing"
"time"
"github.com/eiblog/eiblog/pkg/config"
"github.com/eiblog/eiblog/pkg/model"
)
@@ -19,7 +20,10 @@ var (
func init() {
var err error
store, err = NewStore("mongodb", "mongodb://127.0.0.1:27017")
store, err = NewStore(config.Database{
Driver: "mongodb",
Source: "mongodb://127.0.0.1:27017",
})
if err != nil {
panic(err)
}


@@ -8,6 +8,7 @@ import (
"sync"
"time"
"github.com/eiblog/eiblog/pkg/config"
"github.com/eiblog/eiblog/pkg/model"
)
@@ -99,13 +100,14 @@ func Drivers() []string {
}
// NewStore creates a new store
func NewStore(name string, source string) (Store, error) {
func NewStore(conf config.Database) (Store, error) {
storeMu.RLock()
driver, ok := stores[name]
driver, ok := stores[conf.Driver]
storeMu.RUnlock()
if !ok {
return nil, fmt.Errorf("store: unknown driver %q (forgotten import?)", name)
return nil, fmt.Errorf("store: unknown driver %q (forgotten import?)", conf.Driver)
}
return driver.Init(name, source)
return driver.Init(conf.Driver, conf.Source)
}
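
NewStore now takes the whole config.Database instead of two loose strings, but the lookup still goes through the same database/sql-style driver registry. A self-contained sketch of how a driver lands in the stores map that NewStore reads; the in-memory driver here is hypothetical:

package store

import "sync"

// Store and Driver are trimmed stand-ins for the real interfaces.
type Store interface{ Ping() error }

type Driver interface {
	Init(name, source string) (Store, error)
}

var (
	storeMu sync.RWMutex
	stores  = map[string]Driver{}
)

// Register is called from each driver package's init(), so a blank
// import of the driver package is enough to make it available.
func Register(name string, d Driver) {
	storeMu.Lock()
	defer storeMu.Unlock()
	stores[name] = d
}

type memStore struct{}

func (memStore) Ping() error { return nil }

type memDriver struct{}

func (memDriver) Init(name, source string) (Store, error) {
	return memStore{}, nil
}

func init() { Register("memory", memDriver{}) }

With this in place, NewStore(config.Database{Driver: "memory"}) resolves the driver, and an unknown name produces the "forgotten import?" error shown above.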


@@ -0,0 +1,160 @@
package internal
import (
"context"
"errors"
"os"
"path/filepath"
"time"
"github.com/eiblog/eiblog/cmd/eiblog/config"
"github.com/sirupsen/logrus"
)
func startTimer() {
err := generateOpensearch()
if err != nil {
logrus.Error("startTimer.generateOpensearch: ", err)
}
err = generateRobots()
if err != nil {
logrus.Error("startTimer.generateRobots: ", err)
}
err = generateCrossdomain()
if err != nil {
logrus.Error("startTimer.generateCrossdomain: ", err)
}
ticker := time.NewTicker(time.Hour)
for now := range ticker.C {
// generate feed & sitemap
if now.Hour()%4 == 0 {
err = generateFeed()
if err != nil {
logrus.Error("startTimer.generateFeed: ", err)
}
err = generateSitemap()
if err != nil {
logrus.Error("startTimer.generateSitemap: ", err)
}
}
// clean expired articles
exp := now.Add(-48 * time.Hour)
err := Store.CleanArticles(context.Background(), exp)
if err != nil {
logrus.Error("startTimer.CleanArticles: ", err)
}
// fetch disqus count
if now.Hour()%5 == 0 {
err = DisqusClient.PostsCount(Ei.ArticlesMap)
if err != nil {
logrus.Error("startTimer.PostsCount: ", err)
}
}
}
}
// generateFeed regenerates the feed
func generateFeed() error {
tpl := XMLTemplate.Lookup("feedTpl.xml")
if tpl == nil {
return errors.New("not found: feedTpl.xml")
}
_, _, articles := Ei.PageArticleFE(1, 20)
params := map[string]interface{}{
"Title": Ei.Blogger.BTitle,
"SubTitle": Ei.Blogger.SubTitle,
"Host": config.Conf.Host,
"FeedrURL": config.Conf.FeedRPC.FeedrURL,
"BuildDate": time.Now().Format(time.RFC1123Z),
"Articles": articles,
}
path := filepath.Join(config.EtcDir, "assets", "feed.xml")
f, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666)
if err != nil {
return err
}
defer f.Close()
return tpl.Execute(f, params)
}
// generateSitemap regenerates the sitemap
func generateSitemap() error {
tpl := XMLTemplate.Lookup("sitemapTpl.xml")
if tpl == nil {
return errors.New("not found: sitemapTpl.xml")
}
params := map[string]interface{}{
"Articles": Ei.Articles,
"Host": config.Conf.Host,
}
path := filepath.Join(config.EtcDir, "assets", "sitemap.xml")
f, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666)
if err != nil {
return err
}
defer f.Close()
return tpl.Execute(f, params)
}
// generateOpensearch generates opensearch.xml
func generateOpensearch() error {
tpl := XMLTemplate.Lookup("opensearchTpl.xml")
if tpl == nil {
return errors.New("not found: opensearchTpl.xml")
}
params := map[string]string{
"BTitle": Ei.Blogger.BTitle,
"SubTitle": Ei.Blogger.SubTitle,
"Host": config.Conf.Host,
}
path := filepath.Join(config.EtcDir, "assets", "opensearch.xml")
f, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666)
if err != nil {
return err
}
defer f.Close()
return tpl.Execute(f, params)
}
// generateRobots generates robots.txt
func generateRobots() error {
tpl := XMLTemplate.Lookup("robotsTpl.xml")
if tpl == nil {
return errors.New("not found: robotsTpl.xml")
}
params := map[string]string{
"Host": config.Conf.Host,
}
path := filepath.Join(config.EtcDir, "assets", "robots.txt")
f, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666)
if err != nil {
return err
}
defer f.Close()
return tpl.Execute(f, params)
}
// generateCrossdomain generates crossdomain.xml
func generateCrossdomain() error {
tpl := XMLTemplate.Lookup("crossdomainTpl.xml")
if tpl == nil {
return errors.New("not found: crossdomainTpl.xml")
}
params := map[string]string{
"Host": config.Conf.Host,
}
path := filepath.Join(config.EtcDir, "assets", "crossdomain.xml")
f, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666)
if err != nil {
return err
}
defer f.Close()
return tpl.Execute(f, params)
}
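
startTimer replaces the old self-rescheduling time.AfterFunc chains and per-task tickers with a single hourly ticker; lower-frequency jobs key off the wall-clock hour. A stripped-down sketch of that scheduling shape; note that now.Hour()%5 == 0 fires at 00:00, 05:00, 10:00, 15:00 and 20:00 each day, not strictly every five hours:

package main

import (
	"log"
	"time"
)

func main() {
	// One hourly ticker drives every periodic job.
	ticker := time.NewTicker(time.Hour)
	defer ticker.Stop()
	for now := range ticker.C {
		if now.Hour()%4 == 0 { // 00,04,08,12,16,20
			log.Println("regenerate feed & sitemap")
		}
		// hourly: drop trashed articles older than 48 hours
		log.Println("clean articles before", now.Add(-48*time.Hour).Format(time.RFC3339))
		if now.Hour()%5 == 0 { // 00,05,10,15,20
			log.Println("refresh disqus comment counts")
		}
	}
}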


@@ -5,7 +5,7 @@ import (
"context"
"encoding/json"
"fmt"
htemplate "html/template"
"html/template"
"net/http"
"strconv"
@@ -242,19 +242,19 @@ func renderHTMLAdminLayout(c *gin.Context, name string, data gin.H) {
c.Header("Content-Type", "text/html; charset=utf-8")
// special page
if name == "login.html" {
err := htmlTmpl.ExecuteTemplate(c.Writer, name, data)
err := internal.HTMLTemplate.ExecuteTemplate(c.Writer, name, data)
if err != nil {
panic(err)
}
return
}
buf := bytes.Buffer{}
err := htmlTmpl.ExecuteTemplate(&buf, name, data)
err := internal.HTMLTemplate.ExecuteTemplate(&buf, name, data)
if err != nil {
panic(err)
}
data["LayoutContent"] = htemplate.HTML(buf.String())
err = htmlTmpl.ExecuteTemplate(c.Writer, "adminLayout.html", data)
data["LayoutContent"] = template.HTML(buf.String())
err = internal.HTMLTemplate.ExecuteTemplate(c.Writer, "adminLayout.html", data)
if err != nil {
panic(err)
}


@@ -4,7 +4,7 @@ import (
"bytes"
"context"
"fmt"
htemplate "html/template"
"html/template"
"io"
"math/rand"
"net/http"
@@ -143,14 +143,14 @@ func handleSearchPage(c *gin.Context) {
params["Description"] = "站内搜索," + internal.Ei.Blogger.SubTitle
params["Path"] = ""
params["CurrentPage"] = "search-post"
q := strings.TrimSpace(c.Query("q"))
if q != "" {
params["Word"] = q
if q != "" && internal.ESClient != nil {
start, err := strconv.Atoi(c.Query("start"))
if start < 1 || err != nil {
start = 1
}
params["Word"] = q
vals := c.Request.URL.Query()
result, err := internal.ESClient.ElasticSearch(q, config.Conf.General.PageNum, start-1)
@@ -386,19 +386,19 @@ func renderHTMLHomeLayout(c *gin.Context, name string, data gin.H) {
c.Header("Content-Type", "text/html; charset=utf-8")
// special page
if name == "disqus.html" {
err := htmlTmpl.ExecuteTemplate(c.Writer, name, data)
err := internal.HTMLTemplate.ExecuteTemplate(c.Writer, name, data)
if err != nil {
panic(err)
}
return
}
buf := bytes.Buffer{}
err := htmlTmpl.ExecuteTemplate(&buf, name, data)
err := internal.HTMLTemplate.ExecuteTemplate(&buf, name, data)
if err != nil {
panic(err)
}
data["LayoutContent"] = htemplate.HTML(buf.String())
err = htmlTmpl.ExecuteTemplate(c.Writer, "homeLayout.html", data)
data["LayoutContent"] = template.HTML(buf.String())
err = internal.HTMLTemplate.ExecuteTemplate(c.Writer, "homeLayout.html", data)
if err != nil {
panic(err)
}
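
Both the admin and home renderers keep the same two-pass layout technique, now run against the shared internal.HTMLTemplate: execute the inner page into a buffer, wrap the output in template.HTML so the layout will not re-escape it, then execute the layout. A minimal standalone version with hypothetical inline templates:

package main

import (
	"bytes"
	"html/template"
	"os"
)

var tmpl = template.Must(template.New("site").Parse(`
{{define "page.html"}}<h1>{{.Title}}</h1>{{end}}
{{define "layout.html"}}<body>{{.LayoutContent}}</body>{{end}}
`))

func main() {
	data := map[string]interface{}{"Title": "hello"}

	// Pass 1: render the inner page into a buffer.
	buf := bytes.Buffer{}
	if err := tmpl.ExecuteTemplate(&buf, "page.html", data); err != nil {
		panic(err)
	}

	// Pass 2: mark the rendered page as trusted HTML so the layout
	// does not re-escape it, then render the enclosing layout.
	data["LayoutContent"] = template.HTML(buf.String())
	if err := tmpl.ExecuteTemplate(os.Stdout, "layout.html", data); err != nil {
		panic(err)
	}
}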


@@ -1,33 +1,9 @@
package page
import (
"io/fs"
"path/filepath"
"strings"
"text/template"
"github.com/eiblog/eiblog/cmd/eiblog/config"
"github.com/eiblog/eiblog/tools"
"github.com/gin-gonic/gin"
)
// htmlTmpl html template cache
var htmlTmpl *template.Template
func init() {
htmlTmpl = template.New("eiblog").Funcs(tools.TplFuncMap)
root := filepath.Join(config.EtcDir, "website")
files := tools.ReadDirFiles(root, func(fi fs.DirEntry) bool {
// should not read dir & .DS_Store
return strings.HasPrefix(fi.Name(), ".") || fi.IsDir()
})
_, err := htmlTmpl.ParseFiles(files...)
if err != nil {
panic(err)
}
}
// RegisterRoutes register routes
func RegisterRoutes(e *gin.Engine) {
e.NoRoute(handleNotFound)


@@ -25,6 +25,7 @@ func init() {
TplFuncMap["join"] = Join
TplFuncMap["isnotzero"] = IsNotZero
TplFuncMap["getavatar"] = GetAvatar
TplFuncMap["imgtonormal"] = ImgToNormal
}
// Str2html string to html