Mirror of https://github.com/eiblog/eiblog.git, synced 2026-02-17 03:42:26 +08:00
fix: 1. template read panic
2. optimize variable naming

pkg/cache/cache.go (vendored): 12 changed lines
@@ -25,9 +25,11 @@ var (
 	// Ei eiblog cache
 	Ei *Cache
 
-	// regenerate pages chan
+	// PagesCh regenerate pages chan
 	PagesCh = make(chan string, 2)
-	PageSeries = "series-md"
+	// PageSeries the page series regenerate flag
+	PageSeries = "series-md"
+	// PageArchive the page archive regenerate flag
 	PageArchive = "archive-md"
 
 	// ArticleStartID article start id
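The var block documents a small worker pattern: callers push a page flag onto PagesCh and a background loop regenerates that cached page. A self-contained sketch of that flow, with the real cache fields and render call replaced by prints and the worker run inline rather than in a goroutine:

```go
package main

import "fmt"

var (
	// PagesCh regenerate pages chan
	PagesCh = make(chan string, 2)
	// PageSeries the page series regenerate flag
	PageSeries = "series-md"
	// PageArchive the page archive regenerate flag
	PageArchive = "archive-md"
)

// regeneratePages drains the channel and rebuilds whichever cached page was requested.
func regeneratePages() {
	for flag := range PagesCh {
		switch flag {
		case PageSeries:
			fmt.Println("rebuild series page") // stand-in for c.PageSeries = string(render.PageRender(...))
		case PageArchive:
			fmt.Println("rebuild archive page") // stand-in for c.PageArchives = string(render.PageRender(...))
		}
	}
}

func main() {
	// the buffer of 2 lets both flags be queued before the worker runs
	PagesCh <- PageSeries
	PagesCh <- PageArchive
	close(PagesCh)
	regeneratePages()
}
```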
@@ -518,7 +520,7 @@ func (c *Cache) regeneratePages() {
 				}
 				buf.WriteString("\n")
 			}
-			c.PageSeries = string(render.RenderPage(buf.Bytes()))
+			c.PageSeries = string(render.PageRender(buf.Bytes()))
 		case PageArchive:
 			sort.Sort(c.Archives)
 			buf := bytes.Buffer{}
@@ -551,7 +553,7 @@ func (c *Cache) regeneratePages() {
 					}
 				}
 			}
-			c.PageArchives = string(render.RenderPage(buf.Bytes()))
+			c.PageArchives = string(render.PageRender(buf.Bytes()))
 		}
 	}
 }

pkg/cache/render/render.go (vendored): 16 changed lines
@@ -14,7 +14,7 @@ import (
 
 // blackfriday 配置
 const (
-	commonHtmlFlags = 0 |
+	commonHTMLFlags = 0 |
 		blackfriday.HTML_TOC |
 		blackfriday.HTML_USE_XHTML |
 		blackfriday.HTML_USE_SMARTYPANTS |
@@ -42,9 +42,9 @@ var (
 	regHeader = regexp.MustCompile("</nav></div>")
 )
 
-// RenderPage 渲染markdown
-func RenderPage(md []byte) []byte {
-	renderer := blackfriday.HtmlRenderer(commonHtmlFlags, "", "")
+// PageRender 渲染markdown
+func PageRender(md []byte) []byte {
+	renderer := blackfriday.HtmlRenderer(commonHTMLFlags, "", "")
 	return blackfriday.Markdown(md, renderer, commonExtensions)
 }
 
@@ -56,12 +56,12 @@ func GenerateExcerptMarkdown(article *model.Article) {
 		index := strings.Index(article.Content, "\r\n")
 		prefix := article.Content[len(blogapp.General.DescPrefix):index]
 
-		article.Desc = tools.IgnoreHtmlTag(prefix)
+		article.Desc = tools.IgnoreHTMLTag(prefix)
 		article.Content = article.Content[index:]
 	}
 
 	// 查找目录
-	content := RenderPage([]byte(article.Content))
+	content := PageRender([]byte(article.Content))
 	index := regHeader.FindIndex(content)
 	if index != nil {
 		article.Header = string(content[0:index[1]])
@@ -73,7 +73,7 @@ func GenerateExcerptMarkdown(article *model.Article) {
 	// excerpt
 	index = regIdentifier.FindStringIndex(article.Content)
 	if index != nil {
-		article.Excerpt = tools.IgnoreHtmlTag(article.Content[:index[0]])
+		article.Excerpt = tools.IgnoreHTMLTag(article.Content[:index[0]])
 		return
 	}
 	uc := []rune(article.Content)
@@ -81,5 +81,5 @@ func GenerateExcerptMarkdown(article *model.Article) {
 	if len(uc) < length {
 		length = len(uc)
 	}
-	article.Excerpt = tools.IgnoreHtmlTag(string(uc[0:length]))
+	article.Excerpt = tools.IgnoreHTMLTag(string(uc[0:length]))
 }
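For context, the renamed helper wraps blackfriday v1. A minimal sketch assuming the v1 API this hunk shows (HtmlRenderer and Markdown); the repo's commonExtensions value is not visible here, so the extension set below is an assumption:

```go
package main

import (
	"fmt"

	"github.com/russross/blackfriday"
)

const commonHTMLFlags = 0 |
	blackfriday.HTML_TOC |
	blackfriday.HTML_USE_XHTML |
	blackfriday.HTML_USE_SMARTYPANTS

// commonExtensions is assumed here; the repo defines its own set elsewhere in render.go.
const commonExtensions = 0 |
	blackfriday.EXTENSION_TABLES |
	blackfriday.EXTENSION_FENCED_CODE

// PageRender renders markdown to HTML, mirroring render.PageRender after the rename.
func PageRender(md []byte) []byte {
	renderer := blackfriday.HtmlRenderer(commonHTMLFlags, "", "")
	return blackfriday.Markdown(md, renderer, commonExtensions)
}

func main() {
	fmt.Printf("%s", PageRender([]byte("# Title\n\nsome *markdown*\n")))
}
```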
@@ -2,6 +2,7 @@
 package page
 
 import (
+	"io/fs"
 	"path/filepath"
 	"text/template"
 
@@ -17,10 +18,15 @@ var htmlTmpl *template.Template
 func init() {
 	htmlTmpl = template.New("eiblog").Funcs(tools.TplFuncMap)
 	root := filepath.Join(config.WorkDir, "website")
-	files := tools.ReadDirFiles(root, func(name string) bool {
+	files := tools.ReadDirFiles(root, func(fi fs.FileInfo) bool {
+		name := fi.Name()
 		if name == ".DS_Store" {
 			return true
 		}
+		// should not read template dir
+		if fi.IsDir() && name == "template" {
+			return true
+		}
 		return false
 	})
 	_, err := htmlTmpl.ParseFiles(files...)
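This init change is the part the commit message ties to the template read panic: the filter now receives the whole fs.FileInfo, so the website/template directory can be excluded before its contents ever reach ParseFiles, which a name-only filter could not distinguish from an ordinary file. A small runnable sketch of just that filter decision, against a throwaway directory layout (the layout and file names are made up for the demo):

```go
package main

import (
	"fmt"
	"io/fs"
	"io/ioutil"
	"os"
	"path/filepath"
)

func main() {
	root, _ := ioutil.TempDir("", "website")
	defer os.RemoveAll(root)
	os.Mkdir(filepath.Join(root, "template"), 0o755)                      // partials live here
	os.WriteFile(filepath.Join(root, "home.html"), []byte("home"), 0o644) // a page template

	skip := func(fi fs.FileInfo) bool {
		if fi.Name() == ".DS_Store" {
			return true
		}
		// should not read template dir: the check init() now adds
		return fi.IsDir() && fi.Name() == "template"
	}

	entries, _ := ioutil.ReadDir(root)
	for _, fi := range entries {
		fmt.Printf("%-10s skipped=%v\n", fi.Name(), skip(fi))
	}
}
```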
@@ -49,7 +49,7 @@ func checkESConfig() error {
 }
 
 // ElasticSearch 搜索文章
-func ElasticSearch(query string, size, from int) (*searchIndexResult, error) {
+func ElasticSearch(query string, size, from int) (*SearchIndexResult, error) {
 	if err := checkESConfig(); err != nil {
 		return nil, err
 	}
@@ -114,7 +114,7 @@ func ElasticAddIndex(article *model.Article) error {
 	img := tools.PickFirstImage(article.Content)
 	mapping := map[string]interface{}{
 		"title":   article.Title,
-		"content": tools.IgnoreHtmlTag(article.Content),
+		"content": tools.IgnoreHTMLTag(article.Content),
 		"slug":    article.Slug,
 		"tag":     article.Tags,
 		"img":     img,
@@ -241,8 +241,8 @@ func deleteIndexDocument(index, typ string, ids []string) error {
 	return nil
 }
 
-// searchIndexResult 查询结果
-type searchIndexResult struct {
+// SearchIndexResult 查询结果
+type SearchIndexResult struct {
 	Took float32 `json:"took"`
 	Hits struct {
 		Total int `json:"total"`
@@ -264,7 +264,7 @@ type searchIndexResult struct {
 }
 
 // indexQueryDSL 语句查询文档
-func indexQueryDSL(index, typ string, size, from int, dsl []byte) (*searchIndexResult, error) {
+func indexQueryDSL(index, typ string, size, from int, dsl []byte) (*SearchIndexResult, error) {
 	rawurl := fmt.Sprintf("%s/%s/%s/_search?size=%d&from=%d", config.Conf.ESHost,
 		index, typ, size, from)
 	resp, err := httpPost(rawurl, dsl)
@@ -276,7 +276,7 @@ func indexQueryDSL(index, typ string, size, from int, dsl []byte) (*searchIndexR
 	if err != nil {
 		return nil, err
 	}
-	result := &searchIndexResult{}
+	result := &SearchIndexResult{}
 	err = json.Unmarshal(data, result)
 	if err != nil {
 		return nil, err
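Exporting the result type matters because callers outside the package now receive it from ElasticSearch. A trimmed sketch of decoding into it, using only the fields visible in this diff; the json tag on Hits and the sample payload are assumptions:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// SearchIndexResult mirrors the now-exported result type (only the fields shown in the diff).
type SearchIndexResult struct {
	Took float32 `json:"took"`
	Hits struct {
		Total int `json:"total"`
	} `json:"hits"`
}

func main() {
	data := []byte(`{"took": 3.0, "hits": {"total": 2}}`) // made-up Elasticsearch response
	result := &SearchIndexResult{}
	if err := json.Unmarshal(data, result); err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(result.Took, result.Hits.Total)
}
```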
@@ -5,6 +5,7 @@ import (
 	"crypto/sha256"
 	"fmt"
 	"io"
+	"io/fs"
 	"io/ioutil"
 	"path"
 	"regexp"
@@ -22,13 +23,13 @@ func EncryptPasswd(name, pass string) string {
 }
 
 // ReadDirFiles 读取目录
-func ReadDirFiles(dir string, filter func(name string) bool) (files []string) {
+func ReadDirFiles(dir string, filter func(fi fs.FileInfo) bool) (files []string) {
 	fileInfos, err := ioutil.ReadDir(dir)
 	if err != nil {
 		return
 	}
 	for _, fi := range fileInfos {
-		if filter(fi.Name()) {
+		if filter(fi) {
 			continue
 		}
 		if fi.IsDir() {
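The same signature change lands here in tools, where the walker actually lives. A self-contained mirror of the patched ReadDirFiles; the recursive branch behind `if fi.IsDir()` is an assumption, since the diff cuts off before that body:

```go
package main

import (
	"fmt"
	"io/fs"
	"io/ioutil"
	"path/filepath"
)

// ReadDirFiles walks dir and returns file paths, skipping anything the filter rejects.
func ReadDirFiles(dir string, filter func(fi fs.FileInfo) bool) (files []string) {
	fileInfos, err := ioutil.ReadDir(dir)
	if err != nil {
		return
	}
	for _, fi := range fileInfos {
		if filter(fi) {
			continue
		}
		if fi.IsDir() {
			// assumed: recurse into subdirectories with the same filter
			files = append(files, ReadDirFiles(filepath.Join(dir, fi.Name()), filter)...)
			continue
		}
		files = append(files, filepath.Join(dir, fi.Name()))
	}
	return
}

func main() {
	files := ReadDirFiles(".", func(fi fs.FileInfo) bool { return fi.Name() == ".DS_Store" })
	fmt.Println(files)
}
```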
@@ -42,19 +43,19 @@ func ReadDirFiles(dir string, filter func(name string) bool) (files []string) {
 
 // 2016-10-22T07:03:01
 const (
-	JUST_NOW    = "几秒前"
-	MINUTES_AGO = "%d分钟前"
-	HOURS_AGO   = "%d小时前"
-	DAYS_AGO    = "%d天前"
-	MONTH_AGO   = "%d月前"
-	YEARS_AGO   = "%d年前"
+	JustNow    = "几秒前"
+	MinutesAgo = "%d分钟前"
+	HoursAgo   = "%d小时前"
+	DaysAgo    = "%d天前"
+	MonthAgo   = "%d月前"
+	YearsAgo   = "%d年前"
 )
 
 // ConvertStr 时间转换为间隔
 func ConvertStr(str string) string {
 	t, err := time.ParseInLocation("2006-01-02T15:04:05", str, time.UTC)
 	if err != nil {
-		return JUST_NOW
+		return JustNow
 	}
 	now := time.Now().UTC()
 	y1, m1, d1 := t.Date()
@@ -62,17 +63,17 @@ func ConvertStr(str string) string {
 	h1, mi1, s1 := t.Clock()
 	h2, mi2, s2 := now.Clock()
 	if y := y2 - y1; y > 1 || (y == 1 && m2-m1 >= 0) {
-		return fmt.Sprintf(YEARS_AGO, y)
+		return fmt.Sprintf(YearsAgo, y)
 	} else if m := y*12 + int(m2-m1); m > 1 || (m == 1 && d2-d1 >= 0) {
-		return fmt.Sprintf(MONTH_AGO, m)
+		return fmt.Sprintf(MonthAgo, m)
 	} else if d := m*dayIn(y1, m1) + d2 - d1; d > 1 || (d == 1 && h2-h1 >= 0) {
-		return fmt.Sprintf(DAYS_AGO, d)
+		return fmt.Sprintf(DaysAgo, d)
 	} else if h := d*24 + h2 - h1; h > 1 || (h == 1 && mi2-mi1 >= 0) {
-		return fmt.Sprintf(HOURS_AGO, h)
+		return fmt.Sprintf(HoursAgo, h)
 	} else if mi := h*60 + mi2 - mi1; mi > 1 || (mi == 1 && s2-s1 >= 0) {
-		return fmt.Sprintf(MINUTES_AGO, mi)
+		return fmt.Sprintf(MinutesAgo, mi)
 	}
-	return JUST_NOW
+	return JustNow
 }
 
 // dayIn 获取天数
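ConvertStr cascades from years down to minutes and falls back to JustNow; the rename only swaps SCREAMING_SNAKE_CASE for Go's MixedCaps. A rough, duration-based sketch of that cascade with the renamed constants (it simplifies the field-by-field date comparison the real function performs):

```go
package main

import (
	"fmt"
	"time"
)

const (
	JustNow    = "几秒前"
	MinutesAgo = "%d分钟前"
	HoursAgo   = "%d小时前"
	DaysAgo    = "%d天前"
)

// roughConvert picks the coarsest non-zero unit, approximating ConvertStr.
func roughConvert(t time.Time) string {
	d := time.Since(t)
	switch {
	case d >= 24*time.Hour:
		return fmt.Sprintf(DaysAgo, int(d.Hours())/24)
	case d >= time.Hour:
		return fmt.Sprintf(HoursAgo, int(d.Hours()))
	case d >= time.Minute:
		return fmt.Sprintf(MinutesAgo, int(d.Minutes()))
	default:
		return JustNow
	}
}

func main() {
	fmt.Println(roughConvert(time.Now().Add(-90 * time.Minute))) // prints 1小时前
}
```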
@@ -120,8 +121,8 @@ var (
 	regexpEnter = regexp.MustCompile(`\s+`)
 )
 
-// IgnoreHtmlTag 去掉 html tag
-func IgnoreHtmlTag(src string) string {
+// IgnoreHTMLTag 去掉 html tag
+func IgnoreHTMLTag(src string) string {
 	// 去除所有尖括号内的HTML代码
 	src = regexpBrackets.ReplaceAllString(src, "")
 	// 去除换行符
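IgnoreHTMLTag is the helper every IgnoreHtmlTag call site above now points at. A sketch of what it does, with regexpEnter taken from the hunk and regexpBrackets assumed, since its definition falls outside this diff:

```go
package main

import (
	"fmt"
	"regexp"
)

var (
	regexpBrackets = regexp.MustCompile(`<[^>]*>`) // assumed pattern: anything inside angle brackets
	regexpEnter    = regexp.MustCompile(`\s+`)
)

// IgnoreHTMLTag strips HTML tags and collapses whitespace runs to single spaces.
func IgnoreHTMLTag(src string) string {
	src = regexpBrackets.ReplaceAllString(src, "") // drop tag markup
	return regexpEnter.ReplaceAllString(src, " ")  // squash newlines and repeated spaces
}

func main() {
	fmt.Println(IgnoreHTMLTag("<p>hello\n<b>world</b></p>")) // prints: hello world
}
```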