// Copyright 2014 The Gogs Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

package markdown

import (
	"bytes"
	"fmt"
	"io"
	"net/url"
	"path"
	"path/filepath"
	"regexp"
	"strings"

	"github.com/Unknwon/com"
	"github.com/russross/blackfriday"
	"golang.org/x/net/html"

	"code.gitea.io/gitea/modules/base"
	"code.gitea.io/gitea/modules/markup"
	"code.gitea.io/gitea/modules/setting"
)

// Issue name styles
const (
	IssueNameStyleNumeric      = "numeric"
	IssueNameStyleAlphanumeric = "alphanumeric"
)

// IsMarkdownFile reports whether name looks like a Markdown file
// based on its extension.
func IsMarkdownFile(name string) bool {
	extension := strings.ToLower(filepath.Ext(name))
	for _, ext := range setting.Markdown.FileExtensions {
		if strings.ToLower(ext) == extension {
			return true
		}
	}
	return false
}
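
// Illustrative usage (a sketch; the result depends on the configured
// setting.Markdown.FileExtensions, assumed here to include ".md"):
//
//	IsMarkdownFile("README.md") // true
//	IsMarkdownFile("main.go")   // false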

var (
	// MentionPattern matches strings that mention someone, e.g. @Unknwon
	MentionPattern = regexp.MustCompile(`(\s|^|\W)@[0-9a-zA-Z-_\.]+`)

	// IssueNumericPattern matches strings that reference a numeric issue, e.g. #1287
	IssueNumericPattern = regexp.MustCompile(`( |^|\()#[0-9]+\b`)
	// IssueAlphanumericPattern matches strings that reference an alphanumeric issue, e.g. ABC-1234
	IssueAlphanumericPattern = regexp.MustCompile(`( |^|\()[A-Z]{1,10}-[1-9][0-9]*\b`)
	// CrossReferenceIssueNumericPattern matches strings that reference a numeric issue in a different repository,
	// e.g. gogits/gogs#12345
	CrossReferenceIssueNumericPattern = regexp.MustCompile(`( |^)[0-9a-zA-Z]+/[0-9a-zA-Z]+#[0-9]+\b`)

	// Sha1CurrentPattern matches strings that represent a commit SHA, e.g. d8a994ef243349f321568f9e36d5c3f444b99cae
	// FIXME: this pattern matches pure numbers as well; renderSha1CurrentPattern currently works
	// around that by checking whether the matched string converts to a number.
	Sha1CurrentPattern = regexp.MustCompile(`(?:^|\s|\()([0-9a-f]{40})\b`)

	// ShortLinkPattern matches the short but difficult-to-parse [[name|link|arg=test]] syntax
	ShortLinkPattern = regexp.MustCompile(`(\[\[.*\]\]\w*)`)

	// AnySHA1Pattern splits a URL containing a SHA into its parts
	AnySHA1Pattern = regexp.MustCompile(`(http\S*)://(\S+)/(\S+)/(\S+)/(\S+)/([0-9a-f]{40})(?:/?([^#\s]+)?(?:#(\S+))?)?`)

	// IssueFullPattern splits issue (and pull request) URLs into parts
	IssueFullPattern = regexp.MustCompile(`(?:^|\s|\()(http\S*)://((?:[^\s/]+/)+)((?:\w{1,10}-)?[1-9][0-9]*)([\?|#]\S+.(\S+)?)?\b`)

	validLinksPattern = regexp.MustCompile(`^[a-z][\w-]+://`)
)

// isLink reports whether the link is in a valid format.
func isLink(link []byte) bool {
	return validLinksPattern.Match(link)
}

// FindAllMentions matches mention patterns in the given content
// and returns a list of the found user names without the @ prefix.
func FindAllMentions(content string) []string {
	mentions := MentionPattern.FindAllString(content, -1)
	for i := range mentions {
		mentions[i] = mentions[i][strings.Index(mentions[i], "@")+1:] // Strip @ character
	}
	return mentions
}
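
// Illustrative usage (a sketch; the user names are made up):
//
//	FindAllMentions("@alice please review, cc @bob") // []string{"alice", "bob"}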

// Renderer is an extended version of the underlying render object.
type Renderer struct {
	blackfriday.Renderer
	urlPrefix      string
	isWikiMarkdown bool
}

// Link defines how formal links should be processed to produce corresponding HTML elements.
func (r *Renderer) Link(out *bytes.Buffer, link []byte, title []byte, content []byte) {
	if len(link) > 0 && !isLink(link) {
		if link[0] != '#' {
			lnk := string(link)
			if r.isWikiMarkdown {
				lnk = URLJoin("wiki", lnk)
			}
			mLink := URLJoin(r.urlPrefix, lnk)
			link = []byte(mLink)
		}
	}

	r.Renderer.Link(out, link, title, content)
}

// List renders Markdown bullet and numbered lists to HTML
func (r *Renderer) List(out *bytes.Buffer, text func() bool, flags int) {
	marker := out.Len()
	if out.Len() > 0 {
		out.WriteByte('\n')
	}

	if flags&blackfriday.LIST_TYPE_DEFINITION != 0 {
		out.WriteString("<dl>")
	} else if flags&blackfriday.LIST_TYPE_ORDERED != 0 {
		out.WriteString("<ol class='ui list'>")
	} else {
		out.WriteString("<ul class='ui list'>")
	}
	if !text() {
		out.Truncate(marker)
		return
	}
	if flags&blackfriday.LIST_TYPE_DEFINITION != 0 {
		out.WriteString("</dl>\n")
	} else if flags&blackfriday.LIST_TYPE_ORDERED != 0 {
		out.WriteString("</ol>\n")
	} else {
		out.WriteString("</ul>\n")
	}
}

// ListItem defines how list items should be processed to produce corresponding HTML elements.
func (r *Renderer) ListItem(out *bytes.Buffer, text []byte, flags int) {
	// Detect task list items and render them as checkboxes.
	prefix := ""
	if bytes.HasPrefix(text, []byte("<p>")) {
		prefix = "<p>"
	}
	switch {
	case bytes.HasPrefix(text, []byte(prefix+"[ ] ")):
		text = append([]byte(`<span class="ui fitted disabled checkbox"><input type="checkbox" disabled="disabled" /><label /></span>`), text[3+len(prefix):]...)
		if prefix != "" {
			text = bytes.Replace(text, []byte(prefix), []byte{}, 1)
		}
	case bytes.HasPrefix(text, []byte(prefix+"[x] ")):
		text = append([]byte(`<span class="ui checked fitted disabled checkbox"><input type="checkbox" checked="" disabled="disabled" /><label /></span>`), text[3+len(prefix):]...)
		if prefix != "" {
			text = bytes.Replace(text, []byte(prefix), []byte{}, 1)
		}
	}
	r.Renderer.ListItem(out, text, flags)
}

// Note: these are package-level variables to improve performance and
// reduce memory allocation at runtime, since they are constant literals.
var (
	svgSuffix         = []byte(".svg")
	svgSuffixWithMark = []byte(".svg?")
)

// Image defines how images should be processed to produce corresponding HTML elements.
func (r *Renderer) Image(out *bytes.Buffer, link []byte, title []byte, alt []byte) {
	prefix := r.urlPrefix
	if r.isWikiMarkdown {
		prefix = URLJoin(prefix, "wiki", "src")
	}
	prefix = strings.Replace(prefix, "/src/", "/raw/", 1)
	if len(link) > 0 {
		if isLink(link) {
			// External link with .svg suffix usually means CI status.
			// TODO: define a keyword to allow non-SVG images to render as external links.
			if bytes.HasSuffix(link, svgSuffix) || bytes.Contains(link, svgSuffixWithMark) {
				r.Renderer.Image(out, link, title, alt)
				return
			}
		} else {
			lnk := string(link)
			lnk = URLJoin(prefix, lnk)
			lnk = strings.Replace(lnk, " ", "+", -1)
			link = []byte(lnk)
		}
	}

	out.WriteString(`<a href="`)
	out.Write(link)
	out.WriteString(`">`)
	r.Renderer.Image(out, link, title, alt)
	out.WriteString("</a>")
}

// cutoutVerbosePrefix cuts out the URL prefix, including any sub-path, to
// return a clean, unified string of the request URL path.
func cutoutVerbosePrefix(prefix string) string {
	if len(prefix) == 0 || prefix[0] != '/' {
		return prefix
	}
	count := 0
	for i := 0; i < len(prefix); i++ {
		if prefix[i] == '/' {
			count++
		}
		if count >= 3+setting.AppSubURLDepth {
			return prefix[:i]
		}
	}
	return prefix
}
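
// Illustrative result (a sketch; assumes setting.AppSubURLDepth == 0, i.e. no sub-path configured):
//
//	cutoutVerbosePrefix("/user/repo/src/master") // "/user/repo"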

// URLJoin joins URL components, like path.Join does, but preserves the
// protocol separator (e.g. "https://") and resolves "." and ".." elements.
func URLJoin(elem ...string) string {
	res := ""
	last := len(elem) - 1
	for i, item := range elem {
		res += item
		if i != last && !strings.HasSuffix(res, "/") {
			res += "/"
		}
	}
	cwdIndex := strings.Index(res, "/./")
	for cwdIndex != -1 {
		res = strings.Replace(res, "/./", "/", 1)
		cwdIndex = strings.Index(res, "/./")
	}
	upIndex := strings.Index(res, "/..")
	for upIndex != -1 {
		res = strings.Replace(res, "/..", "", 1)
		prevStart := -1
		for i := upIndex - 1; i >= 0; i-- {
			if res[i] == '/' {
				prevStart = i
				break
			}
		}
		if prevStart != -1 {
			res = res[:prevStart] + res[upIndex:]
		}
		upIndex = strings.Index(res, "/..")
	}
	return res
}
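
// Illustrative usage (a sketch; the host name is made up):
//
//	URLJoin("https://try.gitea.io", "owner", "repo")  // "https://try.gitea.io/owner/repo"
//	URLJoin("https://try.gitea.io/owner/", "./repo")  // "https://try.gitea.io/owner/repo"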

// RenderIssueIndexPattern renders issue indexes to corresponding links.
func RenderIssueIndexPattern(rawBytes []byte, urlPrefix string, metas map[string]string) []byte {
	urlPrefix = cutoutVerbosePrefix(urlPrefix)

	pattern := IssueNumericPattern
	if metas["style"] == IssueNameStyleAlphanumeric {
		pattern = IssueAlphanumericPattern
	}

	ms := pattern.FindAll(rawBytes, -1)
	for _, m := range ms {
		if m[0] == ' ' || m[0] == '(' {
			m = m[1:] // ignore leading space or opening parenthesis
		}
		var link string
		if metas == nil {
			link = fmt.Sprintf(`<a href="%s">%s</a>`, URLJoin(urlPrefix, "issues", string(m[1:])), m)
		} else {
			// Support for external issue tracker
			if metas["style"] == IssueNameStyleAlphanumeric {
				metas["index"] = string(m)
			} else {
				metas["index"] = string(m[1:])
			}
			link = fmt.Sprintf(`<a href="%s">%s</a>`, com.Expand(metas["format"], metas), m)
		}
		rawBytes = bytes.Replace(rawBytes, m, []byte(link), 1)
	}
	return rawBytes
}
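
// Illustrative transformation (a sketch; the prefix is made up, metas is nil,
// and no sub-path is assumed to be configured):
//
//	RenderIssueIndexPattern([]byte("see #1287"), "/user/repo", nil)
//	// -> see <a href="/user/repo/issues/1287">#1287</a>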

// IsSameDomain checks whether the given URL string has the same hostname as the current Gitea instance
func IsSameDomain(s string) bool {
	if strings.HasPrefix(s, "/") {
		return true
	}
	if uapp, err := url.Parse(setting.AppURL); err == nil {
		if u, err := url.Parse(s); err == nil {
			return u.Host == uapp.Host
		}
		return false
	}
	return false
}
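
// Illustrative behaviour (a sketch; assumes setting.AppURL is "https://try.gitea.io/"):
//
//	IsSameDomain("/user/repo")             // true (relative URLs count as same domain)
//	IsSameDomain("https://try.gitea.io/x") // true
//	IsSameDomain("https://example.com/x")  // false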

// renderFullSha1Pattern renders URLs that contain a SHA to corresponding links
func renderFullSha1Pattern(rawBytes []byte, urlPrefix string) []byte {
	ms := AnySHA1Pattern.FindAllSubmatch(rawBytes, -1)
	for _, m := range ms {
		all := m[0]
		protocol := string(m[1])
		paths := string(m[2])
		path := protocol + "://" + paths
		author := string(m[3])
		repoName := string(m[4])
		path = URLJoin(path, author, repoName)
		ltype := "src"
		itemType := m[5]
		if IsSameDomain(paths) {
			ltype = string(itemType)
		} else if string(itemType) == "commit" {
			ltype = "commit"
		}
		sha := m[6]
		var subtree string
		if len(m) > 7 && len(m[7]) > 0 {
			subtree = string(m[7])
		}
		var line []byte
		if len(m) > 8 && len(m[8]) > 0 {
			line = m[8]
		}
		urlSuffix := ""
		text := base.ShortSha(string(sha))
		if subtree != "" {
			urlSuffix = "/" + subtree
			text += urlSuffix
		}
		if line != nil {
			value := string(line)
			urlSuffix += "#"
			urlSuffix += value
			text += " ("
			text += value
			text += ")"
		}
		rawBytes = bytes.Replace(rawBytes, all, []byte(fmt.Sprintf(
			`<a href="%s">%s</a>`, URLJoin(path, ltype, string(sha))+urlSuffix, text)), -1)
	}
	return rawBytes
}
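
// Illustrative transformation (a sketch; the host and repository are made up, and
// base.ShortSha is assumed to truncate the SHA to ten characters):
//
//	in:  https://example.com/user/repo/commit/d8a994ef243349f321568f9e36d5c3f444b99cae
//	out: <a href="https://example.com/user/repo/commit/d8a994ef243349f321568f9e36d5c3f444b99cae">d8a994ef24</a>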

// renderFullIssuePattern renders issue-like URLs to corresponding links
func renderFullIssuePattern(rawBytes []byte, urlPrefix string) []byte {
	ms := IssueFullPattern.FindAllSubmatch(rawBytes, -1)
	for _, m := range ms {
		all := m[0]
		protocol := string(m[1])
		paths := bytes.Split(m[2], []byte("/"))
		paths = paths[:len(paths)-1]
		if bytes.HasPrefix(paths[0], []byte("gist.")) {
			continue
		}
		path := protocol + "://" + string(m[2])
		id := string(m[3])
		path = URLJoin(path, id)
		var comment []byte
		if len(m) > 3 {
			comment = m[4]
		}
		urlSuffix := ""
		text := "#" + id
		if comment != nil {
			urlSuffix += string(comment)
			text += " <i class='comment icon'></i>"
		}
		rawBytes = bytes.Replace(rawBytes, all, []byte(fmt.Sprintf(
			`<a href="%s%s">%s</a>`, path, urlSuffix, text)), -1)
	}
	return rawBytes
}

func firstIndexOfByte(sl []byte, target byte) int {
	for i := 0; i < len(sl); i++ {
		if sl[i] == target {
			return i
		}
	}
	return -1
}

func lastIndexOfByte(sl []byte, target byte) int {
	for i := len(sl) - 1; i >= 0; i-- {
		if sl[i] == target {
			return i
		}
	}
	return -1
}

// RenderShortLinks processes [[shortlink]] syntax.
//
// When noLink is true, no link tag is produced and the whole [[...]] is
// simply replaced with its content text.
//
// isWikiMarkdown selects the URL prefix used for relative links.
func RenderShortLinks(rawBytes []byte, urlPrefix string, noLink bool, isWikiMarkdown bool) []byte {
	ms := ShortLinkPattern.FindAll(rawBytes, -1)
	for _, m := range ms {
		orig := bytes.TrimSpace(m)
		m = orig[2:]
		tailPos := lastIndexOfByte(m, ']') + 1
		tail := []byte{}
		if tailPos < len(m) {
			tail = m[tailPos:]
			m = m[:tailPos-1]
		}
		m = m[:len(m)-2]
		props := map[string]string{}

		// MediaWiki uses [[link|text]], while GitHub uses [[text|link]].
		// That makes page handling awkward, but we prefer the GitHub syntax
		// and fall back to MediaWiki only when it is obvious from the
		// text and link contents.
		sl := bytes.Split(m, []byte("|"))
		for _, v := range sl {
			switch bytes.Count(v, []byte("=")) {

			// Piped args without an = sign; these are the mandatory arguments
			case 0:
				{
					sv := string(v)
					if props["name"] == "" {
						if isLink(v) {
							// If we clearly see it is a link, we save it so.
							// But first we need to ensure that, if both mandatory args
							// look like links, we stick to the GitHub syntax.
							if props["link"] != "" {
								props["name"] = props["link"]
							}

							props["link"] = strings.TrimSpace(sv)
						} else {
							props["name"] = sv
						}
					} else {
						props["link"] = strings.TrimSpace(sv)
					}
				}

			// Piped args with an = sign; these are the optional arguments
			case 1:
				{
					sep := firstIndexOfByte(v, '=')
					key, val := string(v[:sep]), html.UnescapeString(string(v[sep+1:]))
					lastCharIndex := len(val) - 1
					if (val[0] == '"' || val[0] == '\'') && (val[lastCharIndex] == '"' || val[lastCharIndex] == '\'') {
						val = val[1:lastCharIndex]
					}
					props[key] = val
				}
			}
		}

		var name string
		var link string
		if props["link"] != "" {
			link = props["link"]
		} else if props["name"] != "" {
			link = props["name"]
		}
		if props["title"] != "" {
			name = props["title"]
		} else if props["name"] != "" {
			name = props["name"]
		} else {
			name = link
		}

		name += string(tail)
		image := false
		ext := filepath.Ext(string(link))
		if ext != "" {
			switch ext {
			case ".jpg", ".jpeg", ".png", ".tif", ".tiff", ".webp", ".gif", ".bmp", ".ico", ".svg":
				{
					image = true
				}
			}
		}
		absoluteLink := isLink([]byte(link))
		if !absoluteLink {
			link = strings.Replace(link, " ", "+", -1)
		}
		if image {
			if !absoluteLink {
				if IsSameDomain(urlPrefix) {
					urlPrefix = strings.Replace(urlPrefix, "/src/", "/raw/", 1)
				}
				if isWikiMarkdown {
					link = URLJoin("wiki", "raw", link)
				}
				link = URLJoin(urlPrefix, link)
			}
			title := props["title"]
			if title == "" {
				title = props["alt"]
			}
			if title == "" {
				title = path.Base(string(name))
			}
			alt := props["alt"]
			if alt == "" {
				alt = name
			}
			if alt != "" {
				alt = `alt="` + alt + `"`
			}
			name = fmt.Sprintf(`<img src="%s" %s title="%s" />`, link, alt, title)
		} else if !absoluteLink {
			if isWikiMarkdown {
				link = URLJoin("wiki", link)
			}
			link = URLJoin(urlPrefix, link)
		}
		if noLink {
			rawBytes = bytes.Replace(rawBytes, orig, []byte(name), -1)
		} else {
			rawBytes = bytes.Replace(rawBytes, orig,
				[]byte(fmt.Sprintf(`<a href="%s">%s</a>`, link, name)), -1)
		}
	}
	return rawBytes
}
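
// Illustrative transformation (a sketch; urlPrefix "/user/repo" and isWikiMarkdown=true are assumed):
//
//	RenderShortLinks([]byte("[[Home]]"), "/user/repo", false, true)
//	// -> <a href="/user/repo/wiki/Home">Home</a>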

// RenderCrossReferenceIssueIndexPattern renders issue indexes from other repositories to corresponding links.
func RenderCrossReferenceIssueIndexPattern(rawBytes []byte, urlPrefix string, metas map[string]string) []byte {
	ms := CrossReferenceIssueNumericPattern.FindAll(rawBytes, -1)
	for _, m := range ms {
		if m[0] == ' ' || m[0] == '(' {
			m = m[1:] // ignore leading space or opening parenthesis
		}

		repo := string(bytes.Split(m, []byte("#"))[0])
		issue := string(bytes.Split(m, []byte("#"))[1])

		link := fmt.Sprintf(`<a href="%s">%s</a>`, URLJoin(setting.AppURL, repo, "issues", issue), m)
		rawBytes = bytes.Replace(rawBytes, m, []byte(link), 1)
	}
	return rawBytes
}
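
// Illustrative transformation (a sketch; assumes setting.AppURL is "https://try.gitea.io/"):
//
//	in:  gogits/gogs#12345
//	out: <a href="https://try.gitea.io/gogits/gogs/issues/12345">gogits/gogs#12345</a>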

// renderSha1CurrentPattern renders SHA1 strings to corresponding links, assuming they refer to the current repository.
func renderSha1CurrentPattern(rawBytes []byte, urlPrefix string) []byte {
	ms := Sha1CurrentPattern.FindAllSubmatch(rawBytes, -1)
	for _, m := range ms {
		hash := m[1]
		if com.StrTo(hash).MustInt() > 0 {
			continue
		}
		rawBytes = bytes.Replace(rawBytes, hash, []byte(fmt.Sprintf(
			`<a href="%s">%s</a>`, URLJoin(urlPrefix, "commit", string(hash)), base.ShortSha(string(hash)))), -1)
	}
	return rawBytes
}
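
// Illustrative transformation (a sketch; urlPrefix "/user/repo" is assumed and
// base.ShortSha is assumed to truncate the SHA to ten characters):
//
//	in:  d8a994ef243349f321568f9e36d5c3f444b99cae
//	out: <a href="/user/repo/commit/d8a994ef243349f321568f9e36d5c3f444b99cae">d8a994ef24</a>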

// RenderSpecialLink renders mentions, indexes and SHA1 strings to corresponding links.
func RenderSpecialLink(rawBytes []byte, urlPrefix string, metas map[string]string, isWikiMarkdown bool) []byte {
	ms := MentionPattern.FindAll(rawBytes, -1)
	for _, m := range ms {
		m = m[bytes.Index(m, []byte("@")):]
		rawBytes = bytes.Replace(rawBytes, m,
			[]byte(fmt.Sprintf(`<a href="%s">%s</a>`, URLJoin(setting.AppURL, string(m[1:])), m)), -1)
	}

	rawBytes = RenderShortLinks(rawBytes, urlPrefix, false, isWikiMarkdown)
	rawBytes = RenderIssueIndexPattern(rawBytes, urlPrefix, metas)
	rawBytes = RenderCrossReferenceIssueIndexPattern(rawBytes, urlPrefix, metas)
	rawBytes = renderFullSha1Pattern(rawBytes, urlPrefix)
	rawBytes = renderSha1CurrentPattern(rawBytes, urlPrefix)
	rawBytes = renderFullIssuePattern(rawBytes, urlPrefix)
	return rawBytes
}

// RenderRaw renders Markdown to HTML without handling special links.
func RenderRaw(body []byte, urlPrefix string, wikiMarkdown bool) []byte {
	htmlFlags := 0
	htmlFlags |= blackfriday.HTML_SKIP_STYLE
	htmlFlags |= blackfriday.HTML_OMIT_CONTENTS
	renderer := &Renderer{
		Renderer:       blackfriday.HtmlRenderer(htmlFlags, "", ""),
		urlPrefix:      urlPrefix,
		isWikiMarkdown: wikiMarkdown,
	}

	// set up the parser
	extensions := 0
	extensions |= blackfriday.EXTENSION_NO_INTRA_EMPHASIS
	extensions |= blackfriday.EXTENSION_TABLES
	extensions |= blackfriday.EXTENSION_FENCED_CODE
	extensions |= blackfriday.EXTENSION_STRIKETHROUGH
	extensions |= blackfriday.EXTENSION_NO_EMPTY_LINE_BEFORE_BLOCK

	if setting.Markdown.EnableHardLineBreak {
		extensions |= blackfriday.EXTENSION_HARD_LINE_BREAK
	}

	body = blackfriday.Markdown(body, renderer, extensions)
	return body
}
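
// Illustrative usage (a sketch; the exact HTML depends on the blackfriday version in use):
//
//	RenderRaw([]byte("**bold**"), "/user/repo", false)
//	// -> <p><strong>bold</strong></p>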

var (
	leftAngleBracket  = []byte("</")
	rightAngleBracket = []byte(">")
)

var noEndTags = []string{"img", "input", "br", "hr"}
2016-02-21 01:10:05 +03:00
|
|
|
// PostProcess treats different types of HTML differently,
|
2015-03-24 01:32:24 +03:00
|
|
|
// and only renders special links for plain text blocks.
|
2017-02-24 17:59:56 +03:00
|
|
|
func PostProcess(rawHTML []byte, urlPrefix string, metas map[string]string, isWikiMarkdown bool) []byte {
|
2015-11-20 13:37:51 +03:00
|
|
|
startTags := make([]string, 0, 5)
|
2015-03-09 07:14:50 +03:00
|
|
|
var buf bytes.Buffer
|
2016-11-25 04:58:05 +03:00
|
|
|
tokenizer := html.NewTokenizer(bytes.NewReader(rawHTML))
|
2015-11-20 13:37:51 +03:00
|
|
|
|
|
|
|
OUTER_LOOP:
|
2015-03-09 07:14:50 +03:00
|
|
|
for html.ErrorToken != tokenizer.Next() {
|
|
|
|
token := tokenizer.Token()
|
|
|
|
switch token.Type {
|
2015-03-24 01:32:24 +03:00
|
|
|
case html.TextToken:
|
2017-02-24 17:59:56 +03:00
|
|
|
buf.Write(RenderSpecialLink([]byte(token.String()), urlPrefix, metas, isWikiMarkdown))
|
2015-03-24 01:32:24 +03:00
|
|
|
|
|
|
|
case html.StartTagToken:
|
|
|
|
buf.WriteString(token.String())
|
|
|
|
tagName := token.Data
|
|
|
|
// If this is an excluded tag, we skip processing all output until a close tag is encountered.
|
|
|
|
if strings.EqualFold("a", tagName) || strings.EqualFold("code", tagName) || strings.EqualFold("pre", tagName) {
|
2015-11-22 05:06:11 +03:00
|
|
|
stackNum := 1
|
2015-03-24 01:32:24 +03:00
|
|
|
for html.ErrorToken != tokenizer.Next() {
|
|
|
|
token = tokenizer.Token()
|
2015-11-20 13:37:51 +03:00
|
|
|
|
2015-03-24 01:32:24 +03:00
|
|
|
// Copy the token to the output verbatim
|
2017-02-24 17:59:56 +03:00
|
|
|
buf.Write(RenderShortLinks([]byte(token.String()), urlPrefix, true, isWikiMarkdown))
|
2015-11-22 05:06:11 +03:00
|
|
|
|
2017-04-24 07:18:36 +03:00
|
|
|
if token.Type == html.StartTagToken && !com.IsSliceContainsStr(noEndTags, token.Data) {
|
|
|
|
stackNum++
|
2015-11-22 05:06:11 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
// If this is the close tag to the outer-most, we are done
|
2016-02-20 01:39:50 +03:00
|
|
|
if token.Type == html.EndTagToken {
|
2015-11-22 05:06:11 +03:00
|
|
|
stackNum--
|
|
|
|
|
2016-02-20 01:39:50 +03:00
|
|
|
if stackNum <= 0 && strings.EqualFold(tagName, token.Data) {
|
2015-11-22 05:06:11 +03:00
|
|
|
break
|
|
|
|
}
|
2015-03-09 07:14:50 +03:00
|
|
|
}
|
|
|
|
}
|
2015-11-20 13:37:51 +03:00
|
|
|
continue OUTER_LOOP
|
|
|
|
}
|
|
|
|
|
2017-04-24 07:18:36 +03:00
|
|
|
if !com.IsSliceContainsStr(noEndTags, tagName) {
|
|
|
|
startTags = append(startTags, tagName)
|
2015-03-24 01:32:24 +03:00
|
|
|
}
|
2015-03-09 07:14:50 +03:00
|
|
|
|
2015-11-20 09:52:11 +03:00
|
|
|
case html.EndTagToken:
|
2015-11-25 03:28:24 +03:00
|
|
|
if len(startTags) == 0 {
|
2015-11-25 03:29:35 +03:00
|
|
|
buf.WriteString(token.String())
|
2015-11-25 03:28:24 +03:00
|
|
|
break
|
|
|
|
}
|
|
|
|
|
2015-11-20 09:52:11 +03:00
|
|
|
buf.Write(leftAngleBracket)
|
2015-11-20 13:37:51 +03:00
|
|
|
buf.WriteString(startTags[len(startTags)-1])
|
2015-11-20 09:52:11 +03:00
|
|
|
buf.Write(rightAngleBracket)
|
2015-11-20 13:37:51 +03:00
|
|
|
startTags = startTags[:len(startTags)-1]
|
2015-03-24 01:32:24 +03:00
|
|
|
default:
|
|
|
|
buf.WriteString(token.String())
|
2015-03-09 07:14:50 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if io.EOF == tokenizer.Err() {
|
|
|
|
return buf.Bytes()
|
|
|
|
}
|
|
|
|
|
2015-03-24 01:32:24 +03:00
|
|
|
// If we are not at the end of the input, then some other parsing error has occurred,
|
|
|
|
// so return the input verbatim.
|
2016-11-25 04:58:05 +03:00
|
|
|
return rawHTML
|
2015-03-09 07:14:50 +03:00
|
|
|
}

// render renders Markdown to HTML with all the special link handling.
func render(rawBytes []byte, urlPrefix string, metas map[string]string, isWikiMarkdown bool) []byte {
	urlPrefix = strings.Replace(urlPrefix, " ", "+", -1)
	result := RenderRaw(rawBytes, urlPrefix, isWikiMarkdown)
	result = PostProcess(result, urlPrefix, metas, isWikiMarkdown)
	result = SanitizeBytes(result)
	return result
}

// Render renders Markdown to HTML with all the special link handling.
func Render(rawBytes []byte, urlPrefix string, metas map[string]string) []byte {
	return render(rawBytes, urlPrefix, metas, false)
}

// RenderString renders Markdown to HTML with special links and returns the result as a string.
func RenderString(raw, urlPrefix string, metas map[string]string) string {
	return string(render([]byte(raw), urlPrefix, metas, false))
}
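
// Illustrative usage (a sketch; the repository path is made up and metas is nil):
//
//	html := RenderString("See #1 and d8a994ef243349f321568f9e36d5c3f444b99cae", "/user/repo", nil)
//	// "#1" and the SHA are turned into links; the result is sanitized HTML.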

// RenderWiki renders a Markdown wiki page to HTML and returns the HTML as a string.
func RenderWiki(rawBytes []byte, urlPrefix string, metas map[string]string) string {
	return string(render(rawBytes, urlPrefix, metas, true))
}

var (
	// MarkupName describes the markup's name
	MarkupName = "markdown"
)

func init() {
	markup.RegisterParser(Parser{})
}

// Parser implements markup.Parser
type Parser struct {
}

// Name implements markup.Parser
func (Parser) Name() string {
	return MarkupName
}

// Extensions implements markup.Parser
func (Parser) Extensions() []string {
	return setting.Markdown.FileExtensions
}

// Render implements markup.Parser
func (Parser) Render(rawBytes []byte, urlPrefix string, metas map[string]string, isWiki bool) []byte {
	return render(rawBytes, urlPrefix, metas, isWiki)
}