@@ -10,6 +10,7 @@ import (
 	"path"
 	"path/filepath"
 	"regexp"
+	"slices"
 	"strings"
 	"sync"
 
@@ -54,7 +55,7 @@ var (
 	shortLinkPattern = regexp.MustCompile(`\[\[(.*?)\]\](\w*)`)
 
 	// anyHashPattern splits url containing SHA into parts
-	anyHashPattern = regexp.MustCompile(`https?://(?:\S+/){4,5}([0-9a-f]{40,64})(/[-+~_%.a-zA-Z0-9/]+)?(#[-+~_%.a-zA-Z0-9]+)?`)
+	anyHashPattern = regexp.MustCompile(`https?://(?:\S+/){4,5}([0-9a-f]{40,64})(/[-+~%./\w]+)?(\?[-+~%.\w&=]+)?(#[-+~%.\w]+)?`)
 
 	// comparePattern matches "http://domain/org/repo/compare/COMMIT1...COMMIT2#hash"
 	comparePattern = regexp.MustCompile(`https?://(?:\S+/){4,5}([0-9a-f]{7,64})(\.\.\.?)([0-9a-f]{7,64})?(#[-+~_%.a-zA-Z0-9]+)?`)
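
The rewritten anyHashPattern above adds a fourth capture group for an optional query string and switches the sub-path and hash character classes to \w. A minimal standalone sketch of what the new groups capture; the example.com URL and the placeholder SHA are hypothetical, and only the pattern itself is copied verbatim from the hunk above:

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// Pattern copied from the hunk above; everything else in this sketch is illustrative.
var anyHashPattern = regexp.MustCompile(`https?://(?:\S+/){4,5}([0-9a-f]{40,64})(/[-+~%./\w]+)?(\?[-+~%.\w&=]+)?(#[-+~%.\w]+)?`)

func main() {
	sha := strings.Repeat("d", 40) // placeholder 40-character commit ID
	u := "https://example.com/owner/repo/commit/" + sha + "/docs/README.md?display=source#L10"
	m := anyHashPattern.FindStringSubmatch(u)
	fmt.Println(m[1]) // commit ID (the 40 "d"s)
	fmt.Println(m[2]) // optional sub-path: /docs/README.md
	fmt.Println(m[3]) // optional query string (the newly added group): ?display=source
	fmt.Println(m[4]) // optional hash fragment: #L10
}
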
@@ -591,7 +592,8 @@ func replaceContentList(node *html.Node, i, j int, newNodes []*html.Node) {
 
 func mentionProcessor(ctx *RenderContext, node *html.Node) {
 	start := 0
-	for node != nil {
+	nodeStop := node.NextSibling
+	for node != nodeStop {
 		found, loc := references.FindFirstMentionBytes(util.UnsafeStringToBytes(node.Data[start:]))
 		if !found {
 			node = node.NextSibling
@@ -962,57 +964,68 @@ func commitCrossReferencePatternProcessor(ctx *RenderContext, node *html.Node) {
 	}
 }
 
-// fullHashPatternProcessor renders SHA containing URLs
-func fullHashPatternProcessor(ctx *RenderContext, node *html.Node) {
-	if ctx.Metas == nil {
-		return
-	}
-
-	next := node.NextSibling
-	for node != nil && node != next {
-		m := anyHashPattern.FindStringSubmatchIndex(node.Data)
-		if m == nil {
-			return
-		}
-
-		urlFull := node.Data[m[0]:m[1]]
-		text := base.ShortSha(node.Data[m[2]:m[3]])
-
-		// 3rd capture group matches a optional path
-		subpath := ""
-		if m[5] > 0 {
-			subpath = node.Data[m[4]:m[5]]
-		}
-
-		// 4th capture group matches a optional url hash
-		hash := ""
-		if m[7] > 0 {
-			hash = node.Data[m[6]:m[7]][1:]
-		}
-
-		start := m[0]
-		end := m[1]
-
-		// If url ends in '.', it's very likely that it is not part of the
-		// actual url but used to finish a sentence.
-		if strings.HasSuffix(urlFull, ".") {
-			end--
-			urlFull = urlFull[:len(urlFull)-1]
-			if hash != "" {
-				hash = hash[:len(hash)-1]
-			} else if subpath != "" {
-				subpath = subpath[:len(subpath)-1]
-			}
-		}
-
-		if subpath != "" {
-			text += subpath
-		}
-
-		if hash != "" {
-			text += " (" + hash + ")"
-		}
-		replaceContent(node, start, end, createCodeLink(urlFull, text, "commit"))
+type anyHashPatternResult struct {
+	PosStart  int
+	PosEnd    int
+	FullURL   string
+	CommitID  string
+	SubPath   string
+	QueryHash string
+}
+
+func anyHashPatternExtract(s string) (ret anyHashPatternResult, ok bool) {
+	m := anyHashPattern.FindStringSubmatchIndex(s)
+	if m == nil {
+		return ret, false
+	}
+
+	ret.PosStart, ret.PosEnd = m[0], m[1]
+	ret.FullURL = s[ret.PosStart:ret.PosEnd]
+	if strings.HasSuffix(ret.FullURL, ".") {
+		// if url ends in '.', it's very likely that it is not part of the actual url but used to finish a sentence.
+		ret.PosEnd--
+		ret.FullURL = ret.FullURL[:len(ret.FullURL)-1]
+		for i := 0; i < len(m); i++ {
+			m[i] = min(m[i], ret.PosEnd)
+		}
+	}
+
+	ret.CommitID = s[m[2]:m[3]]
+	if m[5] > 0 {
+		ret.SubPath = s[m[4]:m[5]]
+	}
+
+	lastStart, lastEnd := m[len(m)-2], m[len(m)-1]
+	if lastEnd > 0 {
+		ret.QueryHash = s[lastStart:lastEnd][1:]
+	}
+	return ret, true
+}
+
+// fullHashPatternProcessor renders SHA containing URLs
+func fullHashPatternProcessor(ctx *RenderContext, node *html.Node) {
+	if ctx.Metas == nil {
+		return
+	}
+	nodeStop := node.NextSibling
+	for node != nodeStop {
+		if node.Type != html.TextNode {
+			node = node.NextSibling
+			continue
+		}
+		ret, ok := anyHashPatternExtract(node.Data)
+		if !ok {
+			node = node.NextSibling
+			continue
+		}
+		text := base.ShortSha(ret.CommitID)
+		if ret.SubPath != "" {
+			text += ret.SubPath
+		}
+		if ret.QueryHash != "" {
+			text += " (" + ret.QueryHash + ")"
+		}
+		replaceContent(node, ret.PosStart, ret.PosEnd, createCodeLink(ret.FullURL, text, "commit"))
 		node = node.NextSibling.NextSibling
 	}
 }
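
The hunk above extracts the parsing half of fullHashPatternProcessor into anyHashPatternExtract and bounds the node loop with nodeStop, as mentionProcessor now does. A sketch of how the new helper might be exercised from a test in the same package; the test name, the example.com URL, and the expected values are illustrative, assuming testify's assert and that the snippet lives next to the code under test:

package markup

import (
	"strings"
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestAnyHashPatternExtractSketch(t *testing.T) {
	sha := strings.Repeat("a", 40)
	// A commit URL with sub-path, query string and hash fragment, ending a sentence with '.'
	input := "see https://example.com/owner/repo/commit/" + sha + "/sub/path?display=source#L2-L3."

	ret, ok := anyHashPatternExtract(input)
	assert.True(t, ok)
	assert.Equal(t, sha, ret.CommitID)
	assert.Equal(t, "/sub/path", ret.SubPath)
	// QueryHash holds the last capture group (the "#..." fragment) without the leading '#';
	// the sentence-final period is trimmed away together with FullURL.
	assert.Equal(t, "L2-L3", ret.QueryHash)
	assert.Equal(t, "https://example.com/owner/repo/commit/"+sha+"/sub/path?display=source#L2-L3", ret.FullURL)
}
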
@@ -1021,19 +1034,16 @@ func comparePatternProcessor(ctx *RenderContext, node *html.Node) {
 	if ctx.Metas == nil {
 		return
 	}
-
-	next := node.NextSibling
-	for node != nil && node != next {
-		m := comparePattern.FindStringSubmatchIndex(node.Data)
-		if m == nil {
-			return
-		}
-
-		// Ensure that every group (m[0]...m[7]) has a match
-		for i := 0; i < 8; i++ {
-			if m[i] == -1 {
-				return
-			}
-		}
+	nodeStop := node.NextSibling
+	for node != nodeStop {
+		if node.Type != html.TextNode {
+			node = node.NextSibling
+			continue
+		}
+		m := comparePattern.FindStringSubmatchIndex(node.Data)
+		if m == nil || slices.Contains(m[:8], -1) { // ensure that every group (m[0]...m[7]) has a match
+			node = node.NextSibling
+			continue
+		}
 
 		urlFull := node.Data[m[0]:m[1]]
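
In the comparePatternProcessor hunk, the hand-rolled scan over m[0]...m[7] is folded into a single slices.Contains call, which is why the first hunk adds the "slices" import. A small sketch of the equivalence, with hypothetical submatch offsets:

package main

import (
	"fmt"
	"slices"
)

func main() {
	// Hypothetical result of FindStringSubmatchIndex: start/end offset pairs,
	// with -1 marking a capture group that did not participate in the match.
	m := []int{0, 50, 0, 50, 10, 17, -1, -1, 20, 27}

	// Old form: scan the first eight offsets by hand.
	missing := false
	for i := 0; i < 8; i++ {
		if m[i] == -1 {
			missing = true
			break
		}
	}

	// New form from the diff: the same check in one call.
	fmt.Println(missing, slices.Contains(m[:8], -1)) // prints: true true
}
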