
Source file based relative linking

à la GitHub repository markdown, for both .md files and non-.md files

Signed-off-by: Sven Dowideit <SvenDowideit@home.org.au>
SvenDowideit authored and spf13 committed Sep 9, 2015
1 parent b78f13b commit 0f6b334b6715253b030c4e783b88e911b6e53e56
Showing with 452 additions and 34 deletions.
  1. +28 −22 helpers/content.go
  2. +33 −0 helpers/content_renderer.go
  3. +22 −2 hugolib/page.go
  4. +163 −1 hugolib/site.go
  5. +206 −9 hugolib/site_test.go
helpers/content.go
@@ -43,27 +43,29 @@ var SummaryDivider = []byte("<!--more-->")
 // Blackfriday holds configuration values for Blackfriday rendering.
 type Blackfriday struct {
-	Smartypants     bool
-	AngledQuotes    bool
-	Fractions       bool
-	HrefTargetBlank bool
-	SmartDashes     bool
-	LatexDashes     bool
-	PlainIDAnchors  bool
-	Extensions      []string
-	ExtensionsMask  []string
+	Smartypants             bool
+	AngledQuotes            bool
+	Fractions               bool
+	HrefTargetBlank         bool
+	SmartDashes             bool
+	LatexDashes             bool
+	PlainIDAnchors          bool
+	SourceRelativeLinksEval bool
+	Extensions              []string
+	ExtensionsMask          []string
 }

 // NewBlackfriday creates a new Blackfriday filled with site config or some sane defaults
 func NewBlackfriday() *Blackfriday {
 	combinedParam := map[string]interface{}{
-		"smartypants":     true,
-		"angledQuotes":    false,
-		"fractions":       true,
-		"hrefTargetBlank": false,
-		"smartDashes":     true,
-		"latexDashes":     true,
-		"plainIDAnchors":  false,
+		"smartypants":         true,
+		"angledQuotes":        false,
+		"fractions":           true,
+		"hrefTargetBlank":     false,
+		"smartDashes":         true,
+		"latexDashes":         true,
+		"plainIDAnchors":      false,
+		"sourceRelativeLinks": false,
 	}

 	siteParam := viper.GetStringMap("blackfriday")
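The new sourceRelativeLinks default ships turned off; a site opts in through its blackfriday params, which NewBlackfriday merges over the defaults above via viper.GetStringMap("blackfriday"). Below is a hedged sketch of flipping the flag on programmatically; the import path and the assumption that this key ends up on Blackfriday.SourceRelativeLinksEval (the field page.go checks) are mine, not something this diff shows.

package main

import (
	"fmt"

	"github.com/spf13/hugo/helpers"
	"github.com/spf13/viper"
)

func main() {
	// Sketch only: populate the "blackfriday" key the same way a config file's
	// blackfriday table would. Assumption: "sourceRelativeLinks" is decoded onto
	// Blackfriday.SourceRelativeLinksEval, the flag that gates the resolver wiring.
	viper.Set("blackfriday", map[string]interface{}{
		"sourceRelativeLinks": true,
	})

	bf := helpers.NewBlackfriday()
	fmt.Printf("source-relative links enabled: %v\n", bf.SourceRelativeLinksEval)
}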
@@ -198,7 +200,9 @@ func GetHTMLRenderer(defaultFlags int, ctx *RenderingContext) blackfriday.Render
 	}
 	return &HugoHtmlRenderer{
-		blackfriday.HtmlRendererWithParameters(htmlFlags, "", "", renderParameters),
+		FileResolver: ctx.FileResolver,
+		LinkResolver: ctx.LinkResolver,
+		Renderer:     blackfriday.HtmlRendererWithParameters(htmlFlags, "", "", renderParameters),
 	}
 }
@@ -329,11 +333,13 @@ func ExtractTOC(content []byte) (newcontent []byte, toc []byte) {
 // RenderingContext holds contextual information, like content and configuration,
 // for a given content rendering.
 type RenderingContext struct {
-	Content    []byte
-	PageFmt    string
-	DocumentID string
-	Config     *Blackfriday
-	configInit sync.Once
+	Content      []byte
+	PageFmt      string
+	DocumentID   string
+	Config       *Blackfriday
+	FileResolver FileResolverFunc
+	LinkResolver LinkResolverFunc
+	configInit   sync.Once
 }

 func (c *RenderingContext) getConfig() *Blackfriday {
helpers/content_renderer.go
@@ -19,12 +19,18 @@ import (
 	"github.com/miekg/mmark"
 	"github.com/russross/blackfriday"
+	jww "github.com/spf13/jwalterweatherman"
 	"github.com/spf13/viper"
 )

+type LinkResolverFunc func(ref string) (string, error)
+type FileResolverFunc func(ref string) (string, error)
+
 // Wraps a blackfriday.Renderer, typically a blackfriday.Html
 // Enabling Hugo to customise the rendering experience
 type HugoHtmlRenderer struct {
+	FileResolver FileResolverFunc
+	LinkResolver LinkResolverFunc
 	blackfriday.Renderer
 }
@@ -38,6 +44,33 @@ func (renderer *HugoHtmlRenderer) BlockCode(out *bytes.Buffer, text []byte, lang
 	}
 }

+func (renderer *HugoHtmlRenderer) Link(out *bytes.Buffer, link []byte, title []byte, content []byte) {
+	if renderer.LinkResolver == nil || bytes.HasPrefix(link, []byte("{@{@HUGOSHORTCODE")) {
+		// Use the blackfriday built in Link handler
+		renderer.Renderer.Link(out, link, title, content)
+	} else {
+		newLink, err := renderer.LinkResolver(string(link))
+		if err != nil {
+			newLink = string(link)
+			jww.ERROR.Printf("LinkResolver: %s", err)
+		}
+		renderer.Renderer.Link(out, []byte(newLink), title, content)
+	}
+}
+
+func (renderer *HugoHtmlRenderer) Image(out *bytes.Buffer, link []byte, title []byte, alt []byte) {
+	if renderer.FileResolver == nil || bytes.HasPrefix(link, []byte("{@{@HUGOSHORTCODE")) {
+		// Use the blackfriday built in Image handler
+		renderer.Renderer.Image(out, link, title, alt)
+	} else {
+		newLink, err := renderer.FileResolver(string(link))
+		if err != nil {
+			newLink = string(link)
+			jww.ERROR.Printf("FileResolver: %s", err)
+		}
+		renderer.Renderer.Image(out, []byte(newLink), title, alt)
+	}
+}
+
 // Wraps a mmark.Renderer, typically a mmark.html
 // Enabling Hugo to customise the rendering experience
 type HugoMmarkHtmlRenderer struct {
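The renderer itself only knows the two hook signatures: anything satisfying func(ref string) (string, error) can be plugged in, and on a resolver error the overrides above log the problem and fall back to the raw link rather than failing the render. A minimal, self-contained sketch of that contract (the toy resolver and sample paths are invented for illustration and are not Hugo's code):

package main

import (
	"fmt"
	"path"
	"strings"
)

// Same shape as the LinkResolverFunc / FileResolverFunc types above.
type LinkResolverFunc func(ref string) (string, error)

// resolve is a toy resolver: rewrite "foo/bar.md" to "/foo/bar/" and refuse anything else.
func resolve(ref string) (string, error) {
	if !strings.HasSuffix(ref, ".md") {
		return "", fmt.Errorf("not a markdown source reference: %q", ref)
	}
	return path.Clean("/"+strings.TrimSuffix(ref, ".md")) + "/", nil
}

// renderLink mirrors the fallback in HugoHtmlRenderer.Link: on error, keep the original target.
func renderLink(fn LinkResolverFunc, link string) string {
	newLink, err := fn(link)
	if err != nil {
		newLink = link
	}
	return newLink
}

func main() {
	fmt.Println(renderLink(resolve, "install/linux.md"))    // /install/linux/
	fmt.Println(renderLink(resolve, "https://example.com")) // left as-is
}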
hugolib/page.go
@@ -234,14 +234,34 @@ func (p *Page) setSummary() {
 }

 func (p *Page) renderBytes(content []byte) []byte {
+	var fn helpers.LinkResolverFunc
+	var fileFn helpers.FileResolverFunc
+	if p.getRenderingConfig().SourceRelativeLinksEval {
+		fn = func(ref string) (string, error) {
+			return p.Node.Site.GitHub(ref, p)
+		}
+		fileFn = func(ref string) (string, error) {
+			return p.Node.Site.GitHubFileLink(ref, p)
+		}
+	}
 	return helpers.RenderBytes(
 		&helpers.RenderingContext{Content: content, PageFmt: p.guessMarkupType(),
-			DocumentID: p.UniqueID(), Config: p.getRenderingConfig()})
+			DocumentID: p.UniqueID(), Config: p.getRenderingConfig(), LinkResolver: fn, FileResolver: fileFn})
 }

 func (p *Page) renderContent(content []byte) []byte {
+	var fn helpers.LinkResolverFunc
+	var fileFn helpers.FileResolverFunc
+	if p.getRenderingConfig().SourceRelativeLinksEval {
+		fn = func(ref string) (string, error) {
+			return p.Node.Site.GitHub(ref, p)
+		}
+		fileFn = func(ref string) (string, error) {
+			return p.Node.Site.GitHubFileLink(ref, p)
+		}
+	}
 	return helpers.RenderBytesWithTOC(&helpers.RenderingContext{Content: content, PageFmt: p.guessMarkupType(),
-		DocumentID: p.UniqueID(), Config: p.getRenderingConfig()})
+		DocumentID: p.UniqueID(), Config: p.getRenderingConfig(), LinkResolver: fn, FileResolver: fileFn})
 }

 func (p *Page) getRenderingConfig() *helpers.Blackfriday {
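From the caller's side the change just threads two optional closures into the RenderingContext; leaving them nil keeps blackfriday's stock behaviour. A hedged sketch of driving helpers.RenderBytes directly with a hand-rolled resolver (the resolver body, the sample content and the "markdown" PageFmt value are assumptions made for illustration; inside Hugo the closures call SiteInfo.GitHub and SiteInfo.GitHubFileLink as shown above):

package main

import (
	"fmt"
	"strings"

	"github.com/spf13/hugo/helpers"
)

func main() {
	// Sketch only: a resolver that pretends every .md reference lives under /docs/.
	var fn helpers.LinkResolverFunc = func(ref string) (string, error) {
		return "/docs/" + strings.TrimSuffix(ref, ".md") + "/", nil
	}

	out := helpers.RenderBytes(&helpers.RenderingContext{
		Content:      []byte("[install](install.md)"),
		PageFmt:      "markdown",
		DocumentID:   "example",
		LinkResolver: fn, // leave nil to keep blackfriday's built-in Link handler
	})
	fmt.Println(string(out)) // expect an <a href="/docs/install/"> anchor
}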
hugolib/site.go
@@ -100,7 +100,7 @@ type SiteInfo struct {
 	Social                SiteSocial
 	Sections              Taxonomy
 	Pages                 *Pages
-	Files                 []*source.File
+	Files                 *[]*source.File
 	Menus                 *Menus
 	Hugo                  *HugoInfo
 	Title                 string
@@ -217,6 +217,166 @@ func (s *SiteInfo) RelRef(ref string, page *Page) (string, error) {
 	return s.refLink(ref, page, true)
 }

+func (s *SiteInfo) GitHub(ref string, page *Page) (string, error) {
+	return s.githubLink(ref, page, true)
+}
+
+func (s *SiteInfo) githubLink(ref string, currentPage *Page, relative bool) (string, error) {
+	var refURL *url.URL
+	var err error
+
+	// TODO can I make this a param to `hugo --use-github-links=/docs`?
+	// SVEN: add more tests - the prefix might be a real dir inside tho - add some pages that have it as a legitimate path
+	repositoryPathPrefix := "/docs"
+	refURL, err = url.Parse(strings.TrimPrefix(ref, repositoryPathPrefix))
+	if err != nil {
+		return "", err
+	}
+
+	if refURL.Scheme != "" {
+		// TODO: consider looking for http(s?)://github.com/user/project/prefix and replacing it - tho this may be intentional, so idk
+		//return "", fmt.Errorf("Not a plain filepath link (%s)", ref)
+		// Treat this as not an error, as the link is used as-is
+		return ref, nil
+	}
+
+	var target *Page
+	var link string
+
+	if refURL.Path != "" {
+		refPath := filepath.Clean(filepath.FromSlash(refURL.Path))
+		if strings.IndexRune(refPath, os.PathSeparator) == 0 { // filepath.IsAbs fails to me.
+			refPath = refPath[1:]
+		} else {
+			if currentPage != nil {
+				refPath = filepath.Join(currentPage.Source.Dir(), refURL.Path)
+			}
+		}
+
+		for _, page := range []*Page(*s.Pages) {
+			if page.Source.Path() == refPath {
+				target = page
+				break
+			}
+		}
+		// need to exhaust the test, then try with the others :/
+		// if the refPath doesn't end in a filename with extension `.md`, then try with `.md` , and then `/index.md`
+		mdPath := strings.TrimSuffix(refPath, string(os.PathSeparator)) + ".md"
+		for _, page := range []*Page(*s.Pages) {
+			if page.Source.Path() == mdPath {
+				target = page
+				break
+			}
+		}
+		indexPath := filepath.Join(refPath, "index.md")
+		for _, page := range []*Page(*s.Pages) {
+			if page.Source.Path() == indexPath {
+				target = page
+				break
+			}
+		}
+
+		if target == nil {
+			return "", fmt.Errorf("No page found for \"%s\" on page \"%s\".\n", ref, currentPage.Source.Path())
+		}
+
+		// SVEN: look at filepath.Rel() it might help, got the rel/non-rel url's (dangerous tho)
+		if relative {
+			link, err = target.RelPermalink()
+		} else {
+			link, err = target.Permalink()
+		}
+
+		if err != nil {
+			return "", err
+		}
+	}
+
+	// SVEN: add tests for github style relative fragments
+	if refURL.Fragment != "" {
+		link = link + "#" + refURL.Fragment
+
+		if refURL.Path != "" && target != nil && !target.getRenderingConfig().PlainIDAnchors {
+			link = link + ":" + target.UniqueID()
+		} else if currentPage != nil && !currentPage.getRenderingConfig().PlainIDAnchors {
+			link = link + ":" + currentPage.UniqueID()
+		}
+	}
+
+	return link, nil
+}
+
+func (s *SiteInfo) GitHubFileLink(ref string, page *Page) (string, error) {
+	return s.githubFileLink(ref, page, false)
+}
+
+// for non-pages in the site tree
+func (s *SiteInfo) githubFileLink(ref string, currentPage *Page, relative bool) (string, error) {
+	var refURL *url.URL
+	var err error
+
+	// TODO can I make this a param to `hugo --use-github-links=/docs`?
+	// SVEN: add more tests - the prefix might be a real dir inside tho - add some pages that have it as a legitimate path
+	repositoryPathPrefix := "/docs"
+	refURL, err = url.Parse(strings.TrimPrefix(ref, repositoryPathPrefix))
+	if err != nil {
+		return "", err
+	}
+
+	if refURL.Scheme != "" {
+		// TODO: consider looking for http(s?)://github.com/user/project/prefix and replacing it - tho this may be intentional, so idk
+		//return "", fmt.Errorf("Not a plain filepath link (%s)", ref)
+		// Treat this as not an error, as the link is used as-is
+		return ref, nil
+	}
+
+	var target *source.File
+	var link string
+
+	if refURL.Path != "" {
+		refPath := filepath.Clean(filepath.FromSlash(refURL.Path))
+		if strings.IndexRune(refPath, os.PathSeparator) == 0 { // filepath.IsAbs fails to me.
+			refPath = refPath[1:]
+		} else {
+			if currentPage != nil {
+				refPath = filepath.Join(currentPage.Source.Dir(), refURL.Path)
+			}
+		}
+
+		for _, file := range []*source.File(*s.Files) {
+			if file.Path() == refPath {
+				target = file
+				break
+			}
+		}
+
+		if target == nil {
+			return "", fmt.Errorf("No file found for \"%s\" on page \"%s\".\n", ref, currentPage.Source.Path())
+		}
+
+		link = target.Path()
+		// SVEN: look at filepath.Rel() it might help, got the rel/non-rel url's (dangerous tho)
+		// SVEN: reconsider the fact I hardcoded the `relative` bool in both github resolvers
+		if relative {
+			return "./" + filepath.ToSlash(link), nil
+		} else {
+			return "/" + filepath.ToSlash(link), nil
+		}
+
+		if err != nil {
+			return "", err
+		}
+		return link, nil
+	}
+
+	return "", fmt.Errorf("failed to find a file to match \"%s\" on page \"%s\"", ref, currentPage.Source.Path())
+}
+
 func (s *SiteInfo) addToPaginationPageCount(cnt uint64) {
 	atomic.AddUint64(&s.paginationPageCount, cnt)
 }
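githubLink tries three spellings of a reference before giving up: the literal source path, the path with ".md" appended, and path/index.md, while githubFileLink only matches the literal path against Site.Files. A small sketch of that candidate order (the helper name is hypothetical and not part of the commit):

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

// candidatePaths spells out the lookup order githubLink applies when matching
// a reference against page source paths.
func candidatePaths(refPath string) []string {
	return []string{
		refPath, // exact source path, e.g. "install/linux.md"
		strings.TrimSuffix(refPath, string(os.PathSeparator)) + ".md", // "install/linux" -> "install/linux.md"
		filepath.Join(refPath, "index.md"),                            // "install" -> "install/index.md"
	}
}

func main() {
	fmt.Println(candidatePaths("install/linux")) // [install/linux install/linux.md install/linux/index.md]
}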
@@ -479,6 +639,7 @@ func (s *Site) initializeSiteInfo() {
 		canonifyURLs:          viper.GetBool("CanonifyURLs"),
 		preserveTaxonomyNames: viper.GetBool("PreserveTaxonomyNames"),
 		Pages:                 &s.Pages,
+		Files:                 &s.Files,
 		Menus:                 &s.Menus,
 		Params:                params,
 		Permalinks:            permalinks,
@@ -1395,6 +1556,7 @@ func (s *Site) Stats() {
 	jww.FEEDBACK.Println(s.draftStats())
 	jww.FEEDBACK.Println(s.futureStats())
 	jww.FEEDBACK.Printf("%d pages created\n", len(s.Pages))
+	jww.FEEDBACK.Printf("%d non-page files copied\n", len(s.Files))
 	jww.FEEDBACK.Printf("%d paginator pages created\n", s.Info.paginationPageCount)

 	taxonomies := viper.GetStringMapString("Taxonomies")