Add support for URLs relative to context root
Setting `RelativeURLs` to `true` will make all root-relative URLs in the site (those starting with `/`) *really* relative: they are rewritten relative to the page that references them.

And will do so with speed.

So:

In `/post/myblogpost.html`:

`/mycss.css` becomes `../mycss.css`

The same URL in `/index.html` becomes:

`./mycss.css`

Note that absolute URLs are not touched, whether they point to external resources or were constructed with `BaseURL`.
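
For illustration, a minimal standalone sketch of the idea. `dottedPrefix` is a hypothetical, simplified stand-in for the `helpers.GetDottedRelativePath` helper introduced below, not the actual implementation:

```go
package main

import (
	"fmt"
	"strings"
)

// dottedPrefix emits one "../" per directory level below the site root,
// and "./" for pages at the root (simplified stand-in for
// helpers.GetDottedRelativePath).
func dottedPrefix(pagePath string) string {
	depth := strings.Count(strings.Trim(pagePath, "/"), "/")
	if depth == 0 {
		return "./"
	}
	return strings.Repeat("../", depth)
}

func main() {
	fmt.Println(dottedPrefix("/post/myblogpost.html") + "mycss.css") // ../mycss.css
	fmt.Println(dottedPrefix("/index.html") + "mycss.css")           // ./mycss.css
}
```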

The speediness is about the same as before:

```
benchmark                    old ns/op     new ns/op     delta
BenchmarkAbsURL              17462         18164         +4.02%
BenchmarkAbsURLSrcset        18842         19632         +4.19%
BenchmarkXMLAbsURLSrcset     18643         19313         +3.59%
BenchmarkXMLAbsURL           9283          9656          +4.02%

benchmark                    old allocs     new allocs     delta
BenchmarkAbsURL              24             28             +16.67%
BenchmarkAbsURLSrcset        29             32             +10.34%
BenchmarkXMLAbsURLSrcset     27             30             +11.11%
BenchmarkXMLAbsURL           12             14             +16.67%

benchmark                    old bytes     new bytes     delta
BenchmarkAbsURL              3154          3404          +7.93%
BenchmarkAbsURLSrcset        2376          2573          +8.29%
BenchmarkXMLAbsURLSrcset     2569          2763          +7.55%
BenchmarkXMLAbsURL           1888          1998          +5.83%

```
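
To opt in, only the new flag is needed. A hedged sketch of enabling it programmatically; setting `relativeURLs = true` in the site config should be equivalent, given the default registered in `commands/hugo.go` below:

```go
package main

import "github.com/spf13/viper"

func main() {
	// Mirrors the RelativeURLs key whose false default is added in
	// commands/hugo.go; true switches the output to dotted relative URLs.
	viper.Set("RelativeURLs", true)
}
```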

Fixes #1104
Fixes #622
Fixes #937
Fixes #157
bep committed May 15, 2015
1 parent e522e5f commit beaa8b1
Showing 10 changed files with 181 additions and 110 deletions.
1 change: 1 addition & 0 deletions commands/hugo.go
@@ -139,6 +139,7 @@ func InitializeConfig() {
viper.SetDefault("Verbose", false)
viper.SetDefault("IgnoreCache", false)
viper.SetDefault("CanonifyURLs", false)
viper.SetDefault("RelativeURLs", false)
viper.SetDefault("Taxonomies", map[string]string{"tag": "tags", "category": "categories"})
viper.SetDefault("Permalinks", make(hugolib.PermalinkOverrides, 0))
viper.SetDefault("Sitemap", hugolib.Sitemap{Priority: -1})
35 changes: 35 additions & 0 deletions helpers/path.go
@@ -232,6 +232,41 @@ func MakePathRelative(inPath string, possibleDirectories ...string) (string, err
return inPath, errors.New("Can't extract relative path, unknown prefix")
}

// Should be good enough for Hugo.
var isFileRe = regexp.MustCompile(".*\\..{1,6}$")

// Expects a relative path starting after the content directory.
func GetDottedRelativePath(inPath string) string {
inPath = filepath.Clean(filepath.FromSlash(inPath))
if inPath == "." {
return "./"
}
isFile := isFileRe.MatchString(inPath)
if !isFile {
if !strings.HasSuffix(inPath, FilePathSeparator) {
inPath += FilePathSeparator
}
}
if !strings.HasPrefix(inPath, FilePathSeparator) {
inPath = FilePathSeparator + inPath
}
dir, _ := filepath.Split(inPath)

sectionCount := strings.Count(dir, FilePathSeparator)

if sectionCount == 0 || dir == FilePathSeparator {
return "./"
}

var dottedPath string

for i := 1; i < sectionCount; i++ {
dottedPath += "../"
}

return dottedPath
}

// Filename takes a path, strips out the extension,
// and returns the name of the file.
func Filename(in string) (name string) {
39 changes: 39 additions & 0 deletions helpers/path_test.go
@@ -112,6 +112,45 @@ func TestMakePathRelative(t *testing.T) {
}
}

func TestGetDottedRelativePath(t *testing.T) {
// on Windows this will receive both kinds, both country and western ...
for _, f := range []func(string) string{filepath.FromSlash, func(s string) string { return s }} {
doTestGetDottedRelativePath(f, t)
}

}

func doTestGetDottedRelativePath(urlFixer func(string) string, t *testing.T) {
type test struct {
input, expected string
}
data := []test{
{"", "./"},
{urlFixer("/"), "./"},
{urlFixer("post"), "../"},
{urlFixer("/post"), "../"},
{urlFixer("post/"), "../"},
{urlFixer("tags/foo.html"), "../"},
{urlFixer("/tags/foo.html"), "../"},
{urlFixer("/post/"), "../"},
{urlFixer("////post/////"), "../"},
{urlFixer("/foo/bar/index.html"), "../../"},
{urlFixer("/foo/bar/foo/"), "../../../"},
{urlFixer("/foo/bar/foo"), "../../../"},
{urlFixer("foo/bar/foo/"), "../../../"},
{urlFixer("foo/bar/foo/bar"), "../../../../"},
{"404.html", "./"},
{"404.xml", "./"},
{"/404.html", "./"},
}
for i, d := range data {
output := GetDottedRelativePath(d.input)
if d.expected != output {
t.Errorf("Test %d failed. Expected %q got %q", i, d.expected, output)
}
}
}

func TestMakeTitle(t *testing.T) {
type test struct {
input, expected string
45 changes: 31 additions & 14 deletions hugolib/site.go
@@ -1397,16 +1397,21 @@ func (s *Site) renderAndWriteXML(name string, dest string, d interface{}, layout

err := s.render(name, d, renderBuffer, layouts...)

absURLInXML, err := transform.AbsURLInXML()
if err != nil {
return err
}

outBuffer := bp.GetBuffer()
defer bp.PutBuffer(outBuffer)

transformer := transform.NewChain(absURLInXML...)
transformer.Apply(outBuffer, renderBuffer)
var path []byte
if viper.GetBool("RelativeURLs") {
path = []byte(helpers.GetDottedRelativePath(dest))
} else {
s := viper.GetString("BaseURL")
if !strings.HasSuffix(s, "/") {
s += "/"
}
path = []byte(s)
}
transformer := transform.NewChain(transform.AbsURLInXML)
transformer.Apply(outBuffer, renderBuffer, path)

if err == nil {
err = s.WriteDestFile(dest, outBuffer)
@@ -1426,20 +1431,32 @@ func (s *Site) renderAndWritePage(name string, dest string, d interface{}, layou

transformLinks := transform.NewEmptyTransforms()

if viper.GetBool("CanonifyURLs") {
absURL, err := transform.AbsURL()
if err != nil {
return err
}
transformLinks = append(transformLinks, absURL...)
if viper.GetBool("RelativeURLs") || viper.GetBool("CanonifyURLs") {
transformLinks = append(transformLinks, transform.AbsURL)
}

if viper.GetBool("watch") && !viper.GetBool("DisableLiveReload") {
transformLinks = append(transformLinks, transform.LiveReloadInject)
}

var path []byte

if viper.GetBool("RelativeURLs") {
translated, err := s.PageTarget().(target.OptionalTranslator).TranslateRelative(dest)
if err != nil {
return err
}
path = []byte(helpers.GetDottedRelativePath(translated))
} else if viper.GetBool("CanonifyURLs") {
s := viper.GetString("BaseURL")
if !strings.HasSuffix(s, "/") {
s += "/"
}
path = []byte(s)
}

transformer := transform.NewChain(transformLinks...)
transformer.Apply(outBuffer, renderBuffer)
transformer.Apply(outBuffer, renderBuffer, path)

if err == nil {
if err = s.WriteDestPage(dest, outBuffer); err != nil {
5 changes: 5 additions & 0 deletions target/file.go
@@ -16,6 +16,11 @@ type Translator interface {
Translate(string) (string, error)
}

// TODO(bep) consider other ways to solve this.
type OptionalTranslator interface {
TranslateRelative(string) (string, error)
}

type Output interface {
Publisher
Translator
17 changes: 11 additions & 6 deletions target/page.go
@@ -32,20 +32,25 @@ func (pp *PagePub) Publish(path string, r io.Reader) (err error) {
}

func (pp *PagePub) Translate(src string) (dest string, err error) {
dir, err := pp.TranslateRelative(src)
if err != nil {
return dir, err
}
if pp.PublishDir != "" {
dir = filepath.Join(pp.PublishDir, dir)
}
return dir, nil
}

func (pp *PagePub) TranslateRelative(src string) (dest string, err error) {
if src == helpers.FilePathSeparator {
if pp.PublishDir != "" {
return filepath.Join(pp.PublishDir, "index.html"), nil
}
return "index.html", nil
}

dir, file := filepath.Split(src)
isRoot := dir == ""
ext := pp.extension(filepath.Ext(file))
name := filename(file)
if pp.PublishDir != "" {
dir = filepath.Join(pp.PublishDir, dir)
}

if pp.UglyURLs || file == "index.html" || (isRoot && file == "404.html") {
return filepath.Join(dir, fmt.Sprintf("%s%s", name, ext)), nil
57 changes: 5 additions & 52 deletions transform/absurl.go
@@ -1,58 +1,11 @@
package transform

import (
"github.com/spf13/viper"
"sync"
)
var ar *absURLReplacer = newAbsURLReplacer()

// to be used in tests; the live site will get its value from Viper.
var AbsBaseUrl string

var absURLInit sync.Once
var ar *absURLReplacer

func AbsURL() (trs []link, err error) {
initAbsURLReplacer()
return absURLFromReplacer(ar)
}

func absURLFromURL(URL string) (trs []link, err error) {
return absURLFromReplacer(newAbsURLReplacer(URL))
}

func absURLFromReplacer(ar *absURLReplacer) (trs []link, err error) {
trs = append(trs, func(ct contentTransformer) {
ar.replaceInHTML(ct)
})
return
}

func AbsURLInXML() (trs []link, err error) {
initAbsURLReplacer()
return absURLInXMLFromReplacer(ar)
}

func absURLInXMLFromURL(URL string) (trs []link, err error) {
return absURLInXMLFromReplacer(newAbsURLReplacer(URL))
var AbsURL = func(ct contentTransformer) {
ar.replaceInHTML(ct)
}

func absURLInXMLFromReplacer(ar *absURLReplacer) (trs []link, err error) {
trs = append(trs, func(ct contentTransformer) {
ar.replaceInXML(ct)
})
return
}

func initAbsURLReplacer() {
absURLInit.Do(func() {
var url string

if AbsBaseUrl != "" {
url = AbsBaseUrl
} else {
url = viper.GetString("BaseURL")
}

ar = newAbsURLReplacer(url)
})
var AbsURLInXML = func(ct contentTransformer) {
ar.replaceInXML(ct)
}
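
For orientation (not part of the diff): the replacer no longer bakes a base URL in at construction time. The caller now decides the prefix per rendered page — the dotted relative path when `RelativeURLs` is set, the `BaseURL` otherwise — and passes it when applying the chain, as the `hugolib/site.go` hunk above does. A hedged sketch, assuming the 2015-era `github.com/spf13/hugo` import paths and the new three-argument `Apply`:

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/spf13/hugo/helpers"
	"github.com/spf13/hugo/transform"
)

func main() {
	renderBuffer := bytes.NewBufferString(`<link rel="stylesheet" href="/mycss.css">`)
	outBuffer := new(bytes.Buffer)

	// One "../" per directory level of the page being rendered.
	path := []byte(helpers.GetDottedRelativePath("/post/mypost/index.html")) // "../../"

	chain := transform.NewChain(transform.AbsURL)
	chain.Apply(outBuffer, renderBuffer, path)

	// The href should now read ../../mycss.css.
	fmt.Println(outBuffer.String())
}
```
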
29 changes: 14 additions & 15 deletions transform/absurlreplacer.go
@@ -3,8 +3,6 @@ package transform
import (
"bytes"
"io"
"net/url"
"strings"
"unicode/utf8"
)

@@ -23,6 +21,9 @@ type absurllexer struct {
// the target for the new absurlified content
w io.Writer

// path may be set to a "." relative path
path []byte

pos int // input position
start int // item start position
width int // width of last element
@@ -54,9 +55,8 @@ var prefixes = []*prefix{
}

type absURLMatcher struct {
match []byte
quote []byte
replacementURL []byte
match []byte
quote []byte
}

// match check rune inside word. Will be != ' '.
Expand Down Expand Up @@ -147,7 +147,7 @@ func checkCandidateBase(l *absurllexer) {
}
l.pos += len(m.match)
l.w.Write(m.quote)
l.w.Write(m.replacementURL)
l.w.Write(l.path)
l.start = l.pos
}
}
@@ -188,7 +188,7 @@ func checkCandidateSrcset(l *absurllexer) {
l.w.Write([]byte(m.quote))
for i, f := range fields {
if f[0] == '/' {
l.w.Write(m.replacementURL)
l.w.Write(l.path)
l.w.Write(f[1:])

} else {
@@ -252,9 +252,11 @@ func (l *absurllexer) replace() {
}

func doReplace(ct contentTransformer, matchers []absURLMatcher) {

lexer := &absurllexer{
content: ct.Content(),
w: ct,
path: ct.Path(),
matchers: matchers}

lexer.replace()
@@ -265,9 +267,7 @@ type absURLReplacer struct {
xmlMatchers []absURLMatcher
}

func newAbsURLReplacer(baseURL string) *absURLReplacer {
u, _ := url.Parse(baseURL)
base := []byte(strings.TrimRight(u.String(), "/") + "/")
func newAbsURLReplacer() *absURLReplacer {

// HTML
dqHTMLMatch := []byte("\"/")
@@ -285,14 +285,13 @@ func newAbsURLReplacer(baseURL string) *absURLReplacer {

return &absURLReplacer{
htmlMatchers: []absURLMatcher{
{dqHTMLMatch, dqHTML, base},
{sqHTMLMatch, sqHTML, base},
{dqHTMLMatch, dqHTML},
{sqHTMLMatch, sqHTML},
},
xmlMatchers: []absURLMatcher{
{dqXMLMatch, dqXML, base},
{sqXMLMatch, sqXML, base},
{dqXMLMatch, dqXML},
{sqXMLMatch, sqXML},
}}

}

func (au *absURLReplacer) replaceInHTML(ct contentTransformer) {