Diffstat (limited to 'caddyconfig/caddyfile')
-rw-r--r--  caddyconfig/caddyfile/adapter.go                  93
-rwxr-xr-x  caddyconfig/caddyfile/dispenser.go               333
-rwxr-xr-x  caddyconfig/caddyfile/dispenser_test.go          316
-rwxr-xr-x  caddyconfig/caddyfile/lexer.go                   150
-rwxr-xr-x  caddyconfig/caddyfile/lexer_test.go              196
-rwxr-xr-x  caddyconfig/caddyfile/parse.go                   492
-rwxr-xr-x  caddyconfig/caddyfile/parse_test.go              718
-rwxr-xr-x  caddyconfig/caddyfile/testdata/import_glob0.txt    6
-rwxr-xr-x  caddyconfig/caddyfile/testdata/import_glob1.txt    4
-rwxr-xr-x  caddyconfig/caddyfile/testdata/import_glob2.txt    3
-rwxr-xr-x  caddyconfig/caddyfile/testdata/import_test1.txt    2
-rwxr-xr-x  caddyconfig/caddyfile/testdata/import_test2.txt    4
12 files changed, 2317 insertions, 0 deletions
diff --git a/caddyconfig/caddyfile/adapter.go b/caddyconfig/caddyfile/adapter.go
new file mode 100644
index 0000000..ab4905a
--- /dev/null
+++ b/caddyconfig/caddyfile/adapter.go
@@ -0,0 +1,93 @@
+// Copyright 2015 Matthew Holt and The Caddy Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package caddyfile
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+
+ "github.com/caddyserver/caddy/v2"
+ "github.com/caddyserver/caddy/v2/caddyconfig"
+)
+
+// Adapter adapts Caddyfile to Caddy JSON.
+type Adapter struct {
+ ServerType ServerType
+}
+
+// Adapt converts the Caddyfile config in body to Caddy JSON.
+func (a Adapter) Adapt(body []byte, options map[string]string) ([]byte, []caddyconfig.Warning, error) {
+ if a.ServerType == nil {
+ return nil, nil, fmt.Errorf("no server type")
+ }
+ if options == nil {
+ options = make(map[string]string)
+ }
+
+ directives := a.ServerType.ValidDirectives()
+
+ filename := options["filename"]
+ if filename == "" {
+ filename = "Caddyfile"
+ }
+
+ serverBlocks, err := Parse(filename, bytes.NewReader(body), directives)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ cfg, warnings, err := a.ServerType.Setup(serverBlocks, options)
+ if err != nil {
+ return nil, warnings, err
+ }
+
+ marshalFunc := json.Marshal
+ if options["pretty"] == "true" {
+ marshalFunc = caddyconfig.JSONIndent
+ }
+ result, err := marshalFunc(cfg)
+
+ return result, warnings, err
+}
+
+// Unmarshaler is a type that can unmarshal
+// Caddyfile tokens to set itself up for a
+// JSON encoding. The goal of an unmarshaler
+// is not to set itself up for actual use,
+// but to set itself up for being marshaled
+// into JSON. Caddyfile-unmarshaled values
+// will not be used directly; they will be
+// encoded as JSON and then used from that.
+type Unmarshaler interface {
+ UnmarshalCaddyfile(d *Dispenser) error
+}
+
+// ServerType is a type that can evaluate a Caddyfile and set up a caddy config.
+type ServerType interface {
+ // ValidDirectives returns a list of the
+ // server type's recognized directives.
+ ValidDirectives() []string
+
+ // Setup takes the server blocks which
+ // contain tokens, as well as options
+ // (e.g. CLI flags) and creates a Caddy
+ // config, along with any warnings or
+ // an error.
+ Setup([]ServerBlock, map[string]string) (*caddy.Config, []caddyconfig.Warning, error)
+}
+
+// Interface guard
+var _ caddyconfig.Adapter = (*Adapter)(nil)
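
For orientation before the next file: a minimal sketch of driving Adapter.Adapt end to end. myServerType is a hypothetical stand-in for a real ServerType implementation, and the "filename"/"pretty" option keys are the ones Adapt reads above; nothing else here is prescribed by the diff.

package main

import (
	"io/ioutil"
	"log"
	"os"

	"github.com/caddyserver/caddy/v2"
	"github.com/caddyserver/caddy/v2/caddyconfig"
	"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"
)

// myServerType is a hypothetical, do-nothing ServerType used only
// to make this sketch compile; a real implementation would translate
// the parsed server blocks into a meaningful config.
type myServerType struct{}

// ValidDirectives returns nil, which makes the parser accept any directive.
func (myServerType) ValidDirectives() []string { return nil }

// Setup ignores the parsed blocks and returns an empty config.
func (myServerType) Setup([]caddyfile.ServerBlock, map[string]string) (*caddy.Config, []caddyconfig.Warning, error) {
	return new(caddy.Config), nil, nil
}

func main() {
	body, err := ioutil.ReadFile("Caddyfile")
	if err != nil {
		log.Fatal(err)
	}
	adapter := caddyfile.Adapter{ServerType: myServerType{}}
	result, warnings, err := adapter.Adapt(body, map[string]string{
		"filename": "Caddyfile", // used in error messages; also the default
		"pretty":   "true",      // indent the JSON via caddyconfig.JSONIndent
	})
	if err != nil {
		log.Fatal(err)
	}
	for _, w := range warnings {
		log.Printf("warning: %s:%d: %s", w.File, w.Line, w.Message)
	}
	os.Stdout.Write(result)
}
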
diff --git a/caddyconfig/caddyfile/dispenser.go b/caddyconfig/caddyfile/dispenser.go
new file mode 100755
index 0000000..1cf5d04
--- /dev/null
+++ b/caddyconfig/caddyfile/dispenser.go
@@ -0,0 +1,333 @@
+// Copyright 2015 Matthew Holt and The Caddy Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package caddyfile
+
+import (
+ "errors"
+ "fmt"
+ "strings"
+)
+
+// Dispenser is a type that dispenses tokens, similarly to a lexer,
+// except that it can do so with some notion of structure. An empty
+// Dispenser is invalid; call NewDispenser to make a proper instance.
+type Dispenser struct {
+ filename string
+ tokens []Token
+ cursor int
+ nesting int
+}
+
+// NewDispenser returns a Dispenser filled with the given tokens.
+func NewDispenser(filename string, tokens []Token) *Dispenser {
+ return &Dispenser{
+ filename: filename,
+ tokens: tokens,
+ cursor: -1,
+ }
+}
+
+// Next loads the next token. Returns true if a token
+// was loaded; false otherwise. If false, all tokens
+// have been consumed.
+func (d *Dispenser) Next() bool {
+ if d.cursor < len(d.tokens)-1 {
+ d.cursor++
+ return true
+ }
+ return false
+}
+
+// Prev moves to the previous token. It does the inverse
+// of Next(). Generally, this should only be used in
+// special cases such as deleting a token from the slice
+// that d is iterating. In that case, without using Prev(),
+// the dispenser would be pointing at the wrong token since
+// deleting a token implicitly advances the cursor.
+func (d *Dispenser) Prev() bool {
+ if d.cursor > 0 {
+ d.cursor--
+ return true
+ }
+ return false
+}
+
+// NextArg loads the next token if it is on the same
+// line and if it is not a block opening (open curly
+// brace). Returns true if an argument token was
+// loaded; false otherwise. If false, all tokens on
+// the line have been consumed except for potentially
+// a block opening. It handles imported tokens
+// correctly.
+func (d *Dispenser) NextArg() bool {
+ if !d.nextOnSameLine() {
+ return false
+ }
+ if d.Val() == "{" {
+ // roll back; a block opening is not an argument
+ d.cursor--
+ return false
+ }
+ return true
+}
+
+// nextOnSameLine advances the cursor if the next
+// token is on the same line of the same file.
+func (d *Dispenser) nextOnSameLine() bool {
+ if d.cursor < 0 {
+ d.cursor++
+ return true
+ }
+ if d.cursor >= len(d.tokens) {
+ return false
+ }
+ if d.cursor < len(d.tokens)-1 &&
+ d.tokens[d.cursor].File == d.tokens[d.cursor+1].File &&
+ d.tokens[d.cursor].Line+d.numLineBreaks(d.cursor) == d.tokens[d.cursor+1].Line {
+ d.cursor++
+ return true
+ }
+ return false
+}
+
+// NextLine loads the next token only if it is not on the same
+// line as the current token, and returns true if a token was
+// loaded; false otherwise. If false, there is not another token
+// or it is on the same line. It handles imported tokens correctly.
+func (d *Dispenser) NextLine() bool {
+ if d.cursor < 0 {
+ d.cursor++
+ return true
+ }
+ if d.cursor >= len(d.tokens) {
+ return false
+ }
+ if d.cursor < len(d.tokens)-1 &&
+ (d.tokens[d.cursor].File != d.tokens[d.cursor+1].File ||
+ d.tokens[d.cursor].Line+d.numLineBreaks(d.cursor) < d.tokens[d.cursor+1].Line) {
+ d.cursor++
+ return true
+ }
+ return false
+}
+
+// NextBlock can be used as the condition of a for loop
+// to load the next token as long as it opens a block or
+// is already in a block. It returns true if a token was
+// loaded, or false when the block's closing curly brace
+// was loaded and thus the block ended. Nested blocks are
+// not supported.
+func (d *Dispenser) NextBlock() bool {
+ if d.nesting > 0 {
+ d.Next()
+ if d.Val() == "}" {
+ d.nesting--
+ return false
+ }
+ return true
+ }
+ if !d.nextOnSameLine() { // block must open on same line
+ return false
+ }
+ if d.Val() != "{" {
+ d.cursor-- // roll back if not opening brace
+ return false
+ }
+ d.Next()
+ if d.Val() == "}" {
+ // open and then closed right away
+ return false
+ }
+ d.nesting++
+ return true
+}
+
+// Nested returns true if the token is currently nested
+// inside a block (i.e. an open curly brace was consumed).
+func (d *Dispenser) Nested() bool {
+ return d.nesting > 0
+}
+
+// Val gets the text of the current token. If there is no token
+// loaded, it returns empty string.
+func (d *Dispenser) Val() string {
+ if d.cursor < 0 || d.cursor >= len(d.tokens) {
+ return ""
+ }
+ return d.tokens[d.cursor].Text
+}
+
+// Line gets the line number of the current token. If there is no token
+// loaded, it returns 0.
+func (d *Dispenser) Line() int {
+ if d.cursor < 0 || d.cursor >= len(d.tokens) {
+ return 0
+ }
+ return d.tokens[d.cursor].Line
+}
+
+// File gets the filename of the current token. If there is no token loaded,
+// it returns the filename originally given when parsing started.
+func (d *Dispenser) File() string {
+ if d.cursor < 0 || d.cursor >= len(d.tokens) {
+ return d.filename
+ }
+ if tokenFilename := d.tokens[d.cursor].File; tokenFilename != "" {
+ return tokenFilename
+ }
+ return d.filename
+}
+
+// Args is a convenience function that loads the next arguments
+// (tokens on the same line) into an arbitrary number of strings
+// pointed to in targets. If there are fewer tokens available
+// than string pointers, the remaining strings will not be changed
+// and false will be returned. If there were enough tokens available
+// to fill the arguments, then true will be returned.
+func (d *Dispenser) Args(targets ...*string) bool {
+ for i := 0; i < len(targets); i++ {
+ if !d.NextArg() {
+ return false
+ }
+ *targets[i] = d.Val()
+ }
+ return true
+}
+
+// RemainingArgs loads any more arguments (tokens on the same line)
+// into a slice and returns them. Open curly brace tokens also indicate
+// the end of arguments, and the curly brace is not included in
+// the return value nor is it loaded.
+func (d *Dispenser) RemainingArgs() []string {
+ var args []string
+ for d.NextArg() {
+ args = append(args, d.Val())
+ }
+ return args
+}
+
+// NewFromNextTokens returns a new dispenser with a copy of
+// the tokens from the current token until the end of the
+// "directive" whether that be to the end of the line or
+// the end of a block that starts at the end of the line.
+func (d *Dispenser) NewFromNextTokens() *Dispenser {
+ var tkns []Token
+ tkns = append(tkns, d.Token())
+ for d.NextArg() {
+ tkns = append(tkns, d.Token())
+ }
+ if d.Next() && d.Val() == "{" {
+ tkns = append(tkns, d.Token())
+ for d.NextBlock() {
+ for d.Nested() {
+ tkns = append(tkns, d.Token())
+ d.NextBlock()
+ }
+ }
+ tkns = append(tkns, d.Token())
+ } else {
+ d.cursor--
+ }
+ return NewDispenser(d.filename, tkns)
+}
+
+// Token returns the current token.
+func (d *Dispenser) Token() Token {
+ if d.cursor < 0 || d.cursor >= len(d.tokens) {
+ return Token{}
+ }
+ return d.tokens[d.cursor]
+}
+
+// Cursor returns the current cursor (token index).
+func (d *Dispenser) Cursor() int {
+ return d.cursor
+}
+
+// ArgErr returns an argument error, meaning that another
+// argument was expected but not found. In other words,
+// a line break or open curly brace was encountered instead of
+// an argument.
+func (d *Dispenser) ArgErr() error {
+ if d.Val() == "{" {
+ return d.Err("Unexpected token '{', expecting argument")
+ }
+ return d.Errf("Wrong argument count or unexpected line ending after '%s'", d.Val())
+}
+
+// SyntaxErr creates a generic syntax error which explains what was
+// found and what was expected.
+func (d *Dispenser) SyntaxErr(expected string) error {
+ msg := fmt.Sprintf("%s:%d - Syntax error: Unexpected token '%s', expecting '%s'", d.File(), d.Line(), d.Val(), expected)
+ return errors.New(msg)
+}
+
+// EOFErr returns an error indicating that the dispenser reached
+// the end of the input when searching for the next token.
+func (d *Dispenser) EOFErr() error {
+ return d.Errf("Unexpected EOF")
+}
+
+// Err generates a custom parse-time error with a message of msg.
+func (d *Dispenser) Err(msg string) error {
+ msg = fmt.Sprintf("%s:%d - Error during parsing: %s", d.File(), d.Line(), msg)
+ return errors.New(msg)
+}
+
+// Errf is like Err, but for formatted error messages
+func (d *Dispenser) Errf(format string, args ...interface{}) error {
+ return d.Err(fmt.Sprintf(format, args...))
+}
+
+// Delete deletes the current token and returns the updated slice
+// of tokens. The cursor is not advanced to the next token.
+// Because deletion modifies the underlying slice, this method
+// should only be called if you have access to the original slice
+// of tokens and/or are using the slice of tokens outside this
+// Dispenser instance. If you do not re-assign the slice with the
+// return value of this method, inconsistencies in the token
+// array will become apparent (or worse, hide from you like they
+// did me for 3 and a half freaking hours late one night).
+func (d *Dispenser) Delete() []Token {
+ if d.cursor >= 0 && d.cursor < len(d.tokens)-1 {
+ d.tokens = append(d.tokens[:d.cursor], d.tokens[d.cursor+1:]...)
+ d.cursor--
+ }
+ return d.tokens
+}
+
+// numLineBreaks counts how many line breaks are in the token
+// value given by the token index tknIdx. It returns 0 if the
+// token does not exist or there are no line breaks.
+func (d *Dispenser) numLineBreaks(tknIdx int) int {
+ if tknIdx < 0 || tknIdx >= len(d.tokens) {
+ return 0
+ }
+ return strings.Count(d.tokens[tknIdx].Text, "\n")
+}
+
+// isNewLine determines whether the current token is on a different
+// line (higher line number) than the previous token. It handles imported
+// tokens correctly. If there isn't a previous token, it returns true.
+func (d *Dispenser) isNewLine() bool {
+ if d.cursor < 1 {
+ return true
+ }
+ if d.cursor > len(d.tokens)-1 {
+ return false
+ }
+ return d.tokens[d.cursor-1].File != d.tokens[d.cursor].File ||
+ d.tokens[d.cursor-1].Line+d.numLineBreaks(d.cursor-1) < d.tokens[d.cursor].Line
+}
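
A usage sketch before the tests: the methods above compose into the token-walking loop of a typical UnmarshalCaddyfile implementation (see the Unmarshaler interface in adapter.go). Gizmo and its subdirectives are invented for illustration; only the Dispenser calls are real, and per its doc comment, NextBlock handles a single level of nesting.

// Gizmo is a hypothetical type configured by a directive such as:
//
//   gizmo mygizmo {
//       timeout 5s
//       verbose
//   }
type Gizmo struct {
	Name    string
	Timeout string
	Verbose bool
}

// UnmarshalCaddyfile walks the dispenser: the directive name,
// its same-line argument, then an optional one-level block.
func (g *Gizmo) UnmarshalCaddyfile(d *Dispenser) error {
	for d.Next() { // consume the directive name token itself
		if !d.Args(&g.Name) {
			return d.ArgErr() // a name argument is required
		}
		for d.NextBlock() { // iterate the block, if one opens
			switch d.Val() {
			case "timeout":
				if !d.Args(&g.Timeout) {
					return d.ArgErr()
				}
			case "verbose":
				if d.NextArg() {
					return d.ArgErr() // verbose takes no arguments
				}
				g.Verbose = true
			default:
				return d.Errf("unrecognized subdirective '%s'", d.Val())
			}
		}
	}
	return nil
}
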
diff --git a/caddyconfig/caddyfile/dispenser_test.go b/caddyconfig/caddyfile/dispenser_test.go
new file mode 100755
index 0000000..9860bed
--- /dev/null
+++ b/caddyconfig/caddyfile/dispenser_test.go
@@ -0,0 +1,316 @@
+// Copyright 2015 Matthew Holt and The Caddy Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package caddyfile
+
+import (
+ "io"
+ "log"
+ "reflect"
+ "strings"
+ "testing"
+)
+
+func TestDispenser_Val_Next(t *testing.T) {
+ input := `host:port
+ dir1 arg1
+ dir2 arg2 arg3
+ dir3`
+ d := newTestDispenser(input)
+
+ if val := d.Val(); val != "" {
+ t.Fatalf("Val(): Should return empty string when no token loaded; got '%s'", val)
+ }
+
+ assertNext := func(shouldLoad bool, expectedCursor int, expectedVal string) {
+ if loaded := d.Next(); loaded != shouldLoad {
+ t.Errorf("Next(): Expected %v but got %v instead (val '%s')", shouldLoad, loaded, d.Val())
+ }
+ if d.cursor != expectedCursor {
+ t.Errorf("Expected cursor to be %d, but was %d", expectedCursor, d.cursor)
+ }
+ if d.nesting != 0 {
+ t.Errorf("Nesting should be 0, was %d instead", d.nesting)
+ }
+ if val := d.Val(); val != expectedVal {
+ t.Errorf("Val(): Expected '%s' but got '%s'", expectedVal, val)
+ }
+ }
+
+ assertNext(true, 0, "host:port")
+ assertNext(true, 1, "dir1")
+ assertNext(true, 2, "arg1")
+ assertNext(true, 3, "dir2")
+ assertNext(true, 4, "arg2")
+ assertNext(true, 5, "arg3")
+ assertNext(true, 6, "dir3")
+ // Note: This next test simply asserts existing behavior.
+ // If desired, we may wish to empty the token value after
+ // reading past the EOF. Open an issue if you want this change.
+ assertNext(false, 6, "dir3")
+}
+
+func TestDispenser_NextArg(t *testing.T) {
+ input := `dir1 arg1
+ dir2 arg2 arg3
+ dir3`
+ d := newTestDispenser(input)
+
+ assertNext := func(shouldLoad bool, expectedVal string, expectedCursor int) {
+ if loaded := d.Next(); loaded != shouldLoad {
+ t.Errorf("Next(): Expected %v but got %v instead (val: '%s')", shouldLoad, loaded, d.Val())
+ }
+ if d.cursor != expectedCursor {
+ t.Errorf("Next(): Expected cursor to be at %d, but it was %d", expectedCursor, d.cursor)
+ }
+ if val := d.Val(); val != expectedVal {
+ t.Errorf("Val(): Expected '%s' but got '%s'", expectedVal, val)
+ }
+ }
+
+ assertNextArg := func(expectedVal string, loadAnother bool, expectedCursor int) {
+ if !d.NextArg() {
+ t.Error("NextArg(): Should load next argument but got false instead")
+ }
+ if d.cursor != expectedCursor {
+ t.Errorf("NextArg(): Expected cursor to be at %d, but it was %d", expectedCursor, d.cursor)
+ }
+ if val := d.Val(); val != expectedVal {
+ t.Errorf("Val(): Expected '%s' but got '%s'", expectedVal, val)
+ }
+ if !loadAnother {
+ if d.NextArg() {
+ t.Fatalf("NextArg(): Should NOT load another argument, but got true instead (val: '%s')", d.Val())
+ }
+ if d.cursor != expectedCursor {
+ t.Errorf("NextArg(): Expected cursor to remain at %d, but it was %d", expectedCursor, d.cursor)
+ }
+ }
+ }
+
+ assertNext(true, "dir1", 0)
+ assertNextArg("arg1", false, 1)
+ assertNext(true, "dir2", 2)
+ assertNextArg("arg2", true, 3)
+ assertNextArg("arg3", false, 4)
+ assertNext(true, "dir3", 5)
+ assertNext(false, "dir3", 5)
+}
+
+func TestDispenser_NextLine(t *testing.T) {
+ input := `host:port
+ dir1 arg1
+ dir2 arg2 arg3`
+ d := newTestDispenser(input)
+
+ assertNextLine := func(shouldLoad bool, expectedVal string, expectedCursor int) {
+ if loaded := d.NextLine(); loaded != shouldLoad {
+ t.Errorf("NextLine(): Expected %v but got %v instead (val: '%s')", shouldLoad, loaded, d.Val())
+ }
+ if d.cursor != expectedCursor {
+ t.Errorf("NextLine(): Expected cursor to be %d, instead was %d", expectedCursor, d.cursor)
+ }
+ if val := d.Val(); val != expectedVal {
+ t.Errorf("Val(): Expected '%s' but got '%s'", expectedVal, val)
+ }
+ }
+
+ assertNextLine(true, "host:port", 0)
+ assertNextLine(true, "dir1", 1)
+ assertNextLine(false, "dir1", 1)
+ d.Next() // arg1
+ assertNextLine(true, "dir2", 3)
+ assertNextLine(false, "dir2", 3)
+ d.Next() // arg2
+ assertNextLine(false, "arg2", 4)
+ d.Next() // arg3
+ assertNextLine(false, "arg3", 5)
+}
+
+func TestDispenser_NextBlock(t *testing.T) {
+ input := `foobar1 {
+ sub1 arg1
+ sub2
+ }
+ foobar2 {
+ }`
+ d := newTestDispenser(input)
+
+ assertNextBlock := func(shouldLoad bool, expectedCursor, expectedNesting int) {
+ if loaded := d.NextBlock(); loaded != shouldLoad {
+ t.Errorf("NextBlock(): Should return %v but got %v", shouldLoad, loaded)
+ }
+ if d.cursor != expectedCursor {
+ t.Errorf("NextBlock(): Expected cursor to be %d, was %d", expectedCursor, d.cursor)
+ }
+ if d.nesting != expectedNesting {
+ t.Errorf("NextBlock(): Nesting should be %d, not %d", expectedNesting, d.nesting)
+ }
+ }
+
+ assertNextBlock(false, -1, 0)
+ d.Next() // foobar1
+ assertNextBlock(true, 2, 1)
+ assertNextBlock(true, 3, 1)
+ assertNextBlock(true, 4, 1)
+ assertNextBlock(false, 5, 0)
+ d.Next() // foobar2
+ assertNextBlock(false, 8, 0) // empty block is as if it didn't exist
+}
+
+func TestDispenser_Args(t *testing.T) {
+ var s1, s2, s3 string
+ input := `dir1 arg1 arg2 arg3
+ dir2 arg4 arg5
+ dir3 arg6 arg7
+ dir4`
+ d := newTestDispenser(input)
+
+ d.Next() // dir1
+
+ // As many strings as arguments
+ if all := d.Args(&s1, &s2, &s3); !all {
+ t.Error("Args(): Expected true, got false")
+ }
+ if s1 != "arg1" {
+ t.Errorf("Args(): Expected s1 to be 'arg1', got '%s'", s1)
+ }
+ if s2 != "arg2" {
+ t.Errorf("Args(): Expected s2 to be 'arg2', got '%s'", s2)
+ }
+ if s3 != "arg3" {
+ t.Errorf("Args(): Expected s3 to be 'arg3', got '%s'", s3)
+ }
+
+ d.Next() // dir2
+
+ // More strings than arguments
+ if all := d.Args(&s1, &s2, &s3); all {
+ t.Error("Args(): Expected false, got true")
+ }
+ if s1 != "arg4" {
+ t.Errorf("Args(): Expected s1 to be 'arg4', got '%s'", s1)
+ }
+ if s2 != "arg5" {
+ t.Errorf("Args(): Expected s2 to be 'arg5', got '%s'", s2)
+ }
+ if s3 != "arg3" {
+ t.Errorf("Args(): Expected s3 to be unchanged ('arg3'), instead got '%s'", s3)
+ }
+
+ // (quick cursor check just for kicks and giggles)
+ if d.cursor != 6 {
+ t.Errorf("Cursor should be 6, but is %d", d.cursor)
+ }
+
+ d.Next() // dir3
+
+ // More arguments than strings
+ if all := d.Args(&s1); !all {
+ t.Error("Args(): Expected true, got false")
+ }
+ if s1 != "arg6" {
+ t.Errorf("Args(): Expected s1 to be 'arg6', got '%s'", s1)
+ }
+
+ d.Next() // dir4
+
+ // No arguments or strings
+ if all := d.Args(); !all {
+ t.Error("Args(): Expected true, got false")
+ }
+
+ // No arguments but at least one string
+ if all := d.Args(&s1); all {
+ t.Error("Args(): Expected false, got true")
+ }
+}
+
+func TestDispenser_RemainingArgs(t *testing.T) {
+ input := `dir1 arg1 arg2 arg3
+ dir2 arg4 arg5
+ dir3 arg6 { arg7
+ dir4`
+ d := newTestDispenser(input)
+
+ d.Next() // dir1
+
+ args := d.RemainingArgs()
+ if expected := []string{"arg1", "arg2", "arg3"}; !reflect.DeepEqual(args, expected) {
+ t.Errorf("RemainingArgs(): Expected %v, got %v", expected, args)
+ }
+
+ d.Next() // dir2
+
+ args = d.RemainingArgs()
+ if expected := []string{"arg4", "arg5"}; !reflect.DeepEqual(args, expected) {
+ t.Errorf("RemainingArgs(): Expected %v, got %v", expected, args)
+ }
+
+ d.Next() // dir3
+
+ args = d.RemainingArgs()
+ if expected := []string{"arg6"}; !reflect.DeepEqual(args, expected) {
+ t.Errorf("RemainingArgs(): Expected %v, got %v", expected, args)
+ }
+
+ d.Next() // {
+ d.Next() // arg7
+ d.Next() // dir4
+
+ args = d.RemainingArgs()
+ if len(args) != 0 {
+ t.Errorf("RemainingArgs(): Expected %v, got %v", []string{}, args)
+ }
+}
+
+func TestDispenser_ArgErr_Err(t *testing.T) {
+ input := `dir1 {
+ }
+ dir2 arg1 arg2`
+ d := newTestDispenser(input)
+
+ d.cursor = 1 // {
+
+ if err := d.ArgErr(); err == nil || !strings.Contains(err.Error(), "{") {
+ t.Errorf("ArgErr(): Expected an error message with { in it, but got '%v'", err)
+ }
+
+ d.cursor = 5 // arg2
+
+ if err := d.ArgErr(); err == nil || !strings.Contains(err.Error(), "arg2") {
+ t.Errorf("ArgErr(): Expected an error message with 'arg2' in it; got '%v'", err)
+ }
+
+ err := d.Err("foobar")
+ if err == nil {
+ t.Fatalf("Err(): Expected an error, got nil")
+ }
+
+ if !strings.Contains(err.Error(), "Testfile:3") {
+ t.Errorf("Expected error message with filename:line in it; got '%v'", err)
+ }
+
+ if !strings.Contains(err.Error(), "foobar") {
+ t.Errorf("Expected error message with custom message in it ('foobar'); got '%v'", err)
+ }
+}
+
+func newTestDispenser(input string) *Dispenser {
+ tokens, err := allTokens(strings.NewReader(input))
+ if err != nil && err != io.EOF {
+ log.Fatalf("getting all tokens from input: %v", err)
+ }
+ return NewDispenser("Testfile", tokens)
+}
diff --git a/caddyconfig/caddyfile/lexer.go b/caddyconfig/caddyfile/lexer.go
new file mode 100755
index 0000000..efe648d
--- /dev/null
+++ b/caddyconfig/caddyfile/lexer.go
@@ -0,0 +1,150 @@
+// Copyright 2015 Light Code Labs, LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package caddyfile
+
+import (
+ "bufio"
+ "io"
+ "unicode"
+)
+
+type (
+ // lexer is a utility which can get values, token by
+ // token, from a Reader. A token is a word, and tokens
+ // are separated by whitespace. A word can be enclosed
+ // in quotes if it contains whitespace.
+ lexer struct {
+ reader *bufio.Reader
+ token Token
+ line int
+ }
+
+ // Token represents a single parsable unit.
+ Token struct {
+ File string
+ Line int
+ Text string
+ }
+)
+
+// load prepares the lexer to scan an input for tokens.
+// It discards any leading byte order mark.
+func (l *lexer) load(input io.Reader) error {
+ l.reader = bufio.NewReader(input)
+ l.line = 1
+
+ // discard byte order mark, if present
+ firstCh, _, err := l.reader.ReadRune()
+ if err != nil {
+ return err
+ }
+ if firstCh != 0xFEFF {
+ err := l.reader.UnreadRune()
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// next loads the next token into the lexer.
+// A token is delimited by whitespace, unless
+// the token starts with a quote character (")
+// in which case the token runs until the closing
+// quote (the enclosing quotes are not included).
+// Inside quoted strings, quotes may be escaped
+// with a preceding \ character. No other chars
+// may be escaped. The rest of the line is skipped
+// if a "#" character is read in. Returns true if
+// a token was loaded; false otherwise.
+func (l *lexer) next() bool {
+ var val []rune
+ var comment, quoted, escaped bool
+
+ makeToken := func() bool {
+ l.token.Text = string(val)
+ return true
+ }
+
+ for {
+ ch, _, err := l.reader.ReadRune()
+ if err != nil {
+ if len(val) > 0 {
+ return makeToken()
+ }
+ if err == io.EOF {
+ return false
+ }
+ panic(err)
+ }
+
+ if quoted {
+ if !escaped {
+ if ch == '\\' {
+ escaped = true
+ continue
+ } else if ch == '"' {
+ quoted = false
+ return makeToken()
+ }
+ }
+ if ch == '\n' {
+ l.line++
+ }
+ if escaped {
+ // only escape quotes and newlines
+ if ch != '"' && ch != '\n' {
+ val = append(val, '\\')
+ }
+ }
+ val = append(val, ch)
+ escaped = false
+ continue
+ }
+
+ if unicode.IsSpace(ch) {
+ if ch == '\r' {
+ continue
+ }
+ if ch == '\n' {
+ l.line++
+ comment = false
+ }
+ if len(val) > 0 {
+ return makeToken()
+ }
+ continue
+ }
+
+ if ch == '#' {
+ comment = true
+ }
+
+ if comment {
+ continue
+ }
+
+ if len(val) == 0 {
+ l.token = Token{Line: l.line}
+ if ch == '"' {
+ quoted = true
+ continue
+ }
+ }
+
+ val = append(val, ch)
+ }
+}
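
A quick illustration of the quoting rules next() implements: inside quotes only a double quote and a newline can be escaped, and backslashes outside quotes pass through untouched. This is a package-internal sketch (lexer is unexported) of the behavior the tests below verify.

// printTokens is a sketch that would live inside package caddyfile;
// it assumes "fmt", "log", and "strings" are imported.
func printTokens(input string) {
	var l lexer
	if err := l.load(strings.NewReader(input)); err != nil {
		log.Fatal(err)
	}
	for l.next() {
		fmt.Printf("line %d: %q\n", l.token.Line, l.token.Text)
	}
}

// printTokens(`dir "a \"quoted\" value" C:\php\php-cgi.exe`) prints:
//
//   line 1: "dir"
//   line 1: "a \"quoted\" value"
//   line 1: "C:\\php\\php-cgi.exe"
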
diff --git a/caddyconfig/caddyfile/lexer_test.go b/caddyconfig/caddyfile/lexer_test.go
new file mode 100755
index 0000000..f9a843c
--- /dev/null
+++ b/caddyconfig/caddyfile/lexer_test.go
@@ -0,0 +1,196 @@
+// Copyright 2015 Light Code Labs, LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package caddyfile
+
+import (
+ "log"
+ "strings"
+ "testing"
+)
+
+type lexerTestCase struct {
+ input string
+ expected []Token
+}
+
+func TestLexer(t *testing.T) {
+ testCases := []lexerTestCase{
+ {
+ input: `host:123`,
+ expected: []Token{
+ {Line: 1, Text: "host:123"},
+ },
+ },
+ {
+ input: `host:123
+
+ directive`,
+ expected: []Token{
+ {Line: 1, Text: "host:123"},
+ {Line: 3, Text: "directive"},
+ },
+ },
+ {
+ input: `host:123 {
+ directive
+ }`,
+ expected: []Token{
+ {Line: 1, Text: "host:123"},
+ {Line: 1, Text: "{"},
+ {Line: 2, Text: "directive"},
+ {Line: 3, Text: "}"},
+ },
+ },
+ {
+ input: `host:123 { directive }`,
+ expected: []Token{
+ {Line: 1, Text: "host:123"},
+ {Line: 1, Text: "{"},
+ {Line: 1, Text: "directive"},
+ {Line: 1, Text: "}"},
+ },
+ },
+ {
+ input: `host:123 {
+ #comment
+ directive
+ # comment
+ foobar # another comment
+ }`,
+ expected: []Token{
+ {Line: 1, Text: "host:123"},
+ {Line: 1, Text: "{"},
+ {Line: 3, Text: "directive"},
+ {Line: 5, Text: "foobar"},
+ {Line: 6, Text: "}"},
+ },
+ },
+ {
+ input: `a "quoted value" b
+ foobar`,
+ expected: []Token{
+ {Line: 1, Text: "a"},
+ {Line: 1, Text: "quoted value"},
+ {Line: 1, Text: "b"},
+ {Line: 2, Text: "foobar"},
+ },
+ },
+ {
+ input: `A "quoted \"value\" inside" B`,
+ expected: []Token{
+ {Line: 1, Text: "A"},
+ {Line: 1, Text: `quoted "value" inside`},
+ {Line: 1, Text: "B"},
+ },
+ },
+ {
+ input: "A \"newline \\\ninside\" quotes",
+ expected: []Token{
+ {Line: 1, Text: "A"},
+ {Line: 1, Text: "newline \ninside"},
+ {Line: 2, Text: "quotes"},
+ },
+ },
+ {
+ input: `"don't\escape"`,
+ expected: []Token{
+ {Line: 1, Text: `don't\escape`},
+ },
+ },
+ {
+ input: `"don't\\escape"`,
+ expected: []Token{
+ {Line: 1, Text: `don't\\escape`},
+ },
+ },
+ {
+ input: `A "quoted value with line
+ break inside" {
+ foobar
+ }`,
+ expected: []Token{
+ {Line: 1, Text: "A"},
+ {Line: 1, Text: "quoted value with line\n\t\t\t\t\tbreak inside"},
+ {Line: 2, Text: "{"},
+ {Line: 3, Text: "foobar"},
+ {Line: 4, Text: "}"},
+ },
+ },
+ {
+ input: `"C:\php\php-cgi.exe"`,
+ expected: []Token{
+ {Line: 1, Text: `C:\php\php-cgi.exe`},
+ },
+ },
+ {
+ input: `empty "" string`,
+ expected: []Token{
+ {Line: 1, Text: `empty`},
+ {Line: 1, Text: ``},
+ {Line: 1, Text: `string`},
+ },
+ },
+ {
+ input: "skip those\r\nCR characters",
+ expected: []Token{
+ {Line: 1, Text: "skip"},
+ {Line: 1, Text: "those"},
+ {Line: 2, Text: "CR"},
+ {Line: 2, Text: "characters"},
+ },
+ },
+ {
+ input: "\xEF\xBB\xBF:8080", // test with leading byte order mark
+ expected: []Token{
+ {Line: 1, Text: ":8080"},
+ },
+ },
+ }
+
+ for i, testCase := range testCases {
+ actual := tokenize(testCase.input)
+ lexerCompare(t, i, testCase.expected, actual)
+ }
+}
+
+func tokenize(input string) (tokens []Token) {
+ l := lexer{}
+ if err := l.load(strings.NewReader(input)); err != nil {
+ log.Printf("[ERROR] load failed: %v", err)
+ }
+ for l.next() {
+ tokens = append(tokens, l.token)
+ }
+ return
+}
+
+func lexerCompare(t *testing.T, n int, expected, actual []Token) {
+ if len(expected) != len(actual) {
+ t.Errorf("Test case %d: expected %d token(s) but got %d", n, len(expected), len(actual))
+ }
+
+ for i := 0; i < len(actual) && i < len(expected); i++ {
+ if actual[i].Line != expected[i].Line {
+ t.Errorf("Test case %d token %d ('%s'): expected line %d but was line %d",
+ n, i, expected[i].Text, expected[i].Line, actual[i].Line)
+ break
+ }
+ if actual[i].Text != expected[i].Text {
+ t.Errorf("Test case %d token %d: expected text '%s' but was '%s'",
+ n, i, expected[i].Text, actual[i].Text)
+ break
+ }
+ }
+}
diff --git a/caddyconfig/caddyfile/parse.go b/caddyconfig/caddyfile/parse.go
new file mode 100755
index 0000000..cc91e3d
--- /dev/null
+++ b/caddyconfig/caddyfile/parse.go
@@ -0,0 +1,492 @@
+// Copyright 2015 Light Code Labs, LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package caddyfile
+
+import (
+ "io"
+ "log"
+ "os"
+ "path/filepath"
+ "strings"
+)
+
+// Parse parses the input just enough to group tokens, in
+// order, by server block. No further parsing is performed.
+// Server blocks are returned in the order in which they appear.
+// Directives that do not appear in validDirectives will cause
+// an error. If you do not want to check for valid directives,
+// pass in nil instead.
+func Parse(filename string, input io.Reader, validDirectives []string) ([]ServerBlock, error) {
+ tokens, err := allTokens(input)
+ if err != nil {
+ return nil, err
+ }
+ p := parser{Dispenser: NewDispenser(filename, tokens), validDirectives: validDirectives}
+ return p.parseAll()
+}
+
+// allTokens lexes the entire input, but does not parse it.
+// It returns all the tokens from the input, unstructured
+// and in order.
+func allTokens(input io.Reader) ([]Token, error) {
+ l := new(lexer)
+ err := l.load(input)
+ if err != nil {
+ return nil, err
+ }
+ var tokens []Token
+ for l.next() {
+ tokens = append(tokens, l.token)
+ }
+ return tokens, nil
+}
+
+type parser struct {
+ *Dispenser
+ block ServerBlock // current server block being parsed
+ validDirectives []string // a directive must be valid or it's an error
+ eof bool // if we encounter a valid EOF in a hard place
+ definedSnippets map[string][]Token
+}
+
+func (p *parser) parseAll() ([]ServerBlock, error) {
+ var blocks []ServerBlock
+
+ for p.Next() {
+ err := p.parseOne()
+ if err != nil {
+ return blocks, err
+ }
+ if len(p.block.Keys) > 0 {
+ blocks = append(blocks, p.block)
+ }
+ }
+
+ return blocks, nil
+}
+
+func (p *parser) parseOne() error {
+ p.block = ServerBlock{Tokens: make(map[string][]Token)}
+
+ return p.begin()
+}
+
+func (p *parser) begin() error {
+ if len(p.tokens) == 0 {
+ return nil
+ }
+
+ err := p.addresses()
+
+ if err != nil {
+ return err
+ }
+
+ if p.eof {
+ // this happens if the Caddyfile consists of only
+ // a line of addresses and nothing else
+ return nil
+ }
+
+ if ok, name := p.isSnippet(); ok {
+ if p.definedSnippets == nil {
+ p.definedSnippets = map[string][]Token{}
+ }
+ if _, found := p.definedSnippets[name]; found {
+ return p.Errf("redeclaration of previously declared snippet %s", name)
+ }
+ // consume all tokens until the matching close brace
+ tokens, err := p.snippetTokens()
+ if err != nil {
+ return err
+ }
+ p.definedSnippets[name] = tokens
+ // empty block keys so we don't save this block as a real server.
+ p.block.Keys = nil
+ return nil
+ }
+
+ return p.blockContents()
+}
+
+func (p *parser) addresses() error {
+ var expectingAnother bool
+
+ for {
+ tkn := replaceEnvVars(p.Val())
+
+ // special case: import directive replaces tokens during parse-time
+ if tkn == "import" && p.isNewLine() {
+ err := p.doImport()
+ if err != nil {
+ return err
+ }
+ continue
+ }
+
+ // Open brace definitely indicates end of addresses
+ if tkn == "{" {
+ if expectingAnother {
+ return p.Errf("Expected another address but had '%s' - check for extra comma", tkn)
+ }
+ break
+ }
+
+ if tkn != "" { // empty token possible if user typed ""
+ // Trailing comma indicates another address will follow, which
+ // may possibly be on the next line
+ if tkn[len(tkn)-1] == ',' {
+ tkn = tkn[:len(tkn)-1]
+ expectingAnother = true
+ } else {
+ expectingAnother = false // but we may still see another one on this line
+ }
+
+ p.block.Keys = append(p.block.Keys, tkn)
+ }
+
+ // Advance token and possibly break out of loop or return error
+ hasNext := p.Next()
+ if expectingAnother && !hasNext {
+ return p.EOFErr()
+ }
+ if !hasNext {
+ p.eof = true
+ break // EOF
+ }
+ if !expectingAnother && p.isNewLine() {
+ break
+ }
+ }
+
+ return nil
+}
+
+func (p *parser) blockContents() error {
+ errOpenCurlyBrace := p.openCurlyBrace()
+ if errOpenCurlyBrace != nil {
+ // single-server configs don't need curly braces
+ p.cursor--
+ }
+
+ err := p.directives()
+ if err != nil {
+ return err
+ }
+
+ // Only look for close curly brace if there was an opening
+ if errOpenCurlyBrace == nil {
+ err = p.closeCurlyBrace()
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// directives parses through all the lines for directives
+// and it expects the next token to be the first
+// directive. It goes until EOF or closing curly brace
+// which ends the server block.
+func (p *parser) directives() error {
+ for p.Next() {
+ // end of server block
+ if p.Val() == "}" {
+ break
+ }
+
+ // special case: import directive replaces tokens during parse-time
+ if p.Val() == "import" {
+ err := p.doImport()
+ if err != nil {
+ return err
+ }
+ p.cursor-- // cursor is advanced when we continue, so roll back one more
+ continue
+ }
+
+ // normal case: parse a directive on this line
+ if err := p.directive(); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// doImport swaps out the import directive and its argument
+// (a total of 2 tokens) with the tokens in the specified file
+// or globbing pattern. When the function returns, the cursor
+// is on the token before where the import directive was. In
+// other words, call Next() to access the first token that was
+// imported.
+func (p *parser) doImport() error {
+ // syntax checks
+ if !p.NextArg() {
+ return p.ArgErr()
+ }
+ importPattern := replaceEnvVars(p.Val())
+ if importPattern == "" {
+ return p.Err("Import requires a non-empty filepath")
+ }
+ if p.NextArg() {
+ return p.Err("Import takes only one argument (glob pattern or file)")
+ }
+ // splice out the import directive and its argument (2 tokens total)
+ tokensBefore := p.tokens[:p.cursor-1]
+ tokensAfter := p.tokens[p.cursor+1:]
+ var importedTokens []Token
+
+ // first check snippets. That is a simple, non-recursive replacement
+ if p.definedSnippets != nil && p.definedSnippets[importPattern] != nil {
+ importedTokens = p.definedSnippets[importPattern]
+ } else {
+ // make path relative to the file of the _token_ being processed rather
+ // than current working directory (issue #867) and then use glob to get
+ // list of matching filenames
+ absFile, err := filepath.Abs(p.Dispenser.File())
+ if err != nil {
+ return p.Errf("Failed to get absolute path of file: %s: %v", p.Dispenser.filename, err)
+ }
+
+ var matches []string
+ var globPattern string
+ if !filepath.IsAbs(importPattern) {
+ globPattern = filepath.Join(filepath.Dir(absFile), importPattern)
+ } else {
+ globPattern = importPattern
+ }
+ if strings.Count(globPattern, "*") > 1 || strings.Count(globPattern, "?") > 1 ||
+ (strings.Contains(globPattern, "[") && strings.Contains(globPattern, "]")) {
+ // See issue #2096 - a pattern with many glob expansions can hang for too long
+ return p.Errf("Glob pattern may only contain one wildcard (*), but has others: %s", globPattern)
+ }
+ matches, err = filepath.Glob(globPattern)
+
+ if err != nil {
+ return p.Errf("Failed to use import pattern %s: %v", importPattern, err)
+ }
+ if len(matches) == 0 {
+ if strings.ContainsAny(globPattern, "*?[]") {
+ log.Printf("[WARNING] No files matching import glob pattern: %s", importPattern)
+ } else {
+ return p.Errf("File to import not found: %s", importPattern)
+ }
+ }
+
+ // collect all the imported tokens
+
+ for _, importFile := range matches {
+ newTokens, err := p.doSingleImport(importFile)
+ if err != nil {
+ return err
+ }
+ importedTokens = append(importedTokens, newTokens...)
+ }
+ }
+
+ // splice the imported tokens in the place of the import statement
+ // and rewind cursor so Next() will land on first imported token
+ p.tokens = append(tokensBefore, append(importedTokens, tokensAfter...)...)
+ p.cursor--
+
+ return nil
+}
+
+// doSingleImport lexes the individual file at importFile and returns
+// its tokens or an error, if any.
+func (p *parser) doSingleImport(importFile string) ([]Token, error) {
+ file, err := os.Open(importFile)
+ if err != nil {
+ return nil, p.Errf("Could not import %s: %v", importFile, err)
+ }
+ defer file.Close()
+
+ if info, err := file.Stat(); err != nil {
+ return nil, p.Errf("Could not import %s: %v", importFile, err)
+ } else if info.IsDir() {
+ return nil, p.Errf("Could not import %s: is a directory", importFile)
+ }
+
+ importedTokens, err := allTokens(file)
+ if err != nil {
+ return nil, p.Errf("Could not read tokens while importing %s: %v", importFile, err)
+ }
+
+ // Tack the file path onto these tokens so errors show the imported file's name
+ // (we use full, absolute path to avoid bugs: issue #1892)
+ filename, err := filepath.Abs(importFile)
+ if err != nil {
+ return nil, p.Errf("Failed to get absolute path of file: %s: %v", p.Dispenser.filename, err)
+ }
+ for i := 0; i < len(importedTokens); i++ {
+ importedTokens[i].File = filename
+ }
+
+ return importedTokens, nil
+}
+
+// directive collects tokens until the directive's scope
+// closes (either end of line or end of curly brace block).
+// It expects the currently-loaded token to be a directive
+// (or } that ends a server block). The collected tokens
+// are loaded into the current server block for later use
+// by directive setup functions.
+func (p *parser) directive() error {
+ dir := replaceEnvVars(p.Val())
+ nesting := 0
+
+ if !p.validDirective(dir) {
+ return p.Errf("Unknown directive '%s'", dir)
+ }
+
+ // The directive itself is appended as a relevant token
+ p.block.Tokens[dir] = append(p.block.Tokens[dir], p.tokens[p.cursor])
+
+ for p.Next() {
+ if p.Val() == "{" {
+ nesting++
+ } else if p.isNewLine() && nesting == 0 {
+ p.cursor-- // read too far
+ break
+ } else if p.Val() == "}" && nesting > 0 {
+ nesting--
+ } else if p.Val() == "}" && nesting == 0 {
+ return p.Err("Unexpected '}' because no matching opening brace")
+ } else if p.Val() == "import" && p.isNewLine() {
+ if err := p.doImport(); err != nil {
+ return err
+ }
+ p.cursor-- // cursor is advanced when we continue, so roll back one more
+ continue
+ }
+ p.tokens[p.cursor].Text = replaceEnvVars(p.tokens[p.cursor].Text)
+ p.block.Tokens[dir] = append(p.block.Tokens[dir], p.tokens[p.cursor])
+ }
+
+ if nesting > 0 {
+ return p.EOFErr()
+ }
+ return nil
+}
+
+// openCurlyBrace expects the current token to be an
+// opening curly brace. This acts like an assertion
+// because it returns an error if the token is not
+// an opening curly brace. It does NOT advance the token.
+func (p *parser) openCurlyBrace() error {
+ if p.Val() != "{" {
+ return p.SyntaxErr("{")
+ }
+ return nil
+}
+
+// closeCurlyBrace expects the current token to be
+// a closing curly brace. This acts like an assertion
+// because it returns an error if the token is not
+// a closing curly brace. It does NOT advance the token.
+func (p *parser) closeCurlyBrace() error {
+ if p.Val() != "}" {
+ return p.SyntaxErr("}")
+ }
+ return nil
+}
+
+// validDirective returns true if dir is in p.validDirectives.
+func (p *parser) validDirective(dir string) bool {
+ if p.validDirectives == nil {
+ return true
+ }
+ for _, d := range p.validDirectives {
+ if d == dir {
+ return true
+ }
+ }
+ return false
+}
+
+// replaceEnvVars replaces environment variables that appear in the token
+// and understands both the $UNIX and %WINDOWS% syntaxes.
+func replaceEnvVars(s string) string {
+ s = replaceEnvReferences(s, "{%", "%}")
+ s = replaceEnvReferences(s, "{$", "}")
+ return s
+}
+
+// replaceEnvReferences performs the actual replacement of env variables
+// in s, given the placeholder start and placeholder end strings.
+func replaceEnvReferences(s, refStart, refEnd string) string {
+ index := strings.Index(s, refStart)
+ for index != -1 {
+ endIndex := strings.Index(s[index:], refEnd)
+ if endIndex == -1 {
+ break
+ }
+
+ endIndex += index
+ if endIndex > index+len(refStart) {
+ ref := s[index : endIndex+len(refEnd)]
+ s = strings.Replace(s, ref, os.Getenv(ref[len(refStart):len(ref)-len(refEnd)]), -1)
+ } else {
+ return s
+ }
+ index = strings.Index(s, refStart)
+ }
+ return s
+}
+
+// ServerBlock associates any number of keys (usually addresses
+// of some sort) with tokens (grouped by directive name).
+type ServerBlock struct {
+ Keys []string
+ Tokens map[string][]Token
+}
+
+func (p *parser) isSnippet() (bool, string) {
+ keys := p.block.Keys
+ // A snippet block is a single key with parens. Nothing else qualifies.
+ if len(keys) == 1 && strings.HasPrefix(keys[0], "(") && strings.HasSuffix(keys[0], ")") {
+ return true, strings.TrimSuffix(keys[0][1:], ")")
+ }
+ return false, ""
+}
+
+// snippetTokens reads and stores everything in a block for later replay.
+func (p *parser) snippetTokens() ([]Token, error) {
+ // snippet must have curlies.
+ err := p.openCurlyBrace()
+ if err != nil {
+ return nil, err
+ }
+ count := 1
+ tokens := []Token{}
+ for p.Next() {
+ if p.Val() == "}" {
+ count--
+ if count == 0 {
+ break
+ }
+ }
+ if p.Val() == "{" {
+ count++
+ }
+ tokens = append(tokens, p.tokens[p.cursor])
+ }
+ // make sure we're matched up
+ if count != 0 {
+ return nil, p.SyntaxErr("}")
+ }
+ return tokens, nil
+}
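
To round out the parser: a sketch of Parse exercising snippets, in-block import, and env var replacement together. The input string, names, and environment variable are made up for illustration; the behavior shown follows addresses(), doImport(), and replaceEnvVars() above.

package main

import (
	"fmt"
	"log"
	"os"
	"strings"

	"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"
)

func main() {
	os.Setenv("UPSTREAM_PORT", "9000")

	input := `(logging) {
	log stdout
}
example.com:{$UPSTREAM_PORT} {
	import logging
	gzip
}`

	// Passing nil for validDirectives skips directive validation.
	blocks, err := caddyfile.Parse("Caddyfile", strings.NewReader(input), nil)
	if err != nil {
		log.Fatal(err)
	}
	for _, b := range blocks {
		fmt.Println("keys:", b.Keys) // env var expanded: [example.com:9000]
		for dir, tokens := range b.Tokens {
			// the snippet splice makes "log stdout" part of this block
			fmt.Printf("  %s: %d token(s)\n", dir, len(tokens))
		}
	}
}
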
diff --git a/caddyconfig/caddyfile/parse_test.go b/caddyconfig/caddyfile/parse_test.go
new file mode 100755
index 0000000..654c68d
--- /dev/null
+++ b/caddyconfig/caddyfile/parse_test.go
@@ -0,0 +1,718 @@
+// Copyright 2015 Light Code Labs, LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package caddyfile
+
+import (
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+)
+
+func TestAllTokens(t *testing.T) {
+ input := strings.NewReader("a b c\nd e")
+ expected := []string{"a", "b", "c", "d", "e"}
+ tokens, err := allTokens(input)
+
+ if err != nil {
+ t.Fatalf("Expected no error, got %v", err)
+ }
+ if len(tokens) != len(expected) {
+ t.Fatalf("Expected %d tokens, got %d", len(expected), len(tokens))
+ }
+
+ for i, val := range expected {
+ if tokens[i].Text != val {
+ t.Errorf("Token %d should be '%s' but was '%s'", i, val, tokens[i].Text)
+ }
+ }
+}
+
+func TestParseOneAndImport(t *testing.T) {
+ testParseOne := func(input string) (ServerBlock, error) {
+ p := testParser(input)
+ p.Next() // parseOne doesn't call Next() to start, so we must
+ err := p.parseOne()
+ return p.block, err
+ }
+
+ for i, test := range []struct {
+ input string
+ shouldErr bool
+ keys []string
+ tokens map[string]int // map of directive name to number of tokens expected
+ }{
+ {`localhost`, false, []string{
+ "localhost",
+ }, map[string]int{}},
+
+ {`localhost
+ dir1`, false, []string{
+ "localhost",
+ }, map[string]int{
+ "dir1": 1,
+ }},
+
+ {`localhost:1234
+ dir1 foo bar`, false, []string{
+ "localhost:1234",
+ }, map[string]int{
+ "dir1": 3,
+ }},
+
+ {`localhost {
+ dir1
+ }`, false, []string{
+ "localhost",
+ }, map[string]int{
+ "dir1": 1,
+ }},
+
+ {`localhost:1234 {
+ dir1 foo bar
+ dir2
+ }`, false, []string{
+ "localhost:1234",
+ }, map[string]int{
+ "dir1": 3,
+ "dir2": 1,
+ }},
+
+ {`http://localhost https://localhost
+ dir1 foo bar`, false, []string{
+ "http://localhost",
+ "https://localhost",
+ }, map[string]int{
+ "dir1": 3,
+ }},
+
+ {`http://localhost https://localhost {
+ dir1 foo bar
+ }`, false, []string{
+ "http://localhost",
+ "https://localhost",
+ }, map[string]int{
+ "dir1": 3,
+ }},
+
+ {`http://localhost, https://localhost {
+ dir1 foo bar
+ }`, false, []string{
+ "http://localhost",
+ "https://localhost",
+ }, map[string]int{
+ "dir1": 3,
+ }},
+
+ {`http://localhost, {
+ }`, true, []string{
+ "http://localhost",
+ }, map[string]int{}},
+
+ {`host1:80, http://host2.com
+ dir1 foo bar
+ dir2 baz`, false, []string{
+ "host1:80",
+ "http://host2.com",
+ }, map[string]int{
+ "dir1": 3,
+ "dir2": 2,
+ }},
+
+ {`http://host1.com,
+ http://host2.com,
+ https://host3.com`, false, []string{
+ "http://host1.com",
+ "http://host2.com",
+ "https://host3.com",
+ }, map[string]int{}},
+
+ {`http://host1.com:1234, https://host2.com
+ dir1 foo {
+ bar baz
+ }
+ dir2`, false, []string{
+ "http://host1.com:1234",
+ "https://host2.com",
+ }, map[string]int{
+ "dir1": 6,
+ "dir2": 1,
+ }},
+
+ {`127.0.0.1
+ dir1 {
+ bar baz
+ }
+ dir2 {
+ foo bar
+ }`, false, []string{
+ "127.0.0.1",
+ }, map[string]int{
+ "dir1": 5,
+ "dir2": 5,
+ }},
+
+ {`localhost
+ dir1 {
+ foo`, true, []string{
+ "localhost",
+ }, map[string]int{
+ "dir1": 3,
+ }},
+
+ {`localhost
+ dir1 {
+ }`, false, []string{
+ "localhost",
+ }, map[string]int{
+ "dir1": 3,
+ }},
+
+ {`localhost
+ dir1 {
+ } }`, true, []string{
+ "localhost",
+ }, map[string]int{
+ "dir1": 3,
+ }},
+
+ {`localhost
+ dir1 {
+ nested {
+ foo
+ }
+ }
+ dir2 foo bar`, false, []string{
+ "localhost",
+ }, map[string]int{
+ "dir1": 7,
+ "dir2": 3,
+ }},
+
+ {``, false, []string{}, map[string]int{}},
+
+ {`localhost
+ dir1 arg1
+ import testdata/import_test1.txt`, false, []string{
+ "localhost",
+ }, map[string]int{
+ "dir1": 2,
+ "dir2": 3,
+ "dir3": 1,
+ }},
+
+ {`import testdata/import_test2.txt`, false, []string{
+ "host1",
+ }, map[string]int{
+ "dir1": 1,
+ "dir2": 2,
+ }},
+
+ {`import testdata/import_test1.txt testdata/import_test2.txt`, true, []string{}, map[string]int{}},
+
+ {`import testdata/not_found.txt`, true, []string{}, map[string]int{}},
+
+ {`""`, false, []string{}, map[string]int{}},
+
+ {``, false, []string{}, map[string]int{}},
+
+ // test cases found by fuzzing!
+ {`import }{$"`, true, []string{}, map[string]int{}},
+ {`import /*/*.txt`, true, []string{}, map[string]int{}},
+ {`import /???/?*?o`, true, []string{}, map[string]int{}},
+ {`import /??`, true, []string{}, map[string]int{}},
+ {`import /[a-z]`, true, []string{}, map[string]int{}},
+ {`import {$}`, true, []string{}, map[string]int{}},
+ {`import {%}`, true, []string{}, map[string]int{}},
+ {`import {$$}`, true, []string{}, map[string]int{}},
+ {`import {%%}`, true, []string{}, map[string]int{}},
+ } {
+ result, err := testParseOne(test.input)
+
+ if test.shouldErr && err == nil {
+ t.Errorf("Test %d: Expected an error, but didn't get one", i)
+ }
+ if !test.shouldErr && err != nil {
+ t.Errorf("Test %d: Expected no error, but got: %v", i, err)
+ }
+
+ if len(result.Keys) != len(test.keys) {
+ t.Errorf("Test %d: Expected %d keys, got %d",
+ i, len(test.keys), len(result.Keys))
+ continue
+ }
+ for j, addr := range result.Keys {
+ if addr != test.keys[j] {
+ t.Errorf("Test %d, key %d: Expected '%s', but was '%s'",
+ i, j, test.keys[j], addr)
+ }
+ }
+
+ if len(result.Tokens) != len(test.tokens) {
+ t.Errorf("Test %d: Expected %d directives, had %d",
+ i, len(test.tokens), len(result.Tokens))
+ continue
+ }
+ for directive, tokens := range result.Tokens {
+ if len(tokens) != test.tokens[directive] {
+ t.Errorf("Test %d, directive '%s': Expected %d tokens, counted %d",
+ i, directive, test.tokens[directive], len(tokens))
+ continue
+ }
+ }
+ }
+}
+
+func TestRecursiveImport(t *testing.T) {
+ testParseOne := func(input string) (ServerBlock, error) {
+ p := testParser(input)
+ p.Next() // parseOne doesn't call Next() to start, so we must
+ err := p.parseOne()
+ return p.block, err
+ }
+
+ isExpected := func(got ServerBlock) bool {
+ if len(got.Keys) != 1 || got.Keys[0] != "localhost" {
+ t.Errorf("got keys unexpected: expect localhost, got %v", got.Keys)
+ return false
+ }
+ if len(got.Tokens) != 2 {
+ t.Errorf("got wrong number of tokens: expect 2, got %d", len(got.Tokens))
+ return false
+ }
+ if len(got.Tokens["dir1"]) != 1 || len(got.Tokens["dir2"]) != 2 {
+ t.Errorf("got unexpect tokens: %v", got.Tokens)
+ return false
+ }
+ return true
+ }
+
+ recursiveFile1, err := filepath.Abs("testdata/recursive_import_test1")
+ if err != nil {
+ t.Fatal(err)
+ }
+ recursiveFile2, err := filepath.Abs("testdata/recursive_import_test2")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ // test relative recursive import
+ err = ioutil.WriteFile(recursiveFile1, []byte(
+ `localhost
+ dir1
+ import recursive_import_test2`), 0644)
+ if err != nil {
+ t.Fatal(err)
+ }
+ defer os.Remove(recursiveFile1)
+
+ err = ioutil.WriteFile(recursiveFile2, []byte("dir2 1"), 0644)
+ if err != nil {
+ t.Fatal(err)
+ }
+ defer os.Remove(recursiveFile2)
+
+ // import absolute path
+ result, err := testParseOne("import " + recursiveFile1)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if !isExpected(result) {
+ t.Error("absolute+relative import failed")
+ }
+
+ // import relative path
+ result, err = testParseOne("import testdata/recursive_import_test1")
+ if err != nil {
+ t.Fatal(err)
+ }
+ if !isExpected(result) {
+ t.Error("relative+relative import failed")
+ }
+
+ // test absolute recursive import
+ err = ioutil.WriteFile(recursiveFile1, []byte(
+ `localhost
+ dir1
+ import `+recursiveFile2), 0644)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ // import absolute path
+ result, err = testParseOne("import " + recursiveFile1)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if !isExpected(result) {
+ t.Error("absolute+absolute import failed")
+ }
+
+ // import relative path
+ result, err = testParseOne("import testdata/recursive_import_test1")
+ if err != nil {
+ t.Fatal(err)
+ }
+ if !isExpected(result) {
+ t.Error("relative+absolute import failed")
+ }
+}
+
+func TestDirectiveImport(t *testing.T) {
+ testParseOne := func(input string) (ServerBlock, error) {
+ p := testParser(input)
+ p.Next() // parseOne doesn't call Next() to start, so we must
+ err := p.parseOne()
+ return p.block, err
+ }
+
+ isExpected := func(got ServerBlock) bool {
+ if len(got.Keys) != 1 || got.Keys[0] != "localhost" {
+ t.Errorf("got keys unexpected: expect localhost, got %v", got.Keys)
+ return false
+ }
+ if len(got.Tokens) != 2 {
+ t.Errorf("got wrong number of tokens: expect 2, got %d", len(got.Tokens))
+ return false
+ }
+ if len(got.Tokens["dir1"]) != 1 || len(got.Tokens["proxy"]) != 8 {
+ t.Errorf("got unexpect tokens: %v", got.Tokens)
+ return false
+ }
+ return true
+ }
+
+ directiveFile, err := filepath.Abs("testdata/directive_import_test")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ err = ioutil.WriteFile(directiveFile, []byte(`prop1 1
+ prop2 2`), 0644)
+ if err != nil {
+ t.Fatal(err)
+ }
+ defer os.Remove(directiveFile)
+
+ // import from existing file
+ result, err := testParseOne(`localhost
+ dir1
+ proxy {
+ import testdata/directive_import_test
+ transparent
+ }`)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if !isExpected(result) {
+ t.Error("directive import failed")
+ }
+
+ // import from nonexistent file
+ _, err = testParseOne(`localhost
+ dir1
+ proxy {
+ import testdata/nonexistent_file
+ transparent
+ }`)
+ if err == nil {
+ t.Fatal("expected error when importing a nonexistent file")
+ }
+}
+
+func TestParseAll(t *testing.T) {
+ for i, test := range []struct {
+ input string
+ shouldErr bool
+ keys [][]string // keys per server block, in order
+ }{
+ {`localhost`, false, [][]string{
+ {"localhost"},
+ }},
+
+ {`localhost:1234`, false, [][]string{
+ {"localhost:1234"},
+ }},
+
+ {`localhost:1234 {
+ }
+ localhost:2015 {
+ }`, false, [][]string{
+ {"localhost:1234"},
+ {"localhost:2015"},
+ }},
+
+ {`localhost:1234, http://host2`, false, [][]string{
+ {"localhost:1234", "http://host2"},
+ }},
+
+ {`localhost:1234, http://host2,`, true, [][]string{}},
+
+ {`http://host1.com, http://host2.com {
+ }
+ https://host3.com, https://host4.com {
+ }`, false, [][]string{
+ {"http://host1.com", "http://host2.com"},
+ {"https://host3.com", "https://host4.com"},
+ }},
+
+ {`import testdata/import_glob*.txt`, false, [][]string{
+ {"glob0.host0"},
+ {"glob0.host1"},
+ {"glob1.host0"},
+ {"glob2.host0"},
+ }},
+
+ {`import notfound/*`, false, [][]string{}}, // glob needn't error with no matches
+ {`import notfound/file.conf`, true, [][]string{}}, // but a specific file should
+ } {
+ p := testParser(test.input)
+ blocks, err := p.parseAll()
+
+ if test.shouldErr && err == nil {
+ t.Errorf("Test %d: Expected an error, but didn't get one", i)
+ }
+ if !test.shouldErr && err != nil {
+ t.Errorf("Test %d: Expected no error, but got: %v", i, err)
+ }
+
+ if len(blocks) != len(test.keys) {
+ t.Errorf("Test %d: Expected %d server blocks, got %d",
+ i, len(test.keys), len(blocks))
+ continue
+ }
+ for j, block := range blocks {
+ if len(block.Keys) != len(test.keys[j]) {
+ t.Errorf("Test %d: Expected %d keys in block %d, got %d",
+ i, len(test.keys[j]), j, len(block.Keys))
+ continue
+ }
+ for k, addr := range block.Keys {
+ if addr != test.keys[j][k] {
+ t.Errorf("Test %d, block %d, key %d: Expected '%s', but got '%s'",
+ i, j, k, test.keys[j][k], addr)
+ }
+ }
+ }
+ }
+}
+
+func TestEnvironmentReplacement(t *testing.T) {
+ os.Setenv("PORT", "8080")
+ os.Setenv("ADDRESS", "servername.com")
+ os.Setenv("FOOBAR", "foobar")
+ os.Setenv("PARTIAL_DIR", "r1")
+
+ // basic test; unix-style env vars
+ p := testParser(`{$ADDRESS}`)
+ blocks, _ := p.parseAll()
+ if actual, expected := blocks[0].Keys[0], "servername.com"; expected != actual {
+ t.Errorf("Expected key to be '%s' but was '%s'", expected, actual)
+ }
+
+	// partial token replacement; unix-style env var
+ p = testParser(`di{$PARTIAL_DIR}`)
+ blocks, _ = p.parseAll()
+ if actual, expected := blocks[0].Keys[0], "dir1"; expected != actual {
+ t.Errorf("Expected key to be '%s' but was '%s'", expected, actual)
+ }
+
+ // multiple vars per token
+ p = testParser(`{$ADDRESS}:{$PORT}`)
+ blocks, _ = p.parseAll()
+ if actual, expected := blocks[0].Keys[0], "servername.com:8080"; expected != actual {
+ t.Errorf("Expected key to be '%s' but was '%s'", expected, actual)
+ }
+
+ // windows-style var and unix style in same token
+ p = testParser(`{%ADDRESS%}:{$PORT}`)
+ blocks, _ = p.parseAll()
+ if actual, expected := blocks[0].Keys[0], "servername.com:8080"; expected != actual {
+ t.Errorf("Expected key to be '%s' but was '%s'", expected, actual)
+ }
+
+ // reverse order
+ p = testParser(`{$ADDRESS}:{%PORT%}`)
+ blocks, _ = p.parseAll()
+ if actual, expected := blocks[0].Keys[0], "servername.com:8080"; expected != actual {
+ t.Errorf("Expected key to be '%s' but was '%s'", expected, actual)
+ }
+
+ // env var in server block body as argument
+ p = testParser(":{%PORT%}\ndir1 {$FOOBAR}")
+ blocks, _ = p.parseAll()
+ if actual, expected := blocks[0].Keys[0], ":8080"; expected != actual {
+ t.Errorf("Expected key to be '%s' but was '%s'", expected, actual)
+ }
+ if actual, expected := blocks[0].Tokens["dir1"][1].Text, "foobar"; expected != actual {
+ t.Errorf("Expected argument to be '%s' but was '%s'", expected, actual)
+ }
+
+ // combined windows env vars in argument
+ p = testParser(":{%PORT%}\ndir1 {%ADDRESS%}/{%FOOBAR%}")
+ blocks, _ = p.parseAll()
+ if actual, expected := blocks[0].Tokens["dir1"][1].Text, "servername.com/foobar"; expected != actual {
+ t.Errorf("Expected argument to be '%s' but was '%s'", expected, actual)
+ }
+
+ // malformed env var (windows)
+ p = testParser(":1234\ndir1 {%ADDRESS}")
+ blocks, _ = p.parseAll()
+ if actual, expected := blocks[0].Tokens["dir1"][1].Text, "{%ADDRESS}"; expected != actual {
+		t.Errorf("Expected argument to be '%s' but was '%s'", expected, actual)
+ }
+
+	// nonexistent env var (unix-style) yields an empty replacement
+ p = testParser(`:{$PORT$}`)
+ blocks, _ = p.parseAll()
+ if actual, expected := blocks[0].Keys[0], ":"; expected != actual {
+ t.Errorf("Expected key to be '%s' but was '%s'", expected, actual)
+ }
+
+ // in quoted field
+ p = testParser(":1234\ndir1 \"Test {$FOOBAR} test\"")
+ blocks, _ = p.parseAll()
+ if actual, expected := blocks[0].Tokens["dir1"][1].Text, "Test foobar test"; expected != actual {
+ t.Errorf("Expected argument to be '%s' but was '%s'", expected, actual)
+ }
+
+	// env var after a template action in a quoted token
+ p = testParser(":1234\nanswer \"{{ .Name }} {$FOOBAR}\"")
+ blocks, _ = p.parseAll()
+ if actual, expected := blocks[0].Tokens["answer"][1].Text, "{{ .Name }} foobar"; expected != actual {
+ t.Errorf("Expected argument to be '%s' but was '%s'", expected, actual)
+ }
+}
+
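+// testParser returns a parser that reads input through a test dispenser.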
+func testParser(input string) parser {
+ return parser{Dispenser: newTestDispenser(input)}
+}
+
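+// TestSnippets verifies that a parenthesized name like (common) defines a
+// reusable snippet rather than a server block, and that an import directive
+// inlines its tokens.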
+func TestSnippets(t *testing.T) {
+ p := testParser(`
+ (common) {
+ gzip foo
+ errors stderr
+ }
+ http://example.com {
+ import common
+ }
+ `)
+ blocks, err := p.parseAll()
+ if err != nil {
+ t.Fatal(err)
+ }
+ for _, b := range blocks {
+ t.Log(b.Keys)
+ t.Log(b.Tokens)
+ }
+ if len(blocks) != 1 {
+		t.Fatalf("Expected exactly one server block, got %d.", len(blocks))
+ }
+ if actual, expected := blocks[0].Keys[0], "http://example.com"; expected != actual {
+ t.Errorf("Expected server name to be '%s' but was '%s'", expected, actual)
+ }
+	if len(blocks[0].Tokens) != 2 {
+		t.Fatalf("expected 2 token groups from the imported snippet, got %d", len(blocks[0].Tokens))
+	}
+ if actual, expected := blocks[0].Tokens["gzip"][0].Text, "gzip"; expected != actual {
+		t.Errorf("Expected directive name to be '%s' but was '%s'", expected, actual)
+ }
+ if actual, expected := blocks[0].Tokens["errors"][1].Text, "stderr"; expected != actual {
+ t.Errorf("Expected argument to be '%s' but was '%s'", expected, actual)
+ }
+}
+
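+// writeStringToTempFileOrDie writes str to a new temporary file and returns
+// its path, panicking on any failure.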
+func writeStringToTempFileOrDie(t *testing.T, str string) (pathToFile string) {
+ file, err := ioutil.TempFile("", t.Name())
+ if err != nil {
+ panic(err) // get a stack trace so we know where this was called from.
+ }
+ if _, err := file.WriteString(str); err != nil {
+ panic(err)
+ }
+ if err := file.Close(); err != nil {
+ panic(err)
+ }
+ return file.Name()
+}
+
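+// TestImportedFilesIgnoreNonDirectiveImportTokens ensures that the word
+// "import" appearing as a directive argument in an imported file is kept
+// verbatim instead of being treated as an import directive.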
+func TestImportedFilesIgnoreNonDirectiveImportTokens(t *testing.T) {
+ fileName := writeStringToTempFileOrDie(t, `
+ http://example.com {
+ # This isn't an import directive, it's just an arg with value 'import'
+ basicauth / import password
+ }
+ `)
+ // Parse the root file that imports the other one.
+ p := testParser(`import ` + fileName)
+ blocks, err := p.parseAll()
+ if err != nil {
+ t.Fatal(err)
+ }
+ for _, b := range blocks {
+ t.Log(b.Keys)
+ t.Log(b.Tokens)
+ }
+ auth := blocks[0].Tokens["basicauth"]
+ line := auth[0].Text + " " + auth[1].Text + " " + auth[2].Text + " " + auth[3].Text
+ if line != "basicauth / import password" {
+ // Previously, it would be changed to:
+ // basicauth / import /path/to/test/dir/password
+ // referencing a file that (probably) doesn't exist and changing the
+ // password!
+ t.Errorf("Expected basicauth tokens to be 'basicauth / import password' but got %#q", line)
+ }
+}
+
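+// TestSnippetAcrossMultipleFiles verifies that a snippet defined in the root
+// Caddyfile can be imported by another file that the root file itself imports.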
+func TestSnippetAcrossMultipleFiles(t *testing.T) {
+ // Make the derived Caddyfile that expects (common) to be defined.
+ fileName := writeStringToTempFileOrDie(t, `
+ http://example.com {
+ import common
+ }
+ `)
+
+ // Parse the root file that defines (common) and then imports the other one.
+ p := testParser(`
+ (common) {
+ gzip foo
+ }
+ import ` + fileName + `
+ `)
+
+ blocks, err := p.parseAll()
+ if err != nil {
+ t.Fatal(err)
+ }
+ for _, b := range blocks {
+ t.Log(b.Keys)
+ t.Log(b.Tokens)
+ }
+ if len(blocks) != 1 {
+		t.Fatalf("Expected exactly one server block, got %d.", len(blocks))
+ }
+ if actual, expected := blocks[0].Keys[0], "http://example.com"; expected != actual {
+ t.Errorf("Expected server name to be '%s' but was '%s'", expected, actual)
+ }
+	if len(blocks[0].Tokens) != 1 {
+		t.Fatalf("expected 1 token group from the imported snippet, got %d", len(blocks[0].Tokens))
+	}
+ if actual, expected := blocks[0].Tokens["gzip"][0].Text, "gzip"; expected != actual {
+		t.Errorf("Expected directive name to be '%s' but was '%s'", expected, actual)
+ }
+}
diff --git a/caddyconfig/caddyfile/testdata/import_glob0.txt b/caddyconfig/caddyfile/testdata/import_glob0.txt
new file mode 100755
index 0000000..e610b5e
--- /dev/null
+++ b/caddyconfig/caddyfile/testdata/import_glob0.txt
@@ -0,0 +1,6 @@
+glob0.host0 {
+ dir2 arg1
+}
+
+glob0.host1 {
+}
diff --git a/caddyconfig/caddyfile/testdata/import_glob1.txt b/caddyconfig/caddyfile/testdata/import_glob1.txt
new file mode 100755
index 0000000..111eb04
--- /dev/null
+++ b/caddyconfig/caddyfile/testdata/import_glob1.txt
@@ -0,0 +1,4 @@
+glob1.host0 {
+ dir1
+ dir2 arg1
+}
diff --git a/caddyconfig/caddyfile/testdata/import_glob2.txt b/caddyconfig/caddyfile/testdata/import_glob2.txt
new file mode 100755
index 0000000..c09f784
--- /dev/null
+++ b/caddyconfig/caddyfile/testdata/import_glob2.txt
@@ -0,0 +1,3 @@
+glob2.host0 {
+ dir2 arg1
+}
diff --git a/caddyconfig/caddyfile/testdata/import_test1.txt b/caddyconfig/caddyfile/testdata/import_test1.txt
new file mode 100755
index 0000000..dac7b29
--- /dev/null
+++ b/caddyconfig/caddyfile/testdata/import_test1.txt
@@ -0,0 +1,2 @@
+dir2 arg1 arg2
+dir3 \ No newline at end of file
diff --git a/caddyconfig/caddyfile/testdata/import_test2.txt b/caddyconfig/caddyfile/testdata/import_test2.txt
new file mode 100755
index 0000000..140c879
--- /dev/null
+++ b/caddyconfig/caddyfile/testdata/import_test2.txt
@@ -0,0 +1,4 @@
+host1 {
+ dir1
+ dir2 arg1
+} \ No newline at end of file