author     Tom Barrett <tom@tombarrett.xyz>    2023-11-01 17:57:48 +0100
committer  Tom Barrett <tom@tombarrett.xyz>    2023-11-01 18:11:33 +0100
commit     240c3d1338415e5d82ef7ca0e52c4284be6441bd (patch)
tree       4b0ee5d208c2cdffa78d65f1b0abe0ec85f15652 /caddyconfig
parent     73e78ab226f21e6c6c68961af88c4ab9c746f4f4 (diff)
parent     0e204b730aa2b1fa0835336b1117eff8c420f713 (diff)
vbump to v2.7.5 (HEAD, caddy-cgi)
Diffstat (limited to 'caddyconfig')
-rw-r--r--  caddyconfig/caddyfile/adapter.go | 2
-rw-r--r--  caddyconfig/caddyfile/dispenser.go | 90
-rw-r--r--  caddyconfig/caddyfile/importargs.go | 153
-rw-r--r--  caddyconfig/caddyfile/importgraph.go | 3
-rw-r--r--  caddyconfig/caddyfile/lexer.go | 246
-rw-r--r--  caddyconfig/caddyfile/lexer_test.go | 227
-rw-r--r--  caddyconfig/caddyfile/parse.go | 148
-rw-r--r--  caddyconfig/caddyfile/parse_test.go | 120
-rw-r--r--  caddyconfig/caddyfile/testdata/import_args0.txt | 2
-rw-r--r--  caddyconfig/caddyfile/testdata/import_args1.txt | 2
-rw-r--r--  caddyconfig/httpcaddyfile/addresses.go | 20
-rw-r--r--  caddyconfig/httpcaddyfile/builtins.go | 113
-rw-r--r--  caddyconfig/httpcaddyfile/builtins_test.go | 142
-rw-r--r--  caddyconfig/httpcaddyfile/directives.go | 38
-rw-r--r--  caddyconfig/httpcaddyfile/httptype.go | 270
-rw-r--r--  caddyconfig/httpcaddyfile/options.go | 6
-rw-r--r--  caddyconfig/httpcaddyfile/pkiapp.go | 1
-rw-r--r--  caddyconfig/httpcaddyfile/serveroptions.go | 25
-rw-r--r--  caddyconfig/httpcaddyfile/shorthands.go | 92
-rw-r--r--  caddyconfig/httpcaddyfile/testdata/import_variadic.txt | 9
-rw-r--r--  caddyconfig/httpcaddyfile/testdata/import_variadic_snippet.txt | 9
-rw-r--r--  caddyconfig/httpcaddyfile/testdata/import_variadic_with_import.txt | 15
-rw-r--r--  caddyconfig/httpcaddyfile/tlsapp.go | 48
-rw-r--r--  caddyconfig/httpcaddyfile/tlsapp_test.go | 4
-rw-r--r--  caddyconfig/httploader.go | 22
25 files changed, 1509 insertions, 298 deletions
diff --git a/caddyconfig/caddyfile/adapter.go b/caddyconfig/caddyfile/adapter.go
index b924325..d6ef602 100644
--- a/caddyconfig/caddyfile/adapter.go
+++ b/caddyconfig/caddyfile/adapter.go
@@ -88,7 +88,7 @@ func FormattingDifference(filename string, body []byte) (caddyconfig.Warning, bo
return caddyconfig.Warning{
File: filename,
Line: line,
- Message: "Caddyfile input is not formatted; run the 'caddy fmt' command to fix inconsistencies",
+ Message: "Caddyfile input is not formatted; run 'caddy fmt --overwrite' to fix inconsistencies",
}, true
}
diff --git a/caddyconfig/caddyfile/dispenser.go b/caddyconfig/caddyfile/dispenser.go
index 91bd9a5..215a164 100644
--- a/caddyconfig/caddyfile/dispenser.go
+++ b/caddyconfig/caddyfile/dispenser.go
@@ -101,12 +101,12 @@ func (d *Dispenser) nextOnSameLine() bool {
d.cursor++
return true
}
- if d.cursor >= len(d.tokens) {
+ if d.cursor >= len(d.tokens)-1 {
return false
}
- if d.cursor < len(d.tokens)-1 &&
- d.tokens[d.cursor].File == d.tokens[d.cursor+1].File &&
- d.tokens[d.cursor].Line+d.numLineBreaks(d.cursor) == d.tokens[d.cursor+1].Line {
+ curr := d.tokens[d.cursor]
+ next := d.tokens[d.cursor+1]
+ if !isNextOnNewLine(curr, next) {
d.cursor++
return true
}
@@ -122,12 +122,12 @@ func (d *Dispenser) NextLine() bool {
d.cursor++
return true
}
- if d.cursor >= len(d.tokens) {
+ if d.cursor >= len(d.tokens)-1 {
return false
}
- if d.cursor < len(d.tokens)-1 &&
- (d.tokens[d.cursor].File != d.tokens[d.cursor+1].File ||
- d.tokens[d.cursor].Line+d.numLineBreaks(d.cursor) < d.tokens[d.cursor+1].Line) {
+ curr := d.tokens[d.cursor]
+ next := d.tokens[d.cursor+1]
+ if isNextOnNewLine(curr, next) {
d.cursor++
return true
}
@@ -203,14 +203,17 @@ func (d *Dispenser) Val() string {
}
// ValRaw gets the raw text of the current token (including quotes).
+// If the token was a heredoc, then the delimiter is not included,
+// because that is not relevant to any unmarshaling logic at this time.
// If there is no token loaded, it returns empty string.
func (d *Dispenser) ValRaw() string {
if d.cursor < 0 || d.cursor >= len(d.tokens) {
return ""
}
quote := d.tokens[d.cursor].wasQuoted
- if quote > 0 {
- return string(quote) + d.tokens[d.cursor].Text + string(quote) // string literal
+ if quote > 0 && quote != '<' {
+ // string literal
+ return string(quote) + d.tokens[d.cursor].Text + string(quote)
}
return d.tokens[d.cursor].Text
}
@@ -388,22 +391,22 @@ func (d *Dispenser) Reset() {
// an argument.
func (d *Dispenser) ArgErr() error {
if d.Val() == "{" {
- return d.Err("Unexpected token '{', expecting argument")
+ return d.Err("unexpected token '{', expecting argument")
}
- return d.Errf("Wrong argument count or unexpected line ending after '%s'", d.Val())
+ return d.Errf("wrong argument count or unexpected line ending after '%s'", d.Val())
}
// SyntaxErr creates a generic syntax error which explains what was
// found and what was expected.
func (d *Dispenser) SyntaxErr(expected string) error {
- msg := fmt.Sprintf("%s:%d - Syntax error: Unexpected token '%s', expecting '%s'", d.File(), d.Line(), d.Val(), expected)
+ msg := fmt.Sprintf("syntax error: unexpected token '%s', expecting '%s', at %s:%d import chain: ['%s']", d.Val(), expected, d.File(), d.Line(), strings.Join(d.Token().imports, "','"))
return errors.New(msg)
}
// EOFErr returns an error indicating that the dispenser reached
// the end of the input when searching for the next token.
func (d *Dispenser) EOFErr() error {
- return d.Errf("Unexpected EOF")
+ return d.Errf("unexpected EOF")
}
// Err generates a custom parse-time error with a message of msg.
@@ -418,7 +421,10 @@ func (d *Dispenser) Errf(format string, args ...any) error {
// WrapErr takes an existing error and adds the Caddyfile file and line number.
func (d *Dispenser) WrapErr(err error) error {
- return fmt.Errorf("%s:%d - Error during parsing: %w", d.File(), d.Line(), err)
+ if len(d.Token().imports) > 0 {
+ return fmt.Errorf("%w, at %s:%d import chain ['%s']", err, d.File(), d.Line(), strings.Join(d.Token().imports, "','"))
+ }
+ return fmt.Errorf("%w, at %s:%d", err, d.File(), d.Line())
}
// Delete deletes the current token and returns the updated slice
@@ -438,14 +444,14 @@ func (d *Dispenser) Delete() []Token {
return d.tokens
}
-// numLineBreaks counts how many line breaks are in the token
-// value given by the token index tknIdx. It returns 0 if the
-// token does not exist or there are no line breaks.
-func (d *Dispenser) numLineBreaks(tknIdx int) int {
- if tknIdx < 0 || tknIdx >= len(d.tokens) {
- return 0
+// DeleteN is the same as Delete, but can delete many tokens at once.
+// If there aren't N tokens available to delete, none are deleted.
+func (d *Dispenser) DeleteN(amount int) []Token {
+ if amount > 0 && d.cursor >= (amount-1) && d.cursor <= len(d.tokens)-1 {
+ d.tokens = append(d.tokens[:d.cursor-(amount-1)], d.tokens[d.cursor+1:]...)
+ d.cursor -= amount
}
- return strings.Count(d.tokens[tknIdx].Text, "\n")
+ return d.tokens
}
// isNewLine determines whether the current token is on a different
@@ -461,25 +467,7 @@ func (d *Dispenser) isNewLine() bool {
prev := d.tokens[d.cursor-1]
curr := d.tokens[d.cursor]
-
- // If the previous token is from a different file,
- // we can assume it's from a different line
- if prev.File != curr.File {
- return true
- }
-
- // The previous token may contain line breaks if
- // it was quoted and spanned multiple lines. e.g:
- //
- // dir "foo
- // bar
- // baz"
- prevLineBreaks := d.numLineBreaks(d.cursor - 1)
-
- // If the previous token (incl line breaks) ends
- // on a line earlier than the current token,
- // then the current token is on a new line
- return prev.Line+prevLineBreaks < curr.Line
+ return isNextOnNewLine(prev, curr)
}
// isNextOnNewLine determines whether the current token is on a different
@@ -495,23 +483,5 @@ func (d *Dispenser) isNextOnNewLine() bool {
curr := d.tokens[d.cursor]
next := d.tokens[d.cursor+1]
-
- // If the next token is from a different file,
- // we can assume it's from a different line
- if curr.File != next.File {
- return true
- }
-
- // The current token may contain line breaks if
- // it was quoted and spanned multiple lines. e.g:
- //
- // dir "foo
- // bar
- // baz"
- currLineBreaks := d.numLineBreaks(d.cursor)
-
- // If the current token (incl line breaks) ends
- // on a line earlier than the next token,
- // then the next token is on a new line
- return curr.Line+currLineBreaks < next.Line
+ return isNextOnNewLine(curr, next)
}
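
For context, a minimal sketch (not part of the patch) of how the new DeleteN method behaves from a caller's perspective; it assumes a hypothetical main package and the exported NewDispenser constructor, and deletes the current token plus the one before it:

package main

import (
	"fmt"

	"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"
)

func main() {
	// four tokens on one line: "dir", "arg1", "arg2", "arg3"
	tokens, err := caddyfile.Tokenize([]byte("dir arg1 arg2 arg3"), "Caddyfile")
	if err != nil {
		panic(err)
	}
	d := caddyfile.NewDispenser(tokens)
	d.Next() // cursor on "dir"
	d.Next() // cursor on "arg1"
	d.Next() // cursor on "arg2"

	// DeleteN(2) removes the current token and the one preceding it
	// ("arg1" and "arg2"), and moves the cursor back accordingly.
	for _, tok := range d.DeleteN(2) {
		fmt.Println(tok.Text) // prints "dir", then "arg3"
	}
}
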
diff --git a/caddyconfig/caddyfile/importargs.go b/caddyconfig/caddyfile/importargs.go
new file mode 100644
index 0000000..2e21a36
--- /dev/null
+++ b/caddyconfig/caddyfile/importargs.go
@@ -0,0 +1,153 @@
+// Copyright 2015 Matthew Holt and The Caddy Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package caddyfile
+
+import (
+ "regexp"
+ "strconv"
+ "strings"
+
+ "go.uber.org/zap"
+
+ "github.com/caddyserver/caddy/v2"
+)
+
+// parseVariadic determines if the token is a variadic placeholder,
+// and if so, determines the index range (start/end) of args to use.
+// Returns a boolean signaling whether a variadic placeholder was found,
+// and the start and end indices.
+func parseVariadic(token Token, argCount int) (bool, int, int) {
+ if !strings.HasPrefix(token.Text, "{args[") {
+ return false, 0, 0
+ }
+ if !strings.HasSuffix(token.Text, "]}") {
+ return false, 0, 0
+ }
+
+ argRange := strings.TrimSuffix(strings.TrimPrefix(token.Text, "{args["), "]}")
+ if argRange == "" {
+ caddy.Log().Named("caddyfile").Warn(
+ "Placeholder "+token.Text+" cannot have an empty index",
+ zap.String("file", token.File+":"+strconv.Itoa(token.Line)), zap.Strings("import_chain", token.imports))
+ return false, 0, 0
+ }
+
+ start, end, found := strings.Cut(argRange, ":")
+
+ // If no ":" delimiter is found, this is not a variadic.
+ // The replacer will pick this up.
+ if !found {
+ return false, 0, 0
+ }
+
+ var (
+ startIndex = 0
+ endIndex = argCount
+ err error
+ )
+ if start != "" {
+ startIndex, err = strconv.Atoi(start)
+ if err != nil {
+ caddy.Log().Named("caddyfile").Warn(
+ "Variadic placeholder "+token.Text+" has an invalid start index",
+ zap.String("file", token.File+":"+strconv.Itoa(token.Line)), zap.Strings("import_chain", token.imports))
+ return false, 0, 0
+ }
+ }
+ if end != "" {
+ endIndex, err = strconv.Atoi(end)
+ if err != nil {
+ caddy.Log().Named("caddyfile").Warn(
+ "Variadic placeholder "+token.Text+" has an invalid end index",
+ zap.String("file", token.File+":"+strconv.Itoa(token.Line)), zap.Strings("import_chain", token.imports))
+ return false, 0, 0
+ }
+ }
+
+ // bound check
+ if startIndex < 0 || startIndex > endIndex || endIndex > argCount {
+ caddy.Log().Named("caddyfile").Warn(
+ "Variadic placeholder "+token.Text+" indices are out of bounds, only "+strconv.Itoa(argCount)+" argument(s) exist",
+ zap.String("file", token.File+":"+strconv.Itoa(token.Line)), zap.Strings("import_chain", token.imports))
+ return false, 0, 0
+ }
+ return true, startIndex, endIndex
+}
+
+// makeArgsReplacer prepares a Replacer which can replace
+// non-variadic args placeholders in imported tokens.
+func makeArgsReplacer(args []string) *caddy.Replacer {
+ repl := caddy.NewEmptyReplacer()
+ repl.Map(func(key string) (any, bool) {
+ // TODO: Remove the deprecated {args.*} placeholder
+ // support at some point in the future
+ if matches := argsRegexpIndexDeprecated.FindStringSubmatch(key); len(matches) > 0 {
+ // What's matched may be a substring of the key
+ if matches[0] != key {
+ return nil, false
+ }
+
+ value, err := strconv.Atoi(matches[1])
+ if err != nil {
+ caddy.Log().Named("caddyfile").Warn(
+ "Placeholder {args." + matches[1] + "} has an invalid index")
+ return nil, false
+ }
+ if value >= len(args) {
+ caddy.Log().Named("caddyfile").Warn(
+ "Placeholder {args." + matches[1] + "} index is out of bounds, only " + strconv.Itoa(len(args)) + " argument(s) exist")
+ return nil, false
+ }
+ caddy.Log().Named("caddyfile").Warn(
+ "Placeholder {args." + matches[1] + "} deprecated, use {args[" + matches[1] + "]} instead")
+ return args[value], true
+ }
+
+ // Handle args[*] form
+ if matches := argsRegexpIndex.FindStringSubmatch(key); len(matches) > 0 {
+ // What's matched may be a substring of the key
+ if matches[0] != key {
+ return nil, false
+ }
+
+ if strings.Contains(matches[1], ":") {
+ caddy.Log().Named("caddyfile").Warn(
+ "Variadic placeholder {args[" + matches[1] + "]} must be a token on its own")
+ return nil, false
+ }
+ value, err := strconv.Atoi(matches[1])
+ if err != nil {
+ caddy.Log().Named("caddyfile").Warn(
+ "Placeholder {args[" + matches[1] + "]} has an invalid index")
+ return nil, false
+ }
+ if value >= len(args) {
+ caddy.Log().Named("caddyfile").Warn(
+ "Placeholder {args[" + matches[1] + "]} index is out of bounds, only " + strconv.Itoa(len(args)) + " argument(s) exist")
+ return nil, false
+ }
+ return args[value], true
+ }
+
+ // Not an args placeholder, ignore
+ return nil, false
+ })
+ return repl
+}
+
+var (
+ argsRegexpIndexDeprecated = regexp.MustCompile(`args\.(.+)`)
+ argsRegexpIndex = regexp.MustCompile(`args\[(.+)]`)
+)
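
As a rough illustration of the variadic placeholder semantics above (a sketch, not part of the patch): parseVariadic reports a half-open index range into the import arguments, which the parser later uses to splice args[start:end] into the token stream. Since parseVariadic is unexported, the sketch assumes a hypothetical helper inside package caddyfile, e.g. in a test file:

package caddyfile

import "fmt"

// hypothetical helper, e.g. in importargs_example_test.go
func variadicSketch() {
	// "{args[1:3]}" against 5 import args selects args 1 and 2 (half-open range).
	tok := Token{File: "Caddyfile", Line: 1, Text: "{args[1:3]}"}
	ok, start, end := parseVariadic(tok, 5)
	fmt.Println(ok, start, end) // true 1 3

	// Empty bounds default to 0 and argCount, so "{args[:]}" covers every arg.
	tok.Text = "{args[:]}"
	ok, start, end = parseVariadic(tok, 5)
	fmt.Println(ok, start, end) // true 0 5

	// "{args[0]}" has no ':' so it is not variadic; the args replacer handles it.
	tok.Text = "{args[0]}"
	ok, _, _ = parseVariadic(tok, 5)
	fmt.Println(ok) // false
}
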
diff --git a/caddyconfig/caddyfile/importgraph.go b/caddyconfig/caddyfile/importgraph.go
index 659c368..d27f471 100644
--- a/caddyconfig/caddyfile/importgraph.go
+++ b/caddyconfig/caddyfile/importgraph.go
@@ -34,6 +34,7 @@ func (i *importGraph) addNode(name string) {
}
i.nodes[name] = true
}
+
func (i *importGraph) addNodes(names []string) {
for _, name := range names {
i.addNode(name)
@@ -43,6 +44,7 @@ func (i *importGraph) addNodes(names []string) {
func (i *importGraph) removeNode(name string) {
delete(i.nodes, name)
}
+
func (i *importGraph) removeNodes(names []string) {
for _, name := range names {
i.removeNode(name)
@@ -73,6 +75,7 @@ func (i *importGraph) addEdge(from, to string) error {
i.edges[from] = append(i.edges[from], to)
return nil
}
+
func (i *importGraph) addEdges(from string, tos []string) error {
for _, to := range tos {
err := i.addEdge(from, to)
diff --git a/caddyconfig/caddyfile/lexer.go b/caddyconfig/caddyfile/lexer.go
index 5605a6a..bfd6c0f 100644
--- a/caddyconfig/caddyfile/lexer.go
+++ b/caddyconfig/caddyfile/lexer.go
@@ -17,7 +17,10 @@ package caddyfile
import (
"bufio"
"bytes"
+ "fmt"
"io"
+ "regexp"
+ "strings"
"unicode"
)
@@ -35,15 +38,41 @@ type (
// Token represents a single parsable unit.
Token struct {
- File string
- Line int
- Text string
- wasQuoted rune // enclosing quote character, if any
- inSnippet bool
- snippetName string
+ File string
+ imports []string
+ Line int
+ Text string
+ wasQuoted rune // enclosing quote character, if any
+ heredocMarker string
+ snippetName string
}
)
+// Tokenize takes bytes as input and lexes it into
+// a list of tokens that can be parsed as a Caddyfile.
+// Also takes a filename to fill the token's File as
+// the source of the tokens, which is important to
+// determine relative paths for `import` directives.
+func Tokenize(input []byte, filename string) ([]Token, error) {
+ l := lexer{}
+ if err := l.load(bytes.NewReader(input)); err != nil {
+ return nil, err
+ }
+ var tokens []Token
+ for {
+ found, err := l.next()
+ if err != nil {
+ return nil, err
+ }
+ if !found {
+ break
+ }
+ l.token.File = filename
+ tokens = append(tokens, l.token)
+ }
+ return tokens, nil
+}
+
// load prepares the lexer to scan an input for tokens.
// It discards any leading byte order mark.
func (l *lexer) load(input io.Reader) error {
@@ -75,28 +104,107 @@ func (l *lexer) load(input io.Reader) error {
// may be escaped. The rest of the line is skipped
// if a "#" character is read in. Returns true if
// a token was loaded; false otherwise.
-func (l *lexer) next() bool {
+func (l *lexer) next() (bool, error) {
var val []rune
- var comment, quoted, btQuoted, escaped bool
+ var comment, quoted, btQuoted, inHeredoc, heredocEscaped, escaped bool
+ var heredocMarker string
makeToken := func(quoted rune) bool {
l.token.Text = string(val)
l.token.wasQuoted = quoted
+ l.token.heredocMarker = heredocMarker
return true
}
for {
+ // Read a character in; if err then if we had
+ // read some characters, make a token. If we
+ // reached EOF, then no more tokens to read.
+ // If no EOF, then we had a problem.
ch, _, err := l.reader.ReadRune()
if err != nil {
if len(val) > 0 {
- return makeToken(0)
+ if inHeredoc {
+ return false, fmt.Errorf("incomplete heredoc <<%s on line #%d, expected ending marker %s", heredocMarker, l.line+l.skippedLines, heredocMarker)
+ }
+
+ return makeToken(0), nil
}
if err == io.EOF {
- return false
+ return false, nil
+ }
+ return false, err
+ }
+
+ // detect whether we have the start of a heredoc
+ if !(quoted || btQuoted) && !(inHeredoc || heredocEscaped) &&
+ len(val) > 1 && string(val[:2]) == "<<" {
+ // a space means it's just a regular token and not a heredoc
+ if ch == ' ' {
+ return makeToken(0), nil
+ }
+
+ // skip CR, we only care about LF
+ if ch == '\r' {
+ continue
+ }
+
+ // after hitting a newline, we know that the heredoc marker
+ // is the characters after the two << and the newline.
+ // we reset the val because the heredoc is syntax we don't
+ // want to keep.
+ if ch == '\n' {
+ if len(val) == 2 {
+ return false, fmt.Errorf("missing opening heredoc marker on line #%d; must contain only alpha-numeric characters, dashes and underscores; got empty string", l.line)
+ }
+
+ // check if there's too many <
+ if string(val[:3]) == "<<<" {
+ return false, fmt.Errorf("too many '<' for heredoc on line #%d; only use two, for example <<END", l.line)
+ }
+
+ heredocMarker = string(val[2:])
+ if !heredocMarkerRegexp.Match([]byte(heredocMarker)) {
+ return false, fmt.Errorf("heredoc marker on line #%d must contain only alpha-numeric characters, dashes and underscores; got '%s'", l.line, heredocMarker)
+ }
+
+ inHeredoc = true
+ l.skippedLines++
+ val = nil
+ continue
+ }
+ val = append(val, ch)
+ continue
+ }
+
+ // if we're in a heredoc, all characters are read as-is
+ if inHeredoc {
+ val = append(val, ch)
+
+ if ch == '\n' {
+ l.skippedLines++
+ }
+
+ // check if we're done, i.e. that the last few characters are the marker
+ if len(val) > len(heredocMarker) && heredocMarker == string(val[len(val)-len(heredocMarker):]) {
+ // set the final value
+ val, err = l.finalizeHeredoc(val, heredocMarker)
+ if err != nil {
+ return false, err
+ }
+
+ // set the line counter, and make the token
+ l.line += l.skippedLines
+ l.skippedLines = 0
+ return makeToken('<'), nil
}
- panic(err)
+
+ // stay in the heredoc until we find the ending marker
+ continue
}
+ // track whether we found an escape '\' for the next
+ // iteration to be contextually aware
if !escaped && !btQuoted && ch == '\\' {
escaped = true
continue
@@ -111,26 +219,29 @@ func (l *lexer) next() bool {
}
escaped = false
} else {
- if quoted && ch == '"' {
- return makeToken('"')
- }
- if btQuoted && ch == '`' {
- return makeToken('`')
+ if (quoted && ch == '"') || (btQuoted && ch == '`') {
+ return makeToken(ch), nil
}
}
+ // allow quoted text to continue (wrap) onto multiple lines
if ch == '\n' {
l.line += 1 + l.skippedLines
l.skippedLines = 0
}
+ // collect this character as part of the quoted token
val = append(val, ch)
continue
}
if unicode.IsSpace(ch) {
+ // ignore CR altogether, we only actually care about LF (\n)
if ch == '\r' {
continue
}
+ // end of the line
if ch == '\n' {
+ // newlines can be escaped to chain arguments
+ // onto multiple lines; else, increment the line count
if escaped {
l.skippedLines++
escaped = false
@@ -138,14 +249,18 @@ func (l *lexer) next() bool {
l.line += 1 + l.skippedLines
l.skippedLines = 0
}
+ // comments (#) are single-line only
comment = false
}
+ // any kind of space means we're at the end of this token
if len(val) > 0 {
- return makeToken(0)
+ return makeToken(0), nil
}
continue
}
+ // comments must be at the start of a token,
+ // in other words, preceded by space or newline
if ch == '#' && len(val) == 0 {
comment = true
}
@@ -166,7 +281,12 @@ func (l *lexer) next() bool {
}
if escaped {
- val = append(val, '\\')
+ // allow escaping the first < to skip the heredoc syntax
+ if ch == '<' {
+ heredocEscaped = true
+ } else {
+ val = append(val, '\\')
+ }
escaped = false
}
@@ -174,24 +294,86 @@ func (l *lexer) next() bool {
}
}
-// Tokenize takes bytes as input and lexes it into
-// a list of tokens that can be parsed as a Caddyfile.
-// Also takes a filename to fill the token's File as
-// the source of the tokens, which is important to
-// determine relative paths for `import` directives.
-func Tokenize(input []byte, filename string) ([]Token, error) {
- l := lexer{}
- if err := l.load(bytes.NewReader(input)); err != nil {
- return nil, err
+// finalizeHeredoc takes the runes read as the heredoc text and the marker,
+// and processes the text to strip leading whitespace, returning the final
+// value without the leading whitespace.
+func (l *lexer) finalizeHeredoc(val []rune, marker string) ([]rune, error) {
+ stringVal := string(val)
+
+ // find the last newline of the heredoc, which is where the contents end
+ lastNewline := strings.LastIndex(stringVal, "\n")
+
+ // collapse the content, then split into separate lines
+ lines := strings.Split(stringVal[:lastNewline+1], "\n")
+
+ // figure out how much whitespace we need to strip from the front of every line
+ // by getting the string that precedes the marker, on the last line
+ paddingToStrip := stringVal[lastNewline+1 : len(stringVal)-len(marker)]
+
+ // iterate over each line and strip the whitespace from the front
+ var out string
+ for lineNum, lineText := range lines[:len(lines)-1] {
+ // find an exact match for the padding
+ index := strings.Index(lineText, paddingToStrip)
+
+ // if the padding doesn't match exactly at the start then we can't safely strip
+ if index != 0 {
+ return nil, fmt.Errorf("mismatched leading whitespace in heredoc <<%s on line #%d [%s], expected whitespace [%s] to match the closing marker", marker, l.line+lineNum+1, lineText, paddingToStrip)
+ }
+
+ // strip, then append the line, with the newline, to the output.
+ // also removes all "\r" because Windows.
+ out += strings.ReplaceAll(lineText[len(paddingToStrip):]+"\n", "\r", "")
}
- var tokens []Token
- for l.next() {
- l.token.File = filename
- tokens = append(tokens, l.token)
+
+ // Remove the trailing newline from the loop
+ if len(out) > 0 && out[len(out)-1] == '\n' {
+ out = out[:len(out)-1]
}
- return tokens, nil
+
+ // return the final value
+ return []rune(out), nil
}
func (t Token) Quoted() bool {
return t.wasQuoted > 0
}
+
+// NumLineBreaks counts how many line breaks are in the token text.
+func (t Token) NumLineBreaks() int {
+ lineBreaks := strings.Count(t.Text, "\n")
+ if t.wasQuoted == '<' {
+ // heredocs have an extra linebreak because the opening
+ // delimiter is on its own line and is not included in the
+ // token Text itself, and the trailing newline is removed.
+ lineBreaks += 2
+ }
+ return lineBreaks
+}
+
+var heredocMarkerRegexp = regexp.MustCompile("^[A-Za-z0-9_-]+$")
+
+// isNextOnNewLine tests whether t2 is on a different line from t1
+func isNextOnNewLine(t1, t2 Token) bool {
+ // If the second token is from a different file,
+ // we can assume it's from a different line
+ if t1.File != t2.File {
+ return true
+ }
+
+ // If the second token is from a different import chain,
+ // we can assume it's from a different line
+ if len(t1.imports) != len(t2.imports) {
+ return true
+ }
+ for i, im := range t1.imports {
+ if im != t2.imports[i] {
+ return true
+ }
+ }
+
+ // If the first token (incl line breaks) ends
+ // on a line earlier than the next token,
+ // then the second token is on a new line
+ return t1.Line+t1.NumLineBreaks() < t2.Line
+}
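
For context, a minimal sketch (not part of the patch) of how the new heredoc syntax surfaces through the exported Tokenize API; it assumes a hypothetical main package, and the expected output mirrors the test cases in lexer_test.go below:

package main

import (
	"fmt"

	"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"
)

func main() {
	// A heredoc token: everything between <<HTML and the closing HTML marker
	// becomes a single token; the marker itself is not part of the token text.
	input := "respond <<HTML\n<html>\n  <body>hello</body>\n</html>\nHTML 200\n"
	tokens, err := caddyfile.Tokenize([]byte(input), "Caddyfile")
	if err != nil {
		panic(err)
	}
	for _, tok := range tokens {
		fmt.Printf("line %d: %q\n", tok.Line, tok.Text)
	}
	// Roughly:
	//   line 1: "respond"
	//   line 1: "<html>\n  <body>hello</body>\n</html>"
	//   line 5: "200"
}
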
diff --git a/caddyconfig/caddyfile/lexer_test.go b/caddyconfig/caddyfile/lexer_test.go
index 30ee0f6..92acc4d 100644
--- a/caddyconfig/caddyfile/lexer_test.go
+++ b/caddyconfig/caddyfile/lexer_test.go
@@ -18,13 +18,13 @@ import (
"testing"
)
-type lexerTestCase struct {
- input []byte
- expected []Token
-}
-
func TestLexer(t *testing.T) {
- testCases := []lexerTestCase{
+ testCases := []struct {
+ input []byte
+ expected []Token
+ expectErr bool
+ errorMessage string
+ }{
{
input: []byte(`host:123`),
expected: []Token{
@@ -249,12 +249,219 @@ func TestLexer(t *testing.T) {
{Line: 1, Text: `quotes`},
},
},
+ {
+ input: []byte(`heredoc <<EOF
+content
+EOF same-line-arg
+ `),
+ expected: []Token{
+ {Line: 1, Text: `heredoc`},
+ {Line: 1, Text: "content"},
+ {Line: 3, Text: `same-line-arg`},
+ },
+ },
+ {
+ input: []byte(`heredoc <<VERY-LONG-MARKER
+content
+VERY-LONG-MARKER same-line-arg
+ `),
+ expected: []Token{
+ {Line: 1, Text: `heredoc`},
+ {Line: 1, Text: "content"},
+ {Line: 3, Text: `same-line-arg`},
+ },
+ },
+ {
+ input: []byte(`heredoc <<EOF
+extra-newline
+
+EOF same-line-arg
+ `),
+ expected: []Token{
+ {Line: 1, Text: `heredoc`},
+ {Line: 1, Text: "extra-newline\n"},
+ {Line: 4, Text: `same-line-arg`},
+ },
+ },
+ {
+ input: []byte(`heredoc <<EOF
+ EOF same-line-arg
+ `),
+ expected: []Token{
+ {Line: 1, Text: `heredoc`},
+ {Line: 1, Text: ""},
+ {Line: 2, Text: `same-line-arg`},
+ },
+ },
+ {
+ input: []byte(`heredoc <<EOF
+ content
+ EOF same-line-arg
+ `),
+ expected: []Token{
+ {Line: 1, Text: `heredoc`},
+ {Line: 1, Text: "content"},
+ {Line: 3, Text: `same-line-arg`},
+ },
+ },
+ {
+ input: []byte(`prev-line
+ heredoc <<EOF
+ multi
+ line
+ content
+ EOF same-line-arg
+ next-line
+ `),
+ expected: []Token{
+ {Line: 1, Text: `prev-line`},
+ {Line: 2, Text: `heredoc`},
+ {Line: 2, Text: "\tmulti\n\tline\n\tcontent"},
+ {Line: 6, Text: `same-line-arg`},
+ {Line: 7, Text: `next-line`},
+ },
+ },
+ {
+ input: []byte(`escaped-heredoc \<< >>`),
+ expected: []Token{
+ {Line: 1, Text: `escaped-heredoc`},
+ {Line: 1, Text: `<<`},
+ {Line: 1, Text: `>>`},
+ },
+ },
+ {
+ input: []byte(`not-a-heredoc <EOF
+ content
+ `),
+ expected: []Token{
+ {Line: 1, Text: `not-a-heredoc`},
+ {Line: 1, Text: `<EOF`},
+ {Line: 2, Text: `content`},
+ },
+ },
+ {
+ input: []byte(`not-a-heredoc <<<EOF content`),
+ expected: []Token{
+ {Line: 1, Text: `not-a-heredoc`},
+ {Line: 1, Text: `<<<EOF`},
+ {Line: 1, Text: `content`},
+ },
+ },
+ {
+ input: []byte(`not-a-heredoc "<<" ">>"`),
+ expected: []Token{
+ {Line: 1, Text: `not-a-heredoc`},
+ {Line: 1, Text: `<<`},
+ {Line: 1, Text: `>>`},
+ },
+ },
+ {
+ input: []byte(`not-a-heredoc << >>`),
+ expected: []Token{
+ {Line: 1, Text: `not-a-heredoc`},
+ {Line: 1, Text: `<<`},
+ {Line: 1, Text: `>>`},
+ },
+ },
+ {
+ input: []byte(`not-a-heredoc <<HERE SAME LINE
+ content
+ HERE same-line-arg
+ `),
+ expected: []Token{
+ {Line: 1, Text: `not-a-heredoc`},
+ {Line: 1, Text: `<<HERE`},
+ {Line: 1, Text: `SAME`},
+ {Line: 1, Text: `LINE`},
+ {Line: 2, Text: `content`},
+ {Line: 3, Text: `HERE`},
+ {Line: 3, Text: `same-line-arg`},
+ },
+ },
+ {
+ input: []byte(`heredoc <<s
+ �
+ s
+ `),
+ expected: []Token{
+ {Line: 1, Text: `heredoc`},
+ {Line: 1, Text: "�"},
+ },
+ },
+ {
+ input: []byte("\u000Aheredoc \u003C\u003C\u0073\u0073\u000A\u00BF\u0057\u0001\u0000\u00FF\u00FF\u00FF\u00FF\u00FF\u00FF\u00FF\u003D\u001F\u000A\u0073\u0073\u000A\u00BF\u0057\u0001\u0000\u00FF\u00FF\u00FF\u00FF\u00FF\u00FF\u00FF\u003D\u001F\u000A\u00BF\u00BF\u0057\u0001\u0000\u00FF\u00FF\u00FF\u00FF\u00FF\u00FF\u00FF\u003D\u001F"),
+ expected: []Token{
+ {
+ Line: 2,
+ Text: "heredoc",
+ },
+ {
+ Line: 2,
+ Text: "\u00BF\u0057\u0001\u0000\u00FF\u00FF\u00FF\u00FF\u00FF\u00FF\u00FF\u003D\u001F",
+ },
+ {
+ Line: 5,
+ Text: "\u00BF\u0057\u0001\u0000\u00FF\u00FF\u00FF\u00FF\u00FF\u00FF\u00FF\u003D\u001F",
+ },
+ {
+ Line: 6,
+ Text: "\u00BF\u00BF\u0057\u0001\u0000\u00FF\u00FF\u00FF\u00FF\u00FF\u00FF\u00FF\u003D\u001F",
+ },
+ },
+ },
+ {
+ input: []byte("not-a-heredoc <<\n"),
+ expectErr: true,
+ errorMessage: "missing opening heredoc marker on line #1; must contain only alpha-numeric characters, dashes and underscores; got empty string",
+ },
+ {
+ input: []byte(`heredoc <<<EOF
+ content
+ EOF same-line-arg
+ `),
+ expectErr: true,
+ errorMessage: "too many '<' for heredoc on line #1; only use two, for example <<END",
+ },
+ {
+ input: []byte(`heredoc <<EOF
+ content
+ `),
+ expectErr: true,
+ errorMessage: "incomplete heredoc <<EOF on line #3, expected ending marker EOF",
+ },
+ {
+ input: []byte(`heredoc <<EOF
+ content
+ EOF
+ `),
+ expectErr: true,
+ errorMessage: "mismatched leading whitespace in heredoc <<EOF on line #2 [\tcontent], expected whitespace [\t\t] to match the closing marker",
+ },
+ {
+ input: []byte(`heredoc <<EOF
+ content
+ EOF
+ `),
+ expectErr: true,
+ errorMessage: "mismatched leading whitespace in heredoc <<EOF on line #2 [ content], expected whitespace [\t\t] to match the closing marker",
+ },
}
for i, testCase := range testCases {
actual, err := Tokenize(testCase.input, "")
+ if testCase.expectErr {
+ if err == nil {
+ t.Fatalf("expected error, got actual: %v", actual)
+ continue
+ }
+ if err.Error() != testCase.errorMessage {
+ t.Fatalf("expected error '%v', got: %v", testCase.errorMessage, err)
+ }
+ continue
+ }
+
if err != nil {
- t.Errorf("%v", err)
+ t.Fatalf("%v", err)
}
lexerCompare(t, i, testCase.expected, actual)
}
@@ -262,17 +469,17 @@ func TestLexer(t *testing.T) {
func lexerCompare(t *testing.T, n int, expected, actual []Token) {
if len(expected) != len(actual) {
- t.Errorf("Test case %d: expected %d token(s) but got %d", n, len(expected), len(actual))
+ t.Fatalf("Test case %d: expected %d token(s) but got %d", n, len(expected), len(actual))
}
for i := 0; i < len(actual) && i < len(expected); i++ {
if actual[i].Line != expected[i].Line {
- t.Errorf("Test case %d token %d ('%s'): expected line %d but was line %d",
+ t.Fatalf("Test case %d token %d ('%s'): expected line %d but was line %d",
n, i, expected[i].Text, expected[i].Line, actual[i].Line)
break
}
if actual[i].Text != expected[i].Text {
- t.Errorf("Test case %d token %d: expected text '%s' but was '%s'",
+ t.Fatalf("Test case %d token %d: expected text '%s' but was '%s'",
n, i, expected[i].Text, actual[i].Text)
break
}
diff --git a/caddyconfig/caddyfile/parse.go b/caddyconfig/caddyfile/parse.go
index edc86f2..65d6ee9 100644
--- a/caddyconfig/caddyfile/parse.go
+++ b/caddyconfig/caddyfile/parse.go
@@ -20,11 +20,11 @@ import (
"io"
"os"
"path/filepath"
- "strconv"
"strings"
- "github.com/caddyserver/caddy/v2"
"go.uber.org/zap"
+
+ "github.com/caddyserver/caddy/v2"
)
// Parse parses the input just enough to group tokens, in
@@ -149,7 +149,6 @@ func (p *parser) begin() error {
}
err := p.addresses()
-
if err != nil {
return err
}
@@ -160,6 +159,25 @@ func (p *parser) begin() error {
return nil
}
+ if ok, name := p.isNamedRoute(); ok {
+ // named routes only have one key, the route name
+ p.block.Keys = []string{name}
+ p.block.IsNamedRoute = true
+
+ // we just need a dummy leading token to ease parsing later
+ nameToken := p.Token()
+ nameToken.Text = name
+
+ // get all the tokens from the block, including the braces
+ tokens, err := p.blockTokens(true)
+ if err != nil {
+ return err
+ }
+ tokens = append([]Token{nameToken}, tokens...)
+ p.block.Segments = []Segment{tokens}
+ return nil
+ }
+
if ok, name := p.isSnippet(); ok {
if p.definedSnippets == nil {
p.definedSnippets = map[string][]Token{}
@@ -168,16 +186,15 @@ func (p *parser) begin() error {
return p.Errf("redeclaration of previously declared snippet %s", name)
}
// consume all tokens til matched close brace
- tokens, err := p.snippetTokens()
+ tokens, err := p.blockTokens(false)
if err != nil {
return err
}
// Just as we need to track which file the token comes from, we need to
- // keep track of which snippets do the tokens come from. This is helpful
- // in tracking import cycles across files/snippets by namespacing them. Without
- // this we end up with false-positives in cycle-detection.
+ // keep track of which snippet the token comes from. This is helpful
+ // in tracking import cycles across files/snippets by namespacing them.
+ // Without this, we end up with false-positives in cycle-detection.
for k, v := range tokens {
- v.inSnippet = true
v.snippetName = name
tokens[k] = v
}
@@ -198,7 +215,7 @@ func (p *parser) addresses() error {
// special case: import directive replaces tokens during parse-time
if tkn == "import" && p.isNewLine() {
- err := p.doImport()
+ err := p.doImport(0)
if err != nil {
return err
}
@@ -298,7 +315,7 @@ func (p *parser) directives() error {
// special case: import directive replaces tokens during parse-time
if p.Val() == "import" {
- err := p.doImport()
+ err := p.doImport(1)
if err != nil {
return err
}
@@ -324,7 +341,7 @@ func (p *parser) directives() error {
// is on the token before where the import directive was. In
// other words, call Next() to access the first token that was
// imported.
-func (p *parser) doImport() error {
+func (p *parser) doImport(nesting int) error {
// syntax checks
if !p.NextArg() {
return p.ArgErr()
@@ -337,11 +354,8 @@ func (p *parser) doImport() error {
// grab remaining args as placeholder replacements
args := p.RemainingArgs()
- // add args to the replacer
- repl := caddy.NewEmptyReplacer()
- for index, arg := range args {
- repl.Set("args."+strconv.Itoa(index), arg)
- }
+ // set up a replacer for non-variadic args replacement
+ repl := makeArgsReplacer(args)
// splice out the import directive and its arguments
// (2 tokens, plus the length of args)
@@ -417,7 +431,7 @@ func (p *parser) doImport() error {
}
nodeName := p.File()
- if p.Token().inSnippet {
+ if p.Token().snippetName != "" {
nodeName += fmt.Sprintf(":%s", p.Token().snippetName)
}
p.importGraph.addNode(nodeName)
@@ -428,13 +442,69 @@ func (p *parser) doImport() error {
}
// copy the tokens so we don't overwrite p.definedSnippets
- tokensCopy := make([]Token, len(importedTokens))
- copy(tokensCopy, importedTokens)
+ tokensCopy := make([]Token, 0, len(importedTokens))
+
+ var (
+ maybeSnippet bool
+ maybeSnippetId bool
+ index int
+ )
// run the argument replacer on the tokens
- for index, token := range tokensCopy {
- token.Text = repl.ReplaceKnown(token.Text, "")
- tokensCopy[index] = token
+ // in Go, ranging over a slice yields a copy of each element;
+ // similarly, append copies the value
+ for i, token := range importedTokens {
+ // update the token's imports to refer to import directive filename, line number and snippet name if there is one
+ if token.snippetName != "" {
+ token.imports = append(token.imports, fmt.Sprintf("%s:%d (import %s)", p.File(), p.Line(), token.snippetName))
+ } else {
+ token.imports = append(token.imports, fmt.Sprintf("%s:%d (import)", p.File(), p.Line()))
+ }
+
+ // naive way of determine snippets, as snippets definition can only follow name + block
+ // format, won't check for nesting correctness or any other error, that's what parser does.
+ if !maybeSnippet && nesting == 0 {
+ // first of the line
+ if i == 0 || isNextOnNewLine(tokensCopy[i-1], token) {
+ index = 0
+ } else {
+ index++
+ }
+
+ if index == 0 && len(token.Text) >= 3 && strings.HasPrefix(token.Text, "(") && strings.HasSuffix(token.Text, ")") {
+ maybeSnippetId = true
+ }
+ }
+
+ switch token.Text {
+ case "{":
+ nesting++
+ if index == 1 && maybeSnippetId && nesting == 1 {
+ maybeSnippet = true
+ maybeSnippetId = false
+ }
+ case "}":
+ nesting--
+ if nesting == 0 && maybeSnippet {
+ maybeSnippet = false
+ }
+ }
+
+ if maybeSnippet {
+ tokensCopy = append(tokensCopy, token)
+ continue
+ }
+
+ foundVariadic, startIndex, endIndex := parseVariadic(token, len(args))
+ if foundVariadic {
+ for _, arg := range args[startIndex:endIndex] {
+ token.Text = arg
+ tokensCopy = append(tokensCopy, token)
+ }
+ } else {
+ token.Text = repl.ReplaceKnown(token.Text, "")
+ tokensCopy = append(tokensCopy, token)
+ }
}
// splice the imported tokens in the place of the import statement
@@ -496,7 +566,6 @@ func (p *parser) doSingleImport(importFile string) ([]Token, error) {
// are loaded into the current server block for later use
// by directive setup functions.
func (p *parser) directive() error {
-
// a segment is a list of tokens associated with this directive
var segment Segment
@@ -509,6 +578,9 @@ func (p *parser) directive() error {
if !p.isNextOnNewLine() && p.Token().wasQuoted == 0 {
return p.Err("Unexpected next token after '{' on same line")
}
+ if p.isNewLine() {
+ return p.Err("Unexpected '{' on a new line; did you mean to place the '{' on the previous line?")
+ }
} else if p.Val() == "{}" {
if p.isNextOnNewLine() && p.Token().wasQuoted == 0 {
return p.Err("Unexpected '{}' at end of line")
@@ -521,7 +593,7 @@ func (p *parser) directive() error {
} else if p.Val() == "}" && p.nesting == 0 {
return p.Err("Unexpected '}' because no matching opening brace")
} else if p.Val() == "import" && p.isNewLine() {
- if err := p.doImport(); err != nil {
+ if err := p.doImport(1); err != nil {
return err
}
p.cursor-- // cursor is advanced when we continue, so roll back one more
@@ -562,6 +634,15 @@ func (p *parser) closeCurlyBrace() error {
return nil
}
+func (p *parser) isNamedRoute() (bool, string) {
+ keys := p.block.Keys
+ // A named route block is a single key with parens, prefixed with &.
+ if len(keys) == 1 && strings.HasPrefix(keys[0], "&(") && strings.HasSuffix(keys[0], ")") {
+ return true, strings.TrimSuffix(keys[0][2:], ")")
+ }
+ return false, ""
+}
+
func (p *parser) isSnippet() (bool, string) {
keys := p.block.Keys
// A snippet block is a single key with parens. Nothing else qualifies.
@@ -572,18 +653,24 @@ func (p *parser) isSnippet() (bool, string) {
}
// read and store everything in a block for later replay.
-func (p *parser) snippetTokens() ([]Token, error) {
- // snippet must have curlies.
+func (p *parser) blockTokens(retainCurlies bool) ([]Token, error) {
+ // block must have curlies.
err := p.openCurlyBrace()
if err != nil {
return nil, err
}
- nesting := 1 // count our own nesting in snippets
+ nesting := 1 // count our own nesting
tokens := []Token{}
+ if retainCurlies {
+ tokens = append(tokens, p.Token())
+ }
for p.Next() {
if p.Val() == "}" {
nesting--
if nesting == 0 {
+ if retainCurlies {
+ tokens = append(tokens, p.Token())
+ }
break
}
}
@@ -603,9 +690,10 @@ func (p *parser) snippetTokens() ([]Token, error) {
// head of the server block with tokens, which are
// grouped by segments.
type ServerBlock struct {
- HasBraces bool
- Keys []string
- Segments []Segment
+ HasBraces bool
+ Keys []string
+ Segments []Segment
+ IsNamedRoute bool
}
// DispenseDirective returns a dispenser that contains
diff --git a/caddyconfig/caddyfile/parse_test.go b/caddyconfig/caddyfile/parse_test.go
index 4d18cc4..b1104ed 100644
--- a/caddyconfig/caddyfile/parse_test.go
+++ b/caddyconfig/caddyfile/parse_test.go
@@ -21,6 +21,88 @@ import (
"testing"
)
+func TestParseVariadic(t *testing.T) {
+ var args = make([]string, 10)
+ for i, tc := range []struct {
+ input string
+ result bool
+ }{
+ {
+ input: "",
+ result: false,
+ },
+ {
+ input: "{args[1",
+ result: false,
+ },
+ {
+ input: "1]}",
+ result: false,
+ },
+ {
+ input: "{args[:]}aaaaa",
+ result: false,
+ },
+ {
+ input: "aaaaa{args[:]}",
+ result: false,
+ },
+ {
+ input: "{args.}",
+ result: false,
+ },
+ {
+ input: "{args.1}",
+ result: false,
+ },
+ {
+ input: "{args[]}",
+ result: false,
+ },
+ {
+ input: "{args[:]}",
+ result: true,
+ },
+ {
+ input: "{args[:]}",
+ result: true,
+ },
+ {
+ input: "{args[0:]}",
+ result: true,
+ },
+ {
+ input: "{args[:0]}",
+ result: true,
+ },
+ {
+ input: "{args[-1:]}",
+ result: false,
+ },
+ {
+ input: "{args[:11]}",
+ result: false,
+ },
+ {
+ input: "{args[10:0]}",
+ result: false,
+ },
+ {
+ input: "{args[0:10]}",
+ result: true,
+ },
+ } {
+ token := Token{
+ File: "test",
+ Line: 1,
+ Text: tc.input,
+ }
+ if v, _, _ := parseVariadic(token, len(args)); v != tc.result {
+ t.Errorf("Test %d error expectation failed Expected: %t, got %t", i, tc.result, v)
+ }
+ }
+}
+
func TestAllTokens(t *testing.T) {
input := []byte("a b c\nd e")
expected := []string{"a", "b", "c", "d", "e"}
@@ -211,6 +293,14 @@ func TestParseOneAndImport(t *testing.T) {
// Unexpected next token after '{' on same line
{`localhost
dir1 { a b }`, true, []string{"localhost"}, []int{}},
+
+ // Unexpected '{' on a new line
+ {`localhost
+ dir1
+ {
+ a b
+ }`, true, []string{"localhost"}, []int{}},
+
// Workaround with quotes
{`localhost
dir1 "{" a b "}"`, false, []string{"localhost"}, []int{5}},
@@ -628,6 +718,36 @@ func TestEnvironmentReplacement(t *testing.T) {
}
}
+func TestImportReplacementInJSONWithBrace(t *testing.T) {
+ for i, test := range []struct {
+ args []string
+ input string
+ expect string
+ }{
+ {
+ args: []string{"123"},
+ input: "{args[0]}",
+ expect: "123",
+ },
+ {
+ args: []string{"123"},
+ input: `{"key":"{args[0]}"}`,
+ expect: `{"key":"123"}`,
+ },
+ {
+ args: []string{"123", "123"},
+ input: `{"key":[{args[0]},{args[1]}]}`,
+ expect: `{"key":[123,123]}`,
+ },
+ } {
+ repl := makeArgsReplacer(test.args)
+ actual := repl.ReplaceKnown(test.input, "")
+ if actual != test.expect {
+ t.Errorf("Test %d: Expected: '%s' but got '%s'", i, test.expect, actual)
+ }
+ }
+}
+
func TestSnippets(t *testing.T) {
p := testParser(`
(common) {
diff --git a/caddyconfig/caddyfile/testdata/import_args0.txt b/caddyconfig/caddyfile/testdata/import_args0.txt
index af946fe..add211e 100644
--- a/caddyconfig/caddyfile/testdata/import_args0.txt
+++ b/caddyconfig/caddyfile/testdata/import_args0.txt
@@ -1 +1 @@
-{args.0} \ No newline at end of file
+{args[0]} \ No newline at end of file
diff --git a/caddyconfig/caddyfile/testdata/import_args1.txt b/caddyconfig/caddyfile/testdata/import_args1.txt
index 519a92d..422692a 100644
--- a/caddyconfig/caddyfile/testdata/import_args1.txt
+++ b/caddyconfig/caddyfile/testdata/import_args1.txt
@@ -1 +1 @@
-{args.0} {args.1} \ No newline at end of file
+{args[0]} {args[1]} \ No newline at end of file
diff --git a/caddyconfig/httpcaddyfile/addresses.go b/caddyconfig/httpcaddyfile/addresses.go
index 93bad27..658da48 100644
--- a/caddyconfig/httpcaddyfile/addresses.go
+++ b/caddyconfig/httpcaddyfile/addresses.go
@@ -24,10 +24,11 @@ import (
"strings"
"unicode"
+ "github.com/caddyserver/certmagic"
+
"github.com/caddyserver/caddy/v2"
"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"
"github.com/caddyserver/caddy/v2/modules/caddyhttp"
- "github.com/caddyserver/certmagic"
)
// mapAddressToServerBlocks returns a map of listener address to list of server
@@ -77,7 +78,8 @@ import (
// multiple addresses to the same lists of server blocks (a many:many mapping).
// (Doing this is essentially a map-reduce technique.)
func (st *ServerType) mapAddressToServerBlocks(originalServerBlocks []serverBlock,
- options map[string]any) (map[string][]serverBlock, error) {
+ options map[string]any,
+) (map[string][]serverBlock, error) {
sbmap := make(map[string][]serverBlock)
for i, sblock := range originalServerBlocks {
@@ -187,13 +189,25 @@ func (st *ServerType) consolidateAddrMappings(addrToServerBlocks map[string][]se
// listenerAddrsForServerBlockKey essentially converts the Caddyfile
// site addresses to Caddy listener addresses for each server block.
func (st *ServerType) listenerAddrsForServerBlockKey(sblock serverBlock, key string,
- options map[string]any) ([]string, error) {
+ options map[string]any,
+) ([]string, error) {
addr, err := ParseAddress(key)
if err != nil {
return nil, fmt.Errorf("parsing key: %v", err)
}
addr = addr.Normalize()
+ switch addr.Scheme {
+ case "wss":
+ return nil, fmt.Errorf("the scheme wss:// is only supported in browsers; use https:// instead")
+ case "ws":
+ return nil, fmt.Errorf("the scheme ws:// is only supported in browsers; use http:// instead")
+ case "https", "http", "":
+ // Do nothing or handle the valid schemes
+ default:
+ return nil, fmt.Errorf("unsupported URL scheme %s://", addr.Scheme)
+ }
+
// figure out the HTTP and HTTPS ports; either
// use defaults, or override with user config
httpPort, httpsPort := strconv.Itoa(caddyhttp.DefaultHTTPPort), strconv.Itoa(caddyhttp.DefaultHTTPSPort)
diff --git a/caddyconfig/httpcaddyfile/builtins.go b/caddyconfig/httpcaddyfile/builtins.go
index 45da4a8..94ca007 100644
--- a/caddyconfig/httpcaddyfile/builtins.go
+++ b/caddyconfig/httpcaddyfile/builtins.go
@@ -26,14 +26,15 @@ import (
"strings"
"time"
+ "github.com/caddyserver/certmagic"
+ "github.com/mholt/acmez/acme"
+ "go.uber.org/zap/zapcore"
+
"github.com/caddyserver/caddy/v2"
"github.com/caddyserver/caddy/v2/caddyconfig"
"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"
"github.com/caddyserver/caddy/v2/modules/caddyhttp"
"github.com/caddyserver/caddy/v2/modules/caddytls"
- "github.com/caddyserver/certmagic"
- "github.com/mholt/acmez/acme"
- "go.uber.org/zap/zapcore"
)
func init() {
@@ -48,6 +49,7 @@ func init() {
RegisterHandlerDirective("route", parseRoute)
RegisterHandlerDirective("handle", parseHandle)
RegisterDirective("handle_errors", parseHandleErrors)
+ RegisterHandlerDirective("invoke", parseInvoke)
RegisterDirective("log", parseLog)
RegisterHandlerDirective("skip_log", parseSkipLog)
}
@@ -179,17 +181,17 @@ func parseTLS(h Helper) ([]ConfigValue, error) {
case "protocols":
args := h.RemainingArgs()
if len(args) == 0 {
- return nil, h.SyntaxErr("one or two protocols")
+ return nil, h.Errf("protocols requires one or two arguments")
}
if len(args) > 0 {
if _, ok := caddytls.SupportedProtocols[args[0]]; !ok {
- return nil, h.Errf("Wrong protocol name or protocol not supported: '%s'", args[0])
+ return nil, h.Errf("wrong protocol name or protocol not supported: '%s'", args[0])
}
cp.ProtocolMin = args[0]
}
if len(args) > 1 {
if _, ok := caddytls.SupportedProtocols[args[1]]; !ok {
- return nil, h.Errf("Wrong protocol name or protocol not supported: '%s'", args[1])
+ return nil, h.Errf("wrong protocol name or protocol not supported: '%s'", args[1])
}
cp.ProtocolMax = args[1]
}
@@ -197,7 +199,7 @@ func parseTLS(h Helper) ([]ConfigValue, error) {
case "ciphers":
for h.NextArg() {
if !caddytls.CipherSuiteNameSupported(h.Val()) {
- return nil, h.Errf("Wrong cipher suite name or cipher suite not supported: '%s'", h.Val())
+ return nil, h.Errf("wrong cipher suite name or cipher suite not supported: '%s'", h.Val())
}
cp.CipherSuites = append(cp.CipherSuites, h.Val())
}
@@ -764,9 +766,31 @@ func parseHandleErrors(h Helper) ([]ConfigValue, error) {
}, nil
}
+// parseInvoke parses the invoke directive.
+func parseInvoke(h Helper) (caddyhttp.MiddlewareHandler, error) {
+ h.Next() // consume directive
+ if !h.NextArg() {
+ return nil, h.ArgErr()
+ }
+ for h.Next() || h.NextBlock(0) {
+ return nil, h.ArgErr()
+ }
+
+ // remember that we're invoking this name
+ // to populate the server with these named routes
+ if h.State[namedRouteKey] == nil {
+ h.State[namedRouteKey] = map[string]struct{}{}
+ }
+ h.State[namedRouteKey].(map[string]struct{})[h.Val()] = struct{}{}
+
+ // return the handler
+ return &caddyhttp.Invoke{Name: h.Val()}, nil
+}
+
// parseLog parses the log directive. Syntax:
//
-// log {
+// log <logger_name> {
+// hostnames <hostnames...>
// output <writer_module> ...
// format <encoder_module> ...
// level <level>
@@ -787,11 +811,13 @@ func parseLogHelper(h Helper, globalLogNames map[string]struct{}) ([]ConfigValue
var configValues []ConfigValue
for h.Next() {
// Logic below expects that a name is always present when a
- // global option is being parsed.
- var globalLogName string
+ // global option is being parsed; or an optional override
+ // is supported for access logs.
+ var logName string
+
if parseAsGlobalOption {
if h.NextArg() {
- globalLogName = h.Val()
+ logName = h.Val()
// Only a single argument is supported.
if h.NextArg() {
@@ -802,26 +828,47 @@ func parseLogHelper(h Helper, globalLogNames map[string]struct{}) ([]ConfigValue
// reference the default logger. See the
// setupNewDefault function in the logging
// package for where this is configured.
- globalLogName = caddy.DefaultLoggerName
+ logName = caddy.DefaultLoggerName
}
// Verify this name is unused.
- _, used := globalLogNames[globalLogName]
+ _, used := globalLogNames[logName]
if used {
- return nil, h.Err("duplicate global log option for: " + globalLogName)
+ return nil, h.Err("duplicate global log option for: " + logName)
}
- globalLogNames[globalLogName] = struct{}{}
+ globalLogNames[logName] = struct{}{}
} else {
- // No arguments are supported for the server block log directive
+ // An optional override of the logger name can be provided;
+ // otherwise a default will be used, like "log0", "log1", etc.
if h.NextArg() {
- return nil, h.ArgErr()
+ logName = h.Val()
+
+ // Only a single argument is supported.
+ if h.NextArg() {
+ return nil, h.ArgErr()
+ }
}
}
cl := new(caddy.CustomLog)
+ // allow overriding the current site block's hostnames for this logger;
+ // this is useful for setting up loggers per subdomain in a site block
+ // with a wildcard domain
+ customHostnames := []string{}
+
for h.NextBlock(0) {
switch h.Val() {
+ case "hostnames":
+ if parseAsGlobalOption {
+ return nil, h.Err("hostnames is not allowed in the log global options")
+ }
+ args := h.RemainingArgs()
+ if len(args) == 0 {
+ return nil, h.ArgErr()
+ }
+ customHostnames = append(customHostnames, args...)
+
case "output":
if !h.NextArg() {
return nil, h.ArgErr()
@@ -880,18 +927,16 @@ func parseLogHelper(h Helper, globalLogNames map[string]struct{}) ([]ConfigValue
}
case "include":
- // This configuration is only allowed in the global options
if !parseAsGlobalOption {
- return nil, h.ArgErr()
+ return nil, h.Err("include is not allowed in the log directive")
}
for h.NextArg() {
cl.Include = append(cl.Include, h.Val())
}
case "exclude":
- // This configuration is only allowed in the global options
if !parseAsGlobalOption {
- return nil, h.ArgErr()
+ return nil, h.Err("exclude is not allowed in the log directive")
}
for h.NextArg() {
cl.Exclude = append(cl.Exclude, h.Val())
@@ -903,24 +948,34 @@ func parseLogHelper(h Helper, globalLogNames map[string]struct{}) ([]ConfigValue
}
var val namedCustomLog
+ val.hostnames = customHostnames
+
+ isEmptyConfig := reflect.DeepEqual(cl, new(caddy.CustomLog))
+
// Skip handling of empty logging configs
- if !reflect.DeepEqual(cl, new(caddy.CustomLog)) {
- if parseAsGlobalOption {
- // Use indicated name for global log options
- val.name = globalLogName
- val.log = cl
- } else {
+
+ if parseAsGlobalOption {
+ // Use indicated name for global log options
+ val.name = logName
+ } else {
+ if logName != "" {
+ val.name = logName
+ } else if !isEmptyConfig {
// Construct a log name for server log streams
logCounter, ok := h.State["logCounter"].(int)
if !ok {
logCounter = 0
}
val.name = fmt.Sprintf("log%d", logCounter)
- cl.Include = []string{"http.log.access." + val.name}
- val.log = cl
logCounter++
h.State["logCounter"] = logCounter
}
+ if val.name != "" {
+ cl.Include = []string{"http.log.access." + val.name}
+ }
+ }
+ if !isEmptyConfig {
+ val.log = cl
}
configValues = append(configValues, ConfigValue{
Class: "custom_log",
diff --git a/caddyconfig/httpcaddyfile/builtins_test.go b/caddyconfig/httpcaddyfile/builtins_test.go
index bd5e116..70f347d 100644
--- a/caddyconfig/httpcaddyfile/builtins_test.go
+++ b/caddyconfig/httpcaddyfile/builtins_test.go
@@ -1,6 +1,7 @@
package httpcaddyfile
import (
+ "strings"
"testing"
"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"
@@ -51,12 +52,13 @@ func TestLogDirectiveSyntax(t *testing.T) {
},
{
input: `:8080 {
- log invalid {
+ log name-override {
output file foo.log
}
}
`,
- expectError: true,
+ output: `{"logging":{"logs":{"default":{"exclude":["http.log.access.name-override"]},"name-override":{"writer":{"filename":"foo.log","output":"file"},"include":["http.log.access.name-override"]}}},"apps":{"http":{"servers":{"srv0":{"listen":[":8080"],"logs":{"default_logger_name":"name-override"}}}}}}`,
+ expectError: false,
},
} {
@@ -213,3 +215,139 @@ func TestRedirDirectiveSyntax(t *testing.T) {
}
}
}
+
+func TestImportErrorLine(t *testing.T) {
+ for i, tc := range []struct {
+ input string
+ errorFunc func(err error) bool
+ }{
+ {
+ input: `(t1) {
+ abort {args[:]}
+ }
+ :8080 {
+ import t1
+ import t1 true
+ }`,
+ errorFunc: func(err error) bool {
+ return err != nil && strings.Contains(err.Error(), "Caddyfile:6 (import t1)")
+ },
+ },
+ {
+ input: `(t1) {
+ abort {args[:]}
+ }
+ :8080 {
+ import t1 true
+ }`,
+ errorFunc: func(err error) bool {
+ return err != nil && strings.Contains(err.Error(), "Caddyfile:5 (import t1)")
+ },
+ },
+ {
+ input: `
+ import testdata/import_variadic_snippet.txt
+ :8080 {
+ import t1 true
+ }`,
+ errorFunc: func(err error) bool {
+ return err == nil
+ },
+ },
+ {
+ input: `
+ import testdata/import_variadic_with_import.txt
+ :8080 {
+ import t1 true
+ import t2 true
+ }`,
+ errorFunc: func(err error) bool {
+ return err == nil
+ },
+ },
+ } {
+ adapter := caddyfile.Adapter{
+ ServerType: ServerType{},
+ }
+
+ _, _, err := adapter.Adapt([]byte(tc.input), nil)
+
+ if !tc.errorFunc(err) {
+ t.Errorf("Test %d error expectation failed, got %s", i, err)
+ continue
+ }
+ }
+}
+
+func TestNestedImport(t *testing.T) {
+ for i, tc := range []struct {
+ input string
+ errorFunc func(err error) bool
+ }{
+ {
+ input: `(t1) {
+ respond {args[0]} {args[1]}
+ }
+
+ (t2) {
+ import t1 {args[0]} 202
+ }
+
+ :8080 {
+ handle {
+ import t2 "foobar"
+ }
+ }`,
+ errorFunc: func(err error) bool {
+ return err == nil
+ },
+ },
+ {
+ input: `(t1) {
+ respond {args[:]}
+ }
+
+ (t2) {
+ import t1 {args[0]} {args[1]}
+ }
+
+ :8080 {
+ handle {
+ import t2 "foobar" 202
+ }
+ }`,
+ errorFunc: func(err error) bool {
+ return err == nil
+ },
+ },
+ {
+ input: `(t1) {
+ respond {args[0]} {args[1]}
+ }
+
+ (t2) {
+ import t1 {args[:]}
+ }
+
+ :8080 {
+ handle {
+ import t2 "foobar" 202
+ }
+ }`,
+ errorFunc: func(err error) bool {
+ return err == nil
+ },
+ },
+ } {
+ adapter := caddyfile.Adapter{
+ ServerType: ServerType{},
+ }
+
+ _, _, err := adapter.Adapt([]byte(tc.input), nil)
+
+ if !tc.errorFunc(err) {
+ t.Errorf("Test %d error expectation failed, got %s", i, err)
+ continue
+ }
+ }
+}
diff --git a/caddyconfig/httpcaddyfile/directives.go b/caddyconfig/httpcaddyfile/directives.go
index a772dba..13229ed 100644
--- a/caddyconfig/httpcaddyfile/directives.go
+++ b/caddyconfig/httpcaddyfile/directives.go
@@ -65,6 +65,7 @@ var directiveOrder = []string{
"templates",
// special routing & dispatching directives
+ "invoke",
"handle",
"handle_path",
"route",
@@ -172,6 +173,7 @@ func (h Helper) Caddyfiles() []string {
for file := range files {
filesSlice = append(filesSlice, file)
}
+ sort.Strings(filesSlice)
return filesSlice
}
@@ -215,7 +217,8 @@ func (h Helper) ExtractMatcherSet() (caddy.ModuleMap, error) {
// NewRoute returns config values relevant to creating a new HTTP route.
func (h Helper) NewRoute(matcherSet caddy.ModuleMap,
- handler caddyhttp.MiddlewareHandler) []ConfigValue {
+ handler caddyhttp.MiddlewareHandler,
+) []ConfigValue {
mod, err := caddy.GetModule(caddy.GetModuleID(handler))
if err != nil {
*h.warnings = append(*h.warnings, caddyconfig.Warning{
@@ -427,26 +430,16 @@ func sortRoutes(routes []ConfigValue) {
jPathLen = len(jPM[0])
}
- // some directives involve setting values which can overwrite
- // each other, so it makes most sense to reverse the order so
- // that the lease specific matcher is first; everything else
- // has most-specific matcher first
- if iDir == "vars" {
+ sortByPath := func() bool {
// we can only confidently compare path lengths if both
// directives have a single path to match (issue #5037)
if iPathLen > 0 && jPathLen > 0 {
- // sort least-specific (shortest) path first
- return iPathLen < jPathLen
- }
+ // if both paths are the same except for a trailing wildcard,
+ // sort by the shorter path first (which is more specific)
+ if strings.TrimSuffix(iPM[0], "*") == strings.TrimSuffix(jPM[0], "*") {
+ return iPathLen < jPathLen
+ }
- // if both directives don't have a single path to compare,
- // sort whichever one has no matcher first; if both have
- // no matcher, sort equally (stable sort preserves order)
- return len(iRoute.MatcherSetsRaw) == 0 && len(jRoute.MatcherSetsRaw) > 0
- } else {
- // we can only confidently compare path lengths if both
- // directives have a single path to match (issue #5037)
- if iPathLen > 0 && jPathLen > 0 {
// sort most-specific (longest) path first
return iPathLen > jPathLen
}
@@ -455,7 +448,18 @@ func sortRoutes(routes []ConfigValue) {
// sort whichever one has a matcher first; if both have
// a matcher, sort equally (stable sort preserves order)
return len(iRoute.MatcherSetsRaw) > 0 && len(jRoute.MatcherSetsRaw) == 0
+ }()
+
+ // some directives involve setting values which can overwrite
+ // each other, so it makes most sense to reverse the order so
+ // that the least-specific matcher is first, allowing the last
+ // matching one to win
+ if iDir == "vars" {
+ return !sortByPath
}
+
+ // everything else is most-specific matcher first
+ return sortByPath
})
}
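
In effect the refactor reduces the two branches to a single comparator whose result is inverted for the vars directive. A toy, standalone model of that ordering over single-path matchers (not code from this patch):

package main

import (
	"fmt"
	"sort"
	"strings"
)

func main() {
	// paths that a single directive's routes match
	paths := []string{"/api*", "/api/v1/users", "/", "/api"}

	moreSpecificFirst := func(a, b string) bool {
		// same path except for a trailing wildcard: the exact (shorter) form wins
		if strings.TrimSuffix(a, "*") == strings.TrimSuffix(b, "*") {
			return len(a) < len(b)
		}
		// otherwise the longer path is treated as more specific
		return len(a) > len(b)
	}

	sort.SliceStable(paths, func(i, j int) bool {
		// for the "vars" directive the comparator's result is negated, so the
		// least specific matcher comes first and later values can overwrite it
		return moreSpecificFirst(paths[i], paths[j])
	})
	fmt.Println(paths) // [/api/v1/users /api /api* /]
}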
diff --git a/caddyconfig/httpcaddyfile/httptype.go b/caddyconfig/httpcaddyfile/httptype.go
index 50e98ac..3e8fdca 100644
--- a/caddyconfig/httpcaddyfile/httptype.go
+++ b/caddyconfig/httpcaddyfile/httptype.go
@@ -18,18 +18,19 @@ import (
"encoding/json"
"fmt"
"reflect"
- "regexp"
"sort"
"strconv"
"strings"
+ "go.uber.org/zap"
+ "golang.org/x/exp/slices"
+
"github.com/caddyserver/caddy/v2"
"github.com/caddyserver/caddy/v2/caddyconfig"
"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"
"github.com/caddyserver/caddy/v2/modules/caddyhttp"
"github.com/caddyserver/caddy/v2/modules/caddypki"
"github.com/caddyserver/caddy/v2/modules/caddytls"
- "go.uber.org/zap"
)
func init() {
@@ -48,12 +49,13 @@ type App struct {
}
// ServerType can set up a config from an HTTP Caddyfile.
-type ServerType struct {
-}
+type ServerType struct{}
// Setup makes a config from the tokens.
-func (st ServerType) Setup(inputServerBlocks []caddyfile.ServerBlock,
- options map[string]any) (*caddy.Config, []caddyconfig.Warning, error) {
+func (st ServerType) Setup(
+ inputServerBlocks []caddyfile.ServerBlock,
+ options map[string]any,
+) (*caddy.Config, []caddyconfig.Warning, error) {
var warnings []caddyconfig.Warning
gc := counter{new(int)}
state := make(map[string]any)
@@ -79,41 +81,18 @@ func (st ServerType) Setup(inputServerBlocks []caddyfile.ServerBlock,
return nil, warnings, err
}
- // replace shorthand placeholders (which are convenient
- // when writing a Caddyfile) with their actual placeholder
- // identifiers or variable names
- replacer := strings.NewReplacer(placeholderShorthands()...)
-
- // these are placeholders that allow a user-defined final
- // parameters, but we still want to provide a shorthand
- // for those, so we use a regexp to replace
- regexpReplacements := []struct {
- search *regexp.Regexp
- replace string
- }{
- {regexp.MustCompile(`{header\.([\w-]*)}`), "{http.request.header.$1}"},
- {regexp.MustCompile(`{cookie\.([\w-]*)}`), "{http.request.cookie.$1}"},
- {regexp.MustCompile(`{labels\.([\w-]*)}`), "{http.request.host.labels.$1}"},
- {regexp.MustCompile(`{path\.([\w-]*)}`), "{http.request.uri.path.$1}"},
- {regexp.MustCompile(`{file\.([\w-]*)}`), "{http.request.uri.path.file.$1}"},
- {regexp.MustCompile(`{query\.([\w-]*)}`), "{http.request.uri.query.$1}"},
- {regexp.MustCompile(`{re\.([\w-]*)\.([\w-]*)}`), "{http.regexp.$1.$2}"},
- {regexp.MustCompile(`{vars\.([\w-]*)}`), "{http.vars.$1}"},
- {regexp.MustCompile(`{rp\.([\w-\.]*)}`), "{http.reverse_proxy.$1}"},
- {regexp.MustCompile(`{err\.([\w-\.]*)}`), "{http.error.$1}"},
- {regexp.MustCompile(`{file_match\.([\w-]*)}`), "{http.matchers.file.$1}"},
+ // this will replace both static and user-defined placeholder shorthands
+ // with actual identifiers used by Caddy
+ replacer := NewShorthandReplacer()
+
+ originalServerBlocks, err = st.extractNamedRoutes(originalServerBlocks, options, &warnings, replacer)
+ if err != nil {
+ return nil, warnings, err
}
for _, sb := range originalServerBlocks {
- for _, segment := range sb.block.Segments {
- for i := 0; i < len(segment); i++ {
- // simple string replacements
- segment[i].Text = replacer.Replace(segment[i].Text)
- // complex regexp replacements
- for _, r := range regexpReplacements {
- segment[i].Text = r.search.ReplaceAllString(segment[i].Text, r.replace)
- }
- }
+ for i := range sb.block.Segments {
+ replacer.ApplyToSegment(&sb.block.Segments[i])
}
if len(sb.block.Keys) == 0 {
@@ -172,6 +151,18 @@ func (st ServerType) Setup(inputServerBlocks []caddyfile.ServerBlock,
result.directive = dir
sb.pile[result.Class] = append(sb.pile[result.Class], result)
}
+
+ // specially handle named routes that were pulled out from
+ // the invoke directive, which could be nested anywhere within
+ // some subroutes in this directive; we add them to the pile
+ // for this server block
+ if state[namedRouteKey] != nil {
+ for name := range state[namedRouteKey].(map[string]struct{}) {
+ result := ConfigValue{Class: namedRouteKey, Value: name}
+ sb.pile[result.Class] = append(sb.pile[result.Class], result)
+ }
+ state[namedRouteKey] = nil
+ }
}
}
@@ -222,7 +213,7 @@ func (st ServerType) Setup(inputServerBlocks []caddyfile.ServerBlock,
if ncl.name == caddy.DefaultLoggerName {
hasDefaultLog = true
}
- if _, ok := options["debug"]; ok && ncl.log.Level == "" {
+ if _, ok := options["debug"]; ok && ncl.log != nil && ncl.log.Level == "" {
ncl.log.Level = zap.DebugLevel.CapitalString()
}
customLogs = append(customLogs, ncl)
@@ -241,7 +232,9 @@ func (st ServerType) Setup(inputServerBlocks []caddyfile.ServerBlock,
if _, ok := options["debug"]; ok {
customLogs = append(customLogs, namedCustomLog{
name: caddy.DefaultLoggerName,
- log: &caddy.CustomLog{Level: zap.DebugLevel.CapitalString()},
+ log: &caddy.CustomLog{
+ BaseLog: caddy.BaseLog{Level: zap.DebugLevel.CapitalString()},
+ },
})
}
}
@@ -303,7 +296,21 @@ func (st ServerType) Setup(inputServerBlocks []caddyfile.ServerBlock,
Logs: make(map[string]*caddy.CustomLog),
}
}
+
+ // Add the default log first if defined, so that it doesn't
+ // accidentally get re-created below due to the Exclude logic
for _, ncl := range customLogs {
+ if ncl.name == caddy.DefaultLoggerName && ncl.log != nil {
+ cfg.Logging.Logs[caddy.DefaultLoggerName] = ncl.log
+ break
+ }
+ }
+
+ // Add the rest of the custom logs
+ for _, ncl := range customLogs {
+ if ncl.log == nil || ncl.name == caddy.DefaultLoggerName {
+ continue
+ }
if ncl.name != "" {
cfg.Logging.Logs[ncl.name] = ncl.log
}
@@ -317,8 +324,16 @@ func (st ServerType) Setup(inputServerBlocks []caddyfile.ServerBlock,
cfg.Logging.Logs[caddy.DefaultLoggerName] = defaultLog
}
defaultLog.Exclude = append(defaultLog.Exclude, ncl.log.Include...)
+
+ // avoid duplicates by sorting + compacting
+ sort.Strings(defaultLog.Exclude)
+ defaultLog.Exclude = slices.Compact[[]string, string](defaultLog.Exclude)
}
}
+ // we may have not actually added anything, so remove if empty
+ if len(cfg.Logging.Logs) == 0 {
+ cfg.Logging = nil
+ }
}
return cfg, warnings, nil
@@ -401,6 +416,81 @@ func (ServerType) evaluateGlobalOptionsBlock(serverBlocks []serverBlock, options
return serverBlocks[1:], nil
}
+// extractNamedRoutes pulls out any named route server blocks
+// so they don't get parsed as sites, and stores them in options
+// for later.
+func (ServerType) extractNamedRoutes(
+ serverBlocks []serverBlock,
+ options map[string]any,
+ warnings *[]caddyconfig.Warning,
+ replacer ShorthandReplacer,
+) ([]serverBlock, error) {
+ namedRoutes := map[string]*caddyhttp.Route{}
+
+ gc := counter{new(int)}
+ state := make(map[string]any)
+
+ // copy the server blocks so we can
+ // splice out the named route ones
+ filtered := append([]serverBlock{}, serverBlocks...)
+ index := -1
+
+ for _, sb := range serverBlocks {
+ index++
+ if !sb.block.IsNamedRoute {
+ continue
+ }
+
+ // splice out this block, because we know it's not a real server
+ filtered = append(filtered[:index], filtered[index+1:]...)
+ index--
+
+ if len(sb.block.Segments) == 0 {
+ continue
+ }
+
+ wholeSegment := caddyfile.Segment{}
+ for i := range sb.block.Segments {
+ // replace user-defined placeholder shorthands in extracted named routes
+ replacer.ApplyToSegment(&sb.block.Segments[i])
+
+ // zip up all the segments since ParseSegmentAsSubroute
+ // was designed to take a directive+
+ wholeSegment = append(wholeSegment, sb.block.Segments[i]...)
+ }
+
+ h := Helper{
+ Dispenser: caddyfile.NewDispenser(wholeSegment),
+ options: options,
+ warnings: warnings,
+ matcherDefs: nil,
+ parentBlock: sb.block,
+ groupCounter: gc,
+ State: state,
+ }
+
+ handler, err := ParseSegmentAsSubroute(h)
+ if err != nil {
+ return nil, err
+ }
+ subroute := handler.(*caddyhttp.Subroute)
+ route := caddyhttp.Route{}
+
+ if len(subroute.Routes) == 1 && len(subroute.Routes[0].MatcherSetsRaw) == 0 {
+ // if there's only one route with no matcher, then we can simplify
+ route.HandlersRaw = append(route.HandlersRaw, subroute.Routes[0].HandlersRaw[0])
+ } else {
+ // otherwise we need the whole subroute
+ route.HandlersRaw = []json.RawMessage{caddyconfig.JSONModuleObject(handler, "handler", subroute.CaddyModule().ID.Name(), h.warnings)}
+ }
+
+ namedRoutes[sb.block.Keys[0]] = &route
+ }
+ options["named_routes"] = namedRoutes
+
+ return filtered, nil
+}
+
// serversFromPairings creates the servers for each pairing of addresses
// to server blocks. Each pairing is essentially a server definition.
func (st *ServerType) serversFromPairings(
@@ -411,6 +501,7 @@ func (st *ServerType) serversFromPairings(
) (map[string]*caddyhttp.Server, error) {
servers := make(map[string]*caddyhttp.Server)
defaultSNI := tryString(options["default_sni"], warnings)
+ fallbackSNI := tryString(options["fallback_sni"], warnings)
httpPort := strconv.Itoa(caddyhttp.DefaultHTTPPort)
if hp, ok := options["http_port"].(int); ok {
@@ -539,6 +630,24 @@ func (st *ServerType) serversFromPairings(
}
}
+ // add named routes to the server if 'invoke' was used inside of it
+ configuredNamedRoutes := options["named_routes"].(map[string]*caddyhttp.Route)
+ for _, sblock := range p.serverBlocks {
+ if len(sblock.pile[namedRouteKey]) == 0 {
+ continue
+ }
+ for _, value := range sblock.pile[namedRouteKey] {
+ if srv.NamedRoutes == nil {
+ srv.NamedRoutes = map[string]*caddyhttp.Route{}
+ }
+ name := value.Value.(string)
+ if configuredNamedRoutes[name] == nil {
+ return nil, fmt.Errorf("cannot invoke named route '%s', which was not defined", name)
+ }
+ srv.NamedRoutes[name] = configuredNamedRoutes[name]
+ }
+ }
+
// create a subroute for each site in the server block
for _, sblock := range p.serverBlocks {
matcherSetsEnc, err := st.compileEncodedMatcherSets(sblock)
@@ -568,14 +677,21 @@ func (st *ServerType) serversFromPairings(
cp.DefaultSNI = defaultSNI
break
}
+ if h == fallbackSNI {
+ hosts = append(hosts, "")
+ cp.FallbackSNI = fallbackSNI
+ break
+ }
}
if len(hosts) > 0 {
+ slices.Sort(hosts) // for deterministic JSON output
cp.MatchersRaw = caddy.ModuleMap{
"sni": caddyconfig.JSON(hosts, warnings), // make sure to match all hosts, not just auto-HTTPS-qualified ones
}
} else {
cp.DefaultSNI = defaultSNI
+ cp.FallbackSNI = fallbackSNI
}
// only append this policy if it actually changes something
@@ -601,10 +717,20 @@ func (st *ServerType) serversFromPairings(
}
}
+ // If TLS is specified as a directive, it will also result in one or more connection policies being created.
+ // Thus, a catch-all address with a non-standard port, e.g. :8443, can have TLS enabled without
+ // the "https://" prefix having to be specified.
+ // The second part of the condition allows a TLS connection policy to be created even though `auto_https`
+ // has been disabled, preserving the behavior described in the link below:
+ // https://caddy.community/t/making-sense-of-auto-https-and-why-disabling-it-still-serves-https-instead-of-http/9761
+ createdTLSConnPolicies, ok := sblock.pile["tls.connection_policy"]
+ hasTLSEnabled := (ok && len(createdTLSConnPolicies) > 0) ||
+ (addr.Host != "" && srv.AutoHTTPS != nil && !sliceContains(srv.AutoHTTPS.Skip, addr.Host))
+
// we'll need to remember if the address qualifies for auto-HTTPS, so we
// can add a TLS conn policy if necessary
if addr.Scheme == "https" ||
- (addr.Scheme != "http" && addr.Host != "" && addr.Port != httpPort) {
+ (addr.Scheme != "http" && addr.Port != httpPort && hasTLSEnabled) {
addressQualifiesForTLS = true
}
// predict whether auto-HTTPS will add the conn policy for us; if so, we
@@ -653,12 +779,20 @@ func (st *ServerType) serversFromPairings(
sblockLogHosts := sblock.hostsFromKeys(true)
for _, cval := range sblock.pile["custom_log"] {
ncl := cval.Value.(namedCustomLog)
- if sblock.hasHostCatchAllKey() {
+ if sblock.hasHostCatchAllKey() && len(ncl.hostnames) == 0 {
// all requests for hosts not able to be listed should use
// this log because it's a catch-all-hosts server block
srv.Logs.DefaultLoggerName = ncl.name
+ } else if len(ncl.hostnames) > 0 {
+ // if the logger overrides the hostnames, map that to the logger name
+ for _, h := range ncl.hostnames {
+ if srv.Logs.LoggerNames == nil {
+ srv.Logs.LoggerNames = make(map[string]string)
+ }
+ srv.Logs.LoggerNames[h] = ncl.name
+ }
} else {
- // map each host to the user's desired logger name
+ // otherwise, map each host to the logger name
for _, h := range sblockLogHosts {
if srv.Logs.LoggerNames == nil {
srv.Logs.LoggerNames = make(map[string]string)
@@ -701,8 +835,8 @@ func (st *ServerType) serversFromPairings(
// policy missing for any HTTPS-enabled hosts, if so, add it... maybe?
if addressQualifiesForTLS &&
!hasCatchAllTLSConnPolicy &&
- (len(srv.TLSConnPolicies) > 0 || !autoHTTPSWillAddConnPolicy || defaultSNI != "") {
- srv.TLSConnPolicies = append(srv.TLSConnPolicies, &caddytls.ConnectionPolicy{DefaultSNI: defaultSNI})
+ (len(srv.TLSConnPolicies) > 0 || !autoHTTPSWillAddConnPolicy || defaultSNI != "" || fallbackSNI != "") {
+ srv.TLSConnPolicies = append(srv.TLSConnPolicies, &caddytls.ConnectionPolicy{DefaultSNI: defaultSNI, FallbackSNI: fallbackSNI})
}
// tidy things up a bit
@@ -911,8 +1045,8 @@ func appendSubrouteToRouteList(routeList caddyhttp.RouteList,
subroute *caddyhttp.Subroute,
matcherSetsEnc []caddy.ModuleMap,
p sbAddrAssociation,
- warnings *[]caddyconfig.Warning) caddyhttp.RouteList {
-
+ warnings *[]caddyconfig.Warning,
+) caddyhttp.RouteList {
// nothing to do if... there's nothing to do
if len(matcherSetsEnc) == 0 && len(subroute.Routes) == 0 && subroute.Errors == nil {
return routeList
@@ -974,7 +1108,7 @@ func buildSubroute(routes []ConfigValue, groupCounter counter, needsSorting bool
if needsSorting {
for _, val := range routes {
if !directiveIsOrdered(val.directive) {
- return nil, fmt.Errorf("directive '%s' is not an ordered HTTP handler, so it cannot be used here", val.directive)
+ return nil, fmt.Errorf("directive '%s' is not an ordered HTTP handler, so it cannot be used here - try placing within a route block or using the order global option", val.directive)
}
}
@@ -1301,36 +1435,6 @@ func encodeMatcherSet(matchers map[string]caddyhttp.RequestMatcher) (caddy.Modul
return msEncoded, nil
}
-// placeholderShorthands returns a slice of old-new string pairs,
-// where the left of the pair is a placeholder shorthand that may
-// be used in the Caddyfile, and the right is the replacement.
-func placeholderShorthands() []string {
- return []string{
- "{dir}", "{http.request.uri.path.dir}",
- "{file}", "{http.request.uri.path.file}",
- "{host}", "{http.request.host}",
- "{hostport}", "{http.request.hostport}",
- "{port}", "{http.request.port}",
- "{method}", "{http.request.method}",
- "{path}", "{http.request.uri.path}",
- "{query}", "{http.request.uri.query}",
- "{remote}", "{http.request.remote}",
- "{remote_host}", "{http.request.remote.host}",
- "{remote_port}", "{http.request.remote.port}",
- "{scheme}", "{http.request.scheme}",
- "{uri}", "{http.request.uri}",
- "{tls_cipher}", "{http.request.tls.cipher_suite}",
- "{tls_version}", "{http.request.tls.version}",
- "{tls_client_fingerprint}", "{http.request.tls.client.fingerprint}",
- "{tls_client_issuer}", "{http.request.tls.client.issuer}",
- "{tls_client_serial}", "{http.request.tls.client.serial}",
- "{tls_client_subject}", "{http.request.tls.client.subject}",
- "{tls_client_certificate_pem}", "{http.request.tls.client.certificate_pem}",
- "{tls_client_certificate_der_base64}", "{http.request.tls.client.certificate_der_base64}",
- "{upstream_hostport}", "{http.reverse_proxy.upstream.hostport}",
- }
-}
-
// WasReplacedPlaceholderShorthand checks if a token string was
// likely a replaced shorthand of the known Caddyfile placeholder
// replacement outputs. Useful to prevent some user-defined map
@@ -1446,8 +1550,9 @@ func (c counter) nextGroup() string {
}
type namedCustomLog struct {
- name string
- log *caddy.CustomLog
+ name string
+ hostnames []string
+ log *caddy.CustomLog
}
// sbAddrAssociation is a mapping from a list of
@@ -1458,7 +1563,10 @@ type sbAddrAssociation struct {
serverBlocks []serverBlock
}
-const matcherPrefix = "@"
+const (
+ matcherPrefix = "@"
+ namedRouteKey = "named_route"
+)
// Interface guard
var _ caddyfile.ServerType = (*ServerType)(nil)
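
The named-route handling above is the server-side half of the new invoke directive: a route defined once at the top level can be invoked from any site. A hedged sketch of the Caddyfile shape this enables; the &(name) definition syntax is assumed from the named-route feature this patch wires up (see IsNamedRoute above), and the route name and port are placeholders:

package main

import (
	"fmt"

	"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"
	"github.com/caddyserver/caddy/v2/caddyconfig/httpcaddyfile"
)

func main() {
	// &(demo) defines a named route; "invoke demo" references it, which is
	// what populates srv.NamedRoutes in serversFromPairings above.
	input := `&(demo) {
	respond "invoked" 200
}

:8080 {
	invoke demo
}`

	adapter := caddyfile.Adapter{ServerType: httpcaddyfile.ServerType{}}
	cfg, _, err := adapter.Adapt([]byte(input), nil)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(cfg)) // JSON config with a "named_routes" entry for "demo"
}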
diff --git a/caddyconfig/httpcaddyfile/options.go b/caddyconfig/httpcaddyfile/options.go
index 4e5212b..ba1896b 100644
--- a/caddyconfig/httpcaddyfile/options.go
+++ b/caddyconfig/httpcaddyfile/options.go
@@ -17,12 +17,13 @@ package httpcaddyfile
import (
"strconv"
+ "github.com/caddyserver/certmagic"
+ "github.com/mholt/acmez/acme"
+
"github.com/caddyserver/caddy/v2"
"github.com/caddyserver/caddy/v2/caddyconfig"
"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"
"github.com/caddyserver/caddy/v2/modules/caddytls"
- "github.com/caddyserver/certmagic"
- "github.com/mholt/acmez/acme"
)
func init() {
@@ -33,6 +34,7 @@ func init() {
RegisterGlobalOption("grace_period", parseOptDuration)
RegisterGlobalOption("shutdown_delay", parseOptDuration)
RegisterGlobalOption("default_sni", parseOptSingleString)
+ RegisterGlobalOption("fallback_sni", parseOptSingleString)
RegisterGlobalOption("order", parseOptOrder)
RegisterGlobalOption("storage", parseOptStorage)
RegisterGlobalOption("storage_clean_interval", parseOptDuration)
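
For reference, fallback_sni reuses the same single-string parsing as default_sni. A hedged sketch of how an external plugin could register a comparable one-argument global option; the option name, package, and parser are invented for illustration:

package exampleplugin

import (
	"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"
	"github.com/caddyserver/caddy/v2/caddyconfig/httpcaddyfile"
)

func init() {
	httpcaddyfile.RegisterGlobalOption("example_option", parseExampleOption)
}

// parseExampleOption accepts exactly one argument and returns it as a string,
// mirroring how single-string options like fallback_sni are parsed.
func parseExampleOption(d *caddyfile.Dispenser, _ any) (any, error) {
	d.Next() // consume the option name
	if !d.Next() {
		return "", d.ArgErr()
	}
	val := d.Val()
	if d.Next() {
		return "", d.ArgErr()
	}
	return val, nil
}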
diff --git a/caddyconfig/httpcaddyfile/pkiapp.go b/caddyconfig/httpcaddyfile/pkiapp.go
index 3414636..b5c6821 100644
--- a/caddyconfig/httpcaddyfile/pkiapp.go
+++ b/caddyconfig/httpcaddyfile/pkiapp.go
@@ -174,7 +174,6 @@ func (st ServerType) buildPKIApp(
options map[string]any,
warnings []caddyconfig.Warning,
) (*caddypki.PKI, []caddyconfig.Warning, error) {
-
skipInstallTrust := false
if _, ok := options["skip_install_trust"]; ok {
skipInstallTrust = true
diff --git a/caddyconfig/httpcaddyfile/serveroptions.go b/caddyconfig/httpcaddyfile/serveroptions.go
index eb57c58..6d7c678 100644
--- a/caddyconfig/httpcaddyfile/serveroptions.go
+++ b/caddyconfig/httpcaddyfile/serveroptions.go
@@ -18,11 +18,12 @@ import (
"encoding/json"
"fmt"
+ "github.com/dustin/go-humanize"
+
"github.com/caddyserver/caddy/v2"
"github.com/caddyserver/caddy/v2/caddyconfig"
"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"
"github.com/caddyserver/caddy/v2/modules/caddyhttp"
- "github.com/dustin/go-humanize"
)
// serverOptions collects server config overrides parsed from Caddyfile global options
@@ -41,9 +42,11 @@ type serverOptions struct {
IdleTimeout caddy.Duration
KeepAliveInterval caddy.Duration
MaxHeaderBytes int
+ EnableFullDuplex bool
Protocols []string
StrictSNIHost *bool
TrustedProxiesRaw json.RawMessage
+ ClientIPHeaders []string
ShouldLogCredentials bool
Metrics *caddyhttp.Metrics
}
@@ -156,6 +159,12 @@ func unmarshalCaddyfileServerOptions(d *caddyfile.Dispenser) (any, error) {
}
serverOpts.MaxHeaderBytes = int(size)
+ case "enable_full_duplex":
+ if d.NextArg() {
+ return nil, d.ArgErr()
+ }
+ serverOpts.EnableFullDuplex = true
+
case "log_credentials":
if d.NextArg() {
return nil, d.ArgErr()
@@ -208,6 +217,18 @@ func unmarshalCaddyfileServerOptions(d *caddyfile.Dispenser) (any, error) {
)
serverOpts.TrustedProxiesRaw = jsonSource
+ case "client_ip_headers":
+ headers := d.RemainingArgs()
+ for _, header := range headers {
+ if sliceContains(serverOpts.ClientIPHeaders, header) {
+ return nil, d.Errf("client IP header %s specified more than once", header)
+ }
+ serverOpts.ClientIPHeaders = append(serverOpts.ClientIPHeaders, header)
+ }
+ if nesting := d.Nesting(); d.NextBlock(nesting) {
+ return nil, d.ArgErr()
+ }
+
case "metrics":
if d.NextArg() {
return nil, d.ArgErr()
@@ -314,9 +335,11 @@ func applyServerOptions(
server.IdleTimeout = opts.IdleTimeout
server.KeepAliveInterval = opts.KeepAliveInterval
server.MaxHeaderBytes = opts.MaxHeaderBytes
+ server.EnableFullDuplex = opts.EnableFullDuplex
server.Protocols = opts.Protocols
server.StrictSNIHost = opts.StrictSNIHost
server.TrustedProxiesRaw = opts.TrustedProxiesRaw
+ server.ClientIPHeaders = opts.ClientIPHeaders
server.Metrics = opts.Metrics
if opts.ShouldLogCredentials {
if server.Logs == nil {
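
The new options pair naturally with the {client_ip} shorthand added in shorthands.go below. A hedged sketch of a Caddyfile that exercises both, adapted the same way as in the tests in this change (header names and port are placeholders):

package main

import (
	"fmt"

	"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"
	"github.com/caddyserver/caddy/v2/caddyconfig/httpcaddyfile"
)

func main() {
	input := `{
	servers {
		enable_full_duplex
		client_ip_headers X-Forwarded-For X-Real-IP
	}
}

:8080 {
	respond {client_ip}
}`

	adapter := caddyfile.Adapter{ServerType: httpcaddyfile.ServerType{}}
	cfg, _, err := adapter.Adapt([]byte(input), nil)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(cfg)) // server config with enable_full_duplex and client_ip_headers set
}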
diff --git a/caddyconfig/httpcaddyfile/shorthands.go b/caddyconfig/httpcaddyfile/shorthands.go
new file mode 100644
index 0000000..102bc36
--- /dev/null
+++ b/caddyconfig/httpcaddyfile/shorthands.go
@@ -0,0 +1,92 @@
+package httpcaddyfile
+
+import (
+ "regexp"
+ "strings"
+
+ "github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"
+)
+
+type ComplexShorthandReplacer struct {
+ search *regexp.Regexp
+ replace string
+}
+
+type ShorthandReplacer struct {
+ complex []ComplexShorthandReplacer
+ simple *strings.Replacer
+}
+
+func NewShorthandReplacer() ShorthandReplacer {
+ // replace shorthand placeholders (which are convenient
+ // when writing a Caddyfile) with their actual placeholder
+ // identifiers or variable names
+ replacer := strings.NewReplacer(placeholderShorthands()...)
+
+ // these are placeholders that allow user-defined final
+ // parameters, but we still want to provide a shorthand
+ // for those, so we use a regexp to replace them
+ regexpReplacements := []ComplexShorthandReplacer{
+ {regexp.MustCompile(`{header\.([\w-]*)}`), "{http.request.header.$1}"},
+ {regexp.MustCompile(`{cookie\.([\w-]*)}`), "{http.request.cookie.$1}"},
+ {regexp.MustCompile(`{labels\.([\w-]*)}`), "{http.request.host.labels.$1}"},
+ {regexp.MustCompile(`{path\.([\w-]*)}`), "{http.request.uri.path.$1}"},
+ {regexp.MustCompile(`{file\.([\w-]*)}`), "{http.request.uri.path.file.$1}"},
+ {regexp.MustCompile(`{query\.([\w-]*)}`), "{http.request.uri.query.$1}"},
+ {regexp.MustCompile(`{re\.([\w-]*)\.([\w-]*)}`), "{http.regexp.$1.$2}"},
+ {regexp.MustCompile(`{vars\.([\w-]*)}`), "{http.vars.$1}"},
+ {regexp.MustCompile(`{rp\.([\w-\.]*)}`), "{http.reverse_proxy.$1}"},
+ {regexp.MustCompile(`{err\.([\w-\.]*)}`), "{http.error.$1}"},
+ {regexp.MustCompile(`{file_match\.([\w-]*)}`), "{http.matchers.file.$1}"},
+ }
+
+ return ShorthandReplacer{
+ complex: regexpReplacements,
+ simple: replacer,
+ }
+}
+
+// placeholderShorthands returns a slice of old-new string pairs,
+// where the left of the pair is a placeholder shorthand that may
+// be used in the Caddyfile, and the right is the replacement.
+func placeholderShorthands() []string {
+ return []string{
+ "{dir}", "{http.request.uri.path.dir}",
+ "{file}", "{http.request.uri.path.file}",
+ "{host}", "{http.request.host}",
+ "{hostport}", "{http.request.hostport}",
+ "{port}", "{http.request.port}",
+ "{method}", "{http.request.method}",
+ "{path}", "{http.request.uri.path}",
+ "{query}", "{http.request.uri.query}",
+ "{remote}", "{http.request.remote}",
+ "{remote_host}", "{http.request.remote.host}",
+ "{remote_port}", "{http.request.remote.port}",
+ "{scheme}", "{http.request.scheme}",
+ "{uri}", "{http.request.uri}",
+ "{tls_cipher}", "{http.request.tls.cipher_suite}",
+ "{tls_version}", "{http.request.tls.version}",
+ "{tls_client_fingerprint}", "{http.request.tls.client.fingerprint}",
+ "{tls_client_issuer}", "{http.request.tls.client.issuer}",
+ "{tls_client_serial}", "{http.request.tls.client.serial}",
+ "{tls_client_subject}", "{http.request.tls.client.subject}",
+ "{tls_client_certificate_pem}", "{http.request.tls.client.certificate_pem}",
+ "{tls_client_certificate_der_base64}", "{http.request.tls.client.certificate_der_base64}",
+ "{upstream_hostport}", "{http.reverse_proxy.upstream.hostport}",
+ "{client_ip}", "{http.vars.client_ip}",
+ }
+}
+
+// ApplyToSegment replaces each shorthand placeholder with its full placeholder, as understood by Caddy.
+func (s ShorthandReplacer) ApplyToSegment(segment *caddyfile.Segment) {
+ if segment != nil {
+ for i := 0; i < len(*segment); i++ {
+ // simple string replacements
+ (*segment)[i].Text = s.simple.Replace((*segment)[i].Text)
+ // complex regexp replacements
+ for _, r := range s.complex {
+ (*segment)[i].Text = r.search.ReplaceAllString((*segment)[i].Text, r.replace)
+ }
+ }
+ }
+}
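
Since the replacer is now exported, other parts of the adapter (and, in principle, plugins) can apply the same shorthands to raw token segments. A hedged sketch of direct use; the sample tokens are arbitrary:

package main

import (
	"fmt"

	"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"
	"github.com/caddyserver/caddy/v2/caddyconfig/httpcaddyfile"
)

func main() {
	seg := caddyfile.Segment{
		{Text: "respond"},
		{Text: "{host}{uri} from {client_ip} (re: {re.myre.1})"},
	}

	r := httpcaddyfile.NewShorthandReplacer()
	r.ApplyToSegment(&seg)

	fmt.Println(seg[1].Text)
	// roughly: {http.request.host}{http.request.uri} from {http.vars.client_ip} (re: {http.regexp.myre.1})
}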
diff --git a/caddyconfig/httpcaddyfile/testdata/import_variadic.txt b/caddyconfig/httpcaddyfile/testdata/import_variadic.txt
new file mode 100644
index 0000000..f1e50e0
--- /dev/null
+++ b/caddyconfig/httpcaddyfile/testdata/import_variadic.txt
@@ -0,0 +1,9 @@
+(t2) {
+ respond 200 {
+ body {args[:]}
+ }
+}
+
+:8082 {
+ import t2 false
+} \ No newline at end of file
diff --git a/caddyconfig/httpcaddyfile/testdata/import_variadic_snippet.txt b/caddyconfig/httpcaddyfile/testdata/import_variadic_snippet.txt
new file mode 100644
index 0000000..a02fcf9
--- /dev/null
+++ b/caddyconfig/httpcaddyfile/testdata/import_variadic_snippet.txt
@@ -0,0 +1,9 @@
+(t1) {
+ respond 200 {
+ body {args[:]}
+ }
+}
+
+:8081 {
+ import t1 false
+} \ No newline at end of file
diff --git a/caddyconfig/httpcaddyfile/testdata/import_variadic_with_import.txt b/caddyconfig/httpcaddyfile/testdata/import_variadic_with_import.txt
new file mode 100644
index 0000000..ab1b32d
--- /dev/null
+++ b/caddyconfig/httpcaddyfile/testdata/import_variadic_with_import.txt
@@ -0,0 +1,15 @@
+(t1) {
+ respond 200 {
+ body {args[:]}
+ }
+}
+
+:8081 {
+ import t1 false
+}
+
+import import_variadic.txt
+
+:8083 {
+ import t2 true
+} \ No newline at end of file
diff --git a/caddyconfig/httpcaddyfile/tlsapp.go b/caddyconfig/httpcaddyfile/tlsapp.go
index 3d32b4f..927f225 100644
--- a/caddyconfig/httpcaddyfile/tlsapp.go
+++ b/caddyconfig/httpcaddyfile/tlsapp.go
@@ -23,12 +23,13 @@ import (
"strconv"
"strings"
+ "github.com/caddyserver/certmagic"
+ "github.com/mholt/acmez/acme"
+
"github.com/caddyserver/caddy/v2"
"github.com/caddyserver/caddy/v2/caddyconfig"
"github.com/caddyserver/caddy/v2/modules/caddyhttp"
"github.com/caddyserver/caddy/v2/modules/caddytls"
- "github.com/caddyserver/certmagic"
- "github.com/mholt/acmez/acme"
)
func (st ServerType) buildTLSApp(
@@ -36,7 +37,6 @@ func (st ServerType) buildTLSApp(
options map[string]any,
warnings []caddyconfig.Warning,
) (*caddytls.TLS, []caddyconfig.Warning, error) {
-
tlsApp := &caddytls.TLS{CertificatesRaw: make(caddy.ModuleMap)}
var certLoaders []caddytls.CertificateLoader
@@ -206,8 +206,8 @@ func (st ServerType) buildTLSApp(
}
// associate our new automation policy with this server block's hosts
- ap.Subjects = sblock.hostsFromKeysNotHTTP(httpPort)
- sort.Strings(ap.Subjects) // solely for deterministic test results
+ ap.SubjectsRaw = sblock.hostsFromKeysNotHTTP(httpPort)
+ sort.Strings(ap.SubjectsRaw) // solely for deterministic test results
// if a combination of public and internal names were given
// for this same server block and no issuer was specified, we
@@ -217,7 +217,11 @@ func (st ServerType) buildTLSApp(
var ap2 *caddytls.AutomationPolicy
if len(ap.Issuers) == 0 {
var internal, external []string
- for _, s := range ap.Subjects {
+ for _, s := range ap.SubjectsRaw {
+ // do not create Issuers for Tailscale domains; they will be given a Manager instead
+ if strings.HasSuffix(strings.ToLower(s), ".ts.net") {
+ continue
+ }
if !certmagic.SubjectQualifiesForCert(s) {
return nil, warnings, fmt.Errorf("subject does not qualify for certificate: '%s'", s)
}
@@ -235,10 +239,10 @@ func (st ServerType) buildTLSApp(
}
}
if len(external) > 0 && len(internal) > 0 {
- ap.Subjects = external
+ ap.SubjectsRaw = external
apCopy := *ap
ap2 = &apCopy
- ap2.Subjects = internal
+ ap2.SubjectsRaw = internal
ap2.IssuersRaw = []json.RawMessage{caddyconfig.JSONModuleObject(caddytls.InternalIssuer{}, "module", "internal", &warnings)}
}
}
@@ -339,14 +343,14 @@ func (st ServerType) buildTLSApp(
for h := range httpsHostsSharedWithHostlessKey {
al = append(al, h)
if !certmagic.SubjectQualifiesForPublicCert(h) {
- internalAP.Subjects = append(internalAP.Subjects, h)
+ internalAP.SubjectsRaw = append(internalAP.SubjectsRaw, h)
}
}
}
if len(al) > 0 {
tlsApp.CertificatesRaw["automate"] = caddyconfig.JSON(al, &warnings)
}
- if len(internalAP.Subjects) > 0 {
+ if len(internalAP.SubjectsRaw) > 0 {
if tlsApp.Automation == nil {
tlsApp.Automation = new(caddytls.AutomationConfig)
}
@@ -412,7 +416,7 @@ func (st ServerType) buildTLSApp(
// for convenience)
automationHostSet := make(map[string]struct{})
for _, ap := range tlsApp.Automation.Policies {
- for _, s := range ap.Subjects {
+ for _, s := range ap.SubjectsRaw {
if _, ok := automationHostSet[s]; ok {
return nil, warnings, fmt.Errorf("hostname appears in more than one automation policy, making certificate management ambiguous: %s", s)
}
@@ -533,7 +537,7 @@ func consolidateAutomationPolicies(aps []*caddytls.AutomationPolicy) []*caddytls
if automationPolicyIsSubset(aps[j], aps[i]) {
return false
}
- return len(aps[i].Subjects) > len(aps[j].Subjects)
+ return len(aps[i].SubjectsRaw) > len(aps[j].SubjectsRaw)
})
emptyAPCount := 0
@@ -541,7 +545,7 @@ func consolidateAutomationPolicies(aps []*caddytls.AutomationPolicy) []*caddytls
// compute the number of empty policies (disregarding subjects) - see #4128
emptyAP := new(caddytls.AutomationPolicy)
for i := 0; i < len(aps); i++ {
- emptyAP.Subjects = aps[i].Subjects
+ emptyAP.SubjectsRaw = aps[i].SubjectsRaw
if reflect.DeepEqual(aps[i], emptyAP) {
emptyAPCount++
if !automationPolicyHasAllPublicNames(aps[i]) {
@@ -583,7 +587,7 @@ outer:
aps[i].KeyType == aps[j].KeyType &&
aps[i].OnDemand == aps[j].OnDemand &&
aps[i].RenewalWindowRatio == aps[j].RenewalWindowRatio {
- if len(aps[i].Subjects) > 0 && len(aps[j].Subjects) == 0 {
+ if len(aps[i].SubjectsRaw) > 0 && len(aps[j].SubjectsRaw) == 0 {
// later policy (at j) has no subjects ("catch-all"), so we can
// remove the identical-but-more-specific policy that comes first
// AS LONG AS it is not shadowed by another policy before it; e.g.
@@ -598,9 +602,9 @@ outer:
}
} else {
// avoid repeated subjects
- for _, subj := range aps[j].Subjects {
- if !sliceContains(aps[i].Subjects, subj) {
- aps[i].Subjects = append(aps[i].Subjects, subj)
+ for _, subj := range aps[j].SubjectsRaw {
+ if !sliceContains(aps[i].SubjectsRaw, subj) {
+ aps[i].SubjectsRaw = append(aps[i].SubjectsRaw, subj)
}
}
aps = append(aps[:j], aps[j+1:]...)
@@ -616,15 +620,15 @@ outer:
// automationPolicyIsSubset returns true if a's subjects are a subset
// of b's subjects.
func automationPolicyIsSubset(a, b *caddytls.AutomationPolicy) bool {
- if len(b.Subjects) == 0 {
+ if len(b.SubjectsRaw) == 0 {
return true
}
- if len(a.Subjects) == 0 {
+ if len(a.SubjectsRaw) == 0 {
return false
}
- for _, aSubj := range a.Subjects {
+ for _, aSubj := range a.SubjectsRaw {
var inSuperset bool
- for _, bSubj := range b.Subjects {
+ for _, bSubj := range b.SubjectsRaw {
if certmagic.MatchWildcard(aSubj, bSubj) {
inSuperset = true
break
@@ -662,7 +666,7 @@ func subjectQualifiesForPublicCert(ap *caddytls.AutomationPolicy, subj string) b
}
func automationPolicyHasAllPublicNames(ap *caddytls.AutomationPolicy) bool {
- for _, subj := range ap.Subjects {
+ for _, subj := range ap.SubjectsRaw {
if !subjectQualifiesForPublicCert(ap, subj) {
return false
}
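
automationPolicyIsSubset (above) treats B as covering A when every subject of A matches some subject of B via certmagic's wildcard matching. A small standalone illustration; the hostnames are examples:

package main

import (
	"fmt"

	"github.com/caddyserver/certmagic"
)

func main() {
	// the coverage test used by the subset check
	fmt.Println(certmagic.MatchWildcard("api.example.com", "*.example.com")) // true
	fmt.Println(certmagic.MatchWildcard("example.com", "*.example.com"))     // false: the wildcard does not cover the apex
	fmt.Println(certmagic.MatchWildcard("a.b.example.com", "*.example.com")) // false: only one label is matched
}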
diff --git a/caddyconfig/httpcaddyfile/tlsapp_test.go b/caddyconfig/httpcaddyfile/tlsapp_test.go
index 1925e02..d8edbdf 100644
--- a/caddyconfig/httpcaddyfile/tlsapp_test.go
+++ b/caddyconfig/httpcaddyfile/tlsapp_test.go
@@ -47,8 +47,8 @@ func TestAutomationPolicyIsSubset(t *testing.T) {
expect: false,
},
} {
- apA := &caddytls.AutomationPolicy{Subjects: test.a}
- apB := &caddytls.AutomationPolicy{Subjects: test.b}
+ apA := &caddytls.AutomationPolicy{SubjectsRaw: test.a}
+ apB := &caddytls.AutomationPolicy{SubjectsRaw: test.b}
if actual := automationPolicyIsSubset(apA, apB); actual != test.expect {
t.Errorf("Test %d: Expected %t but got %t (A: %v B: %v)", i, test.expect, actual, test.a, test.b)
}
diff --git a/caddyconfig/httploader.go b/caddyconfig/httploader.go
index 7c4dc23..e0ce4eb 100644
--- a/caddyconfig/httploader.go
+++ b/caddyconfig/httploader.go
@@ -30,8 +30,14 @@ func init() {
caddy.RegisterModule(HTTPLoader{})
}
-// HTTPLoader can load Caddy configs over HTTP(S). It can adapt the config
-// based on the Content-Type header of the HTTP response.
+// HTTPLoader can load Caddy configs over HTTP(S).
+//
+// If the response is not a JSON config, a config adapter must be specified
+// either in the loader config (`adapter`), or in the Content-Type HTTP header
+// returned in the HTTP response from the server. The Content-Type header is
+// read just like the admin API's `/load` endpoint. If you don't have control
+// over the HTTP server (but can still trust its response), you can override
+// the Content-Type header by setting the `adapter` property in this config.
type HTTPLoader struct {
// The method for the request. Default: GET
Method string `json:"method,omitempty"`
@@ -45,6 +51,11 @@ type HTTPLoader struct {
// Maximum time allowed for a complete connection and request.
Timeout caddy.Duration `json:"timeout,omitempty"`
+ // The name of the config adapter to use, if any. Only needed
+ // if the HTTP response is not a JSON config and if the server's
+ // Content-Type header is missing or incorrect.
+ Adapter string `json:"adapter,omitempty"`
+
TLS *struct {
// Present this instance's managed remote identity credentials to the server.
UseServerIdentity bool `json:"use_server_identity,omitempty"`
@@ -108,7 +119,12 @@ func (hl HTTPLoader) LoadConfig(ctx caddy.Context) ([]byte, error) {
return nil, err
}
- result, warnings, err := adaptByContentType(resp.Header.Get("Content-Type"), body)
+ // adapt the config based on either manually-configured adapter or server's response header
+ ct := resp.Header.Get("Content-Type")
+ if hl.Adapter != "" {
+ ct = "text/" + hl.Adapter
+ }
+ result, warnings, err := adaptByContentType(ct, body)
if err != nil {
return nil, err
}
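
A hedged sketch of where the new `adapter` field would sit in a pull-based config, assuming the loader is mounted under the admin endpoint's config settings (admin > config > load); the URL is a placeholder:

package main

import (
	"encoding/json"
	"fmt"
)

// pullConfig is a hypothetical bootstrap config: Caddy loads it, then fetches
// the real config from the given URL and runs it through the caddyfile adapter
// because the response body is not JSON.
const pullConfig = `{
	"admin": {
		"config": {
			"load": {
				"module": "http",
				"url": "https://config.internal.example/Caddyfile",
				"adapter": "caddyfile"
			}
		}
	}
}`

func main() {
	var v map[string]any
	if err := json.Unmarshal([]byte(pullConfig), &v); err != nil {
		panic(err)
	}
	fmt.Println("bootstrap config is valid JSON:", v["admin"] != nil)
}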