path: root/caddyconfig/caddyfile/lexer.go
author	Francis Lavoie <lavofr@gmail.com>	2020-07-20 15:55:51 -0400
committer	GitHub <noreply@github.com>	2020-07-20 13:55:51 -0600
commit	fb9d874fa91c796fdb1c6cb32ab3246e63d38af9 (patch)
tree	7cfaeb520ebbbae73549f0d33f1a5aca337e518a /caddyconfig/caddyfile/lexer.go
parent	6cea1f239d01fc065bc6f4b22d765d89b6db0152 (diff)
caddyfile: Export Tokenize function for lexing (#3549)
Diffstat (limited to 'caddyconfig/caddyfile/lexer.go')
-rwxr-xr-x	caddyconfig/caddyfile/lexer.go	19
1 file changed, 19 insertions, 0 deletions
diff --git a/caddyconfig/caddyfile/lexer.go b/caddyconfig/caddyfile/lexer.go
index 568d15c..188ef06 100755
--- a/caddyconfig/caddyfile/lexer.go
+++ b/caddyconfig/caddyfile/lexer.go
@@ -16,6 +16,7 @@ package caddyfile
 
 import (
 	"bufio"
+	"bytes"
 	"io"
 	"unicode"
 )
@@ -168,3 +169,21 @@ func (l *lexer) next() bool {
 		val = append(val, ch)
 	}
 }
+
+// Tokenize takes bytes as input and lexes it into
+// a list of tokens that can be parsed as a Caddyfile.
+// Also takes a filename to fill the token's File as
+// the source of the tokens, which is important to
+// determine relative paths for `import` directives.
+func Tokenize(input []byte, filename string) ([]Token, error) {
+	l := lexer{}
+	if err := l.load(bytes.NewReader(input)); err != nil {
+		return nil, err
+	}
+	var tokens []Token
+	for l.next() {
+		l.token.File = filename
+		tokens = append(tokens, l.token)
+	}
+	return tokens, nil
+}
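
For reference, a minimal sketch of how a caller outside the package might use the newly exported Tokenize (the sample Caddyfile input here is illustrative, not from the commit):

package main

import (
	"fmt"
	"log"

	"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"
)

func main() {
	// Illustrative Caddyfile contents; any valid config bytes work.
	input := []byte("example.com {\n\treverse_proxy localhost:8080\n}\n")

	// The filename is stored on each token's File field, which is what
	// the parser later uses to resolve relative paths in `import` directives.
	tokens, err := caddyfile.Tokenize(input, "Caddyfile")
	if err != nil {
		log.Fatal(err)
	}
	for _, tok := range tokens {
		fmt.Printf("%s:%d: %s\n", tok.File, tok.Line, tok.Text)
	}
}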