path: root/caddyconfig/caddyfile/lexer.go
author     Matthew Holt <mholt@users.noreply.github.com>  2019-08-09 12:05:47 -0600
committer  Matthew Holt <mholt@users.noreply.github.com>  2019-08-09 12:05:47 -0600
commit     ab885f07b844fd60adb9d49ed7884f3cd2d939a7 (patch)
tree       8827ad88cf3da8982154e2fda46f53274342785d /caddyconfig/caddyfile/lexer.go
parent     4950ce485f7d931890fcfd2ee287b6df1b5db435 (diff)
Implement config adapters and beginning of Caddyfile adapter
Along with several other changes, such as renaming caddyhttp.ServerRoute to caddyhttp.Route, exporting some types that were not exported before, and tweaking the caddytls TLS values to be more consistent. Notably, we also now disable automatic cert management for names which already have a cert (manually) loaded into the cache. These names no longer need to be specified in the "skip_certificates" field of the automatic HTTPS config, because they will be skipped automatically.
Diffstat (limited to 'caddyconfig/caddyfile/lexer.go')
-rwxr-xr-x  caddyconfig/caddyfile/lexer.go  150
1 file changed, 150 insertions(+), 0 deletions(-)
diff --git a/caddyconfig/caddyfile/lexer.go b/caddyconfig/caddyfile/lexer.go
new file mode 100755
index 0000000..efe648d
--- /dev/null
+++ b/caddyconfig/caddyfile/lexer.go
@@ -0,0 +1,150 @@
+// Copyright 2015 Light Code Labs, LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package caddyfile
+
+import (
+ "bufio"
+ "io"
+ "unicode"
+)
+
+type (
+ // lexer is a utility which can get values, token by
+ // token, from a Reader. A token is a word, and tokens
+ // are separated by whitespace. A word can be enclosed
+ // in quotes if it contains whitespace.
+ lexer struct {
+ reader *bufio.Reader
+ token Token
+ line int
+ }
+
+ // Token represents a single parsable unit.
+ Token struct {
+ File string
+ Line int
+ Text string
+ }
+)
+
+// load prepares the lexer to scan an input for tokens.
+// It discards any leading byte order mark.
+func (l *lexer) load(input io.Reader) error {
+ l.reader = bufio.NewReader(input)
+ l.line = 1
+
+ // discard byte order mark, if present
+ firstCh, _, err := l.reader.ReadRune()
+ if err != nil {
+ return err
+ }
+ if firstCh != 0xFEFF {
+ err := l.reader.UnreadRune()
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// next loads the next token into the lexer.
+// A token is delimited by whitespace, unless
+// the token starts with a quote character (")
+// in which case the token goes until the closing
+// quote (the enclosing quotes are not included).
+// Inside quoted strings, quotes and newlines may be
+// escaped with a preceding \ character; no other
+// characters may be escaped. The rest of the line
+// is skipped if a "#" character is read in. Returns
+// true if a token was loaded; false otherwise.
+func (l *lexer) next() bool {
+ var val []rune
+ var comment, quoted, escaped bool
+
+ makeToken := func() bool {
+ l.token.Text = string(val)
+ return true
+ }
+
+ for {
+ ch, _, err := l.reader.ReadRune()
+ if err != nil {
+ if len(val) > 0 {
+ return makeToken()
+ }
+ if err == io.EOF {
+ return false
+ }
+ panic(err)
+ }
+
+ if quoted {
+ if !escaped {
+ if ch == '\\' {
+ escaped = true
+ continue
+ } else if ch == '"' {
+ quoted = false
+ return makeToken()
+ }
+ }
+ if ch == '\n' {
+ l.line++
+ }
+ if escaped {
+ // only escape quotes and newlines
+ if ch != '"' && ch != '\n' {
+ val = append(val, '\\')
+ }
+ }
+ val = append(val, ch)
+ escaped = false
+ continue
+ }
+
+ if unicode.IsSpace(ch) {
+ if ch == '\r' {
+ continue
+ }
+ if ch == '\n' {
+ l.line++
+ comment = false
+ }
+ if len(val) > 0 {
+ return makeToken()
+ }
+ continue
+ }
+
+ if ch == '#' {
+ comment = true
+ }
+
+ if comment {
+ continue
+ }
+
+ if len(val) == 0 {
+ l.token = Token{Line: l.line}
+ if ch == '"' {
+ quoted = true
+ continue
+ }
+ }
+
+ val = append(val, ch)
+ }
+}
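
A minimal usage sketch of the lexer above (not part of this commit; the tokenizeString and exampleUsage names are hypothetical, and since lexer and its methods are unexported this code would have to live inside the caddyfile package):

package caddyfile

import (
	"fmt"
	"strings"
)

// tokenizeString is a hypothetical helper that runs the lexer over an
// input string and collects every token it produces.
func tokenizeString(input string) ([]Token, error) {
	var l lexer
	if err := l.load(strings.NewReader(input)); err != nil {
		return nil, err
	}
	var tokens []Token
	for l.next() {
		tokens = append(tokens, l.token)
	}
	return tokens, nil
}

func exampleUsage() {
	// "localhost" and "root" become separate tokens, the quoted path
	// becomes a single token with its quotes stripped, and the trailing
	// "# docroot" comment is skipped entirely.
	tokens, err := tokenizeString("localhost\nroot \"/var/www/my site\" # docroot\n")
	if err != nil {
		panic(err)
	}
	for _, tok := range tokens {
		fmt.Printf("line %d: %q\n", tok.Line, tok.Text)
	}
}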