-
Notifications
You must be signed in to change notification settings - Fork 1.2k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
add an initial css lexer+parser+printer (#20)
- v0.25.0
- v0.24.2
- v0.24.1
- v0.24.0
- v0.23.1
- v0.23.0
- v0.22.0
- v0.21.5
- v0.21.4
- v0.21.3
- v0.21.2
- v0.21.1
- v0.21.0
- v0.20.2
- v0.20.1
- v0.20.0
- v0.19.12
- v0.19.11
- v0.19.10
- v0.19.9
- v0.19.8
- v0.19.7
- v0.19.6
- v0.19.5
- v0.19.4
- v0.19.3
- v0.19.2
- v0.19.1
- v0.19.0
- v0.18.20
- v0.18.19
- v0.18.18
- v0.18.17
- v0.18.16
- v0.18.15
- v0.18.14
- v0.18.13
- v0.18.12
- v0.18.11
- v0.18.10
- v0.18.9
- v0.18.8
- v0.18.7
- v0.18.6
- v0.18.5
- v0.18.4
- v0.18.3
- v0.18.2
- v0.18.1
- v0.18.0
- v0.17.19
- v0.17.18
- v0.17.17
- v0.17.16
- v0.17.15
- v0.17.14
- v0.17.13
- v0.17.12
- v0.17.11
- v0.17.10
- v0.17.9
- v0.17.8
- v0.17.7
- v0.17.6
- v0.17.5
- v0.17.4
- v0.17.3
- v0.17.2
- v0.17.1
- v0.17.0
- v0.16.17
- v0.16.16
- v0.16.15
- v0.16.14
- v0.16.13
- v0.16.12
- v0.16.11
- v0.16.10
- v0.16.9
- v0.16.8
- v0.16.7
- v0.16.6
- v0.16.5
- v0.16.4
- v0.16.3
- v0.16.2
- v0.16.1
- v0.16.0
- v0.15.18
- v0.15.17
- v0.15.16
- v0.15.15
- v0.15.14
- v0.15.13
- v0.15.12
- v0.15.11
- v0.15.10
- v0.15.9
- v0.15.8
- v0.15.7
- v0.15.6
- v0.15.5
- v0.15.4
- v0.15.3
- v0.15.2
- v0.15.1
- v0.15.0
- v0.14.54
- v0.14.53
- v0.14.52
- v0.14.51
- v0.14.50
- v0.14.49
- v0.14.48
- v0.14.47
- v0.14.46
- v0.14.45
- v0.14.44
- v0.14.43
- v0.14.42
- v0.14.41
- v0.14.40
- v0.14.39
- v0.14.38
- v0.14.37
- v0.14.36
- v0.14.35
- v0.14.34
- v0.14.33
- v0.14.32
- v0.14.31
- v0.14.30
- v0.14.29
- v0.14.28
- v0.14.27
- v0.14.26
- v0.14.25
- v0.14.24
- v0.14.23
- v0.14.22
- v0.14.21
- v0.14.20
- v0.14.19
- v0.14.18
- v0.14.17
- v0.14.16
- v0.14.15
- v0.14.14
- v0.14.13
- v0.14.12
- v0.14.11
- v0.14.10
- v0.14.9
- v0.14.8
- v0.14.7
- v0.14.6
- v0.14.5
- v0.14.4
- v0.14.3
- v0.14.2
- v0.14.1
- v0.14.0
- v0.13.15
- v0.13.14
- v0.13.13
- v0.13.12
- v0.13.11
- v0.13.10
- v0.13.9
- v0.13.8
- v0.13.7
- v0.13.6
- v0.13.5
- v0.13.4
- v0.13.3
- v0.13.2
- v0.13.1
- v0.13.0
- v0.12.29
- v0.12.28
- v0.12.27
- v0.12.26
- v0.12.25
- v0.12.24
- v0.12.23
- v0.12.22
- v0.12.21
- v0.12.20
- v0.12.19
- v0.12.18
- v0.12.17
- v0.12.16
- v0.12.15
- v0.12.14
- v0.12.13
- v0.12.12
- v0.12.11
- v0.12.10
- v0.12.9
- v0.12.8
- v0.12.7
- v0.12.6
- v0.12.5
- v0.12.4
- v0.12.3
- v0.12.2
- v0.12.1
- v0.12.0
- v0.11.23
- v0.11.22
- v0.11.21
- v0.11.20
- v0.11.19
- v0.11.18
- v0.11.17
- v0.11.16
- v0.11.15
- v0.11.14
- v0.11.13
- v0.11.12
- v0.11.11
- v0.11.10
- v0.11.9
- v0.11.8
- v0.11.7
- v0.11.6
- v0.11.5
- v0.11.4
- v0.11.3
- v0.11.2
- v0.11.1
- v0.11.0
- v0.10.2
- v0.10.1
- v0.10.0
- v0.9.7
- v0.9.6
- v0.9.5
- v0.9.4
- v0.9.3
- v0.9.2
- v0.9.1
- v0.9.0
- v0.8.57
- v0.8.56
- v0.8.55
- v0.8.54
- v0.8.53
- v0.8.52
- v0.8.51
- v0.8.50
- v0.8.49
- v0.8.48
- v0.8.47
- v0.8.46
- v0.8.45
- v0.8.44
- v0.8.43
- v0.8.42
- v0.8.41
- v0.8.40
- v0.8.39
- v0.8.38
- v0.8.37
- v0.8.36
- v0.8.35
- v0.8.34
- v0.8.33
- v0.8.32
- v0.8.31
- v0.8.30
- v0.8.29
- v0.8.28
- v0.8.27
- v0.8.26
- v0.8.25
- v0.8.24
- v0.8.23
- v0.8.22
- v0.8.21
- v0.8.20
- v0.8.19
- v0.8.18
- v0.8.17
- v0.8.16
- v0.8.15
- v0.8.14
- v0.8.13
- v0.8.12
- v0.8.11
- v0.8.10
- v0.8.9
- v0.8.8
- v0.8.7
- v0.8.6
- v0.8.5
- v0.8.4
- v0.8.3
- v0.8.2
- v0.8.1
- v0.8.0
- v0.7.22
- v0.7.21
- v0.7.20
- v0.7.19
- v0.7.18
- v0.7.17
- v0.7.16
- v0.7.15
- v0.7.14
- v0.7.13
- v0.7.12
- v0.7.11
- v0.7.10
- v0.7.9
- v0.7.8
- v0.7.7
- v0.7.6
- v0.7.5
- v0.7.4
- v0.7.3
- v0.7.2
Showing
6 changed files
with
1,964 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,111 @@ | ||
package css_ast | ||
|
||
import ( | ||
"github.com/evanw/esbuild/internal/css_lexer" | ||
"github.com/evanw/esbuild/internal/logger" | ||
) | ||
|
||
// AST is the root of a parsed CSS file: a flat list of top-level rules.
type AST struct {
	Rules []R
}

// This interface is never called. Its purpose is to encode a variant type in
// Go's type system.
type R interface {
	isRule()
}

// RAtImport is an "@import" rule whose prelude was a single string or URL
// token. Only the decoded import path is retained.
type RAtImport struct {
	PathText  string       // the decoded (unquoted) import path
	PathRange logger.Range // source range of the path token, for diagnostics
}

// RKnownAt is an at-rule the parser recognizes (e.g. "@media", "@keyframes");
// its block, when present, has been parsed into child rules.
type RKnownAt struct {
	Name    css_lexer.Token   // the at-keyword token itself
	Prelude []css_lexer.Token // raw tokens between the name and the block/semicolon
	Rules   []R               // nil when the rule had no block
}

// RUnknownAt is an unrecognized (or malformed) at-rule. Its block is kept as
// raw tokens so it can be printed back out unchanged.
type RUnknownAt struct {
	Name    css_lexer.Token
	Prelude []css_lexer.Token
	Block   []css_lexer.Token // nil when there was no block
}

// RSelector is a qualified rule whose prelude parsed as a selector list.
type RSelector struct {
	Selectors []ComplexSelector
	Rules     []R // declarations (and nested rules) inside the block
}

// RQualified is a qualified rule whose prelude did not parse as a selector
// list; the prelude is preserved as raw tokens.
type RQualified struct {
	Prelude []css_lexer.Token
	Rules   []R
}

// RDeclaration is a "key: value" declaration inside a block.
type RDeclaration struct {
	Key       css_lexer.Token
	Value     []css_lexer.Token // value tokens, surrounding whitespace trimmed
	Important bool              // true when the value ended with "!important"
}

// RBadDeclaration holds the raw tokens of a declaration that failed to parse,
// so the output can preserve them.
type RBadDeclaration struct {
	Tokens []css_lexer.Token
}

func (*RAtImport) isRule()       {}
func (*RKnownAt) isRule()        {}
func (*RUnknownAt) isRule()      {}
func (*RSelector) isRule()       {}
func (*RQualified) isRule()      {}
func (*RDeclaration) isRule()    {}
func (*RBadDeclaration) isRule() {}
|
||
// ComplexSelector is a sequence of compound selectors joined by combinators,
// e.g. "a > b ~ c".
type ComplexSelector struct {
	Selectors []CompoundSelector
}

// CompoundSelector is one unit of a complex selector: an optional type
// selector followed by any number of subclass selectors, e.g. "div.cls#id".
type CompoundSelector struct {
	Combinator           string // Optional, may be ""
	TypeSelector         *NamespacedName
	SubclassSelectors    []SS
	PseudoClassSelectors []SSPseudoClass // If present, these follow a ":" character
}

// NamespacedName is a type selector such as "a", "*", or "svg|circle".
type NamespacedName struct {
	// If present, this is an identifier or "*" or "" and is followed by a "|" character
	NamespacePrefix *string

	// This is an identifier or "*" or "&"
	Name string
}

// This interface is never called. Its purpose is to encode a variant type in
// Go's type system.
type SS interface {
	isSubclassSelector()
}

// SSHash is an ID selector, e.g. "#id" (stored without the "#").
type SSHash struct {
	Name string
}

// SSClass is a class selector, e.g. ".cls" (stored without the ".").
type SSClass struct {
	Name string
}

// SSAttribute is an attribute selector, e.g. "[a|b^=c i]".
type SSAttribute struct {
	NamespacedName  NamespacedName
	MatcherOp       string // "", "=", "~=", "|=", "^=", "$=", or "*="
	MatcherValue    string
	MatcherModifier byte // 0 if absent, otherwise one of 'i', 'I', 's', 'S'
}

// SSPseudoClass is a pseudo-class or pseudo-element selector, e.g. ":hover"
// or ":not(...)" (with the arguments kept as raw tokens).
type SSPseudoClass struct {
	Name string
	Args []css_lexer.Token
}

func (*SSHash) isSubclassSelector()        {}
func (*SSClass) isSubclassSelector()       {}
func (*SSAttribute) isSubclassSelector()   {}
func (*SSPseudoClass) isSubclassSelector() {}
Large diffs are not rendered by default.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,104 @@ | ||
package css_lexer | ||
|
||
import ( | ||
"testing" | ||
|
||
"github.com/evanw/esbuild/internal/logger" | ||
"github.com/evanw/esbuild/internal/test" | ||
) | ||
|
||
func lexToken(t *testing.T, contents string) T { | ||
log := logger.NewDeferLog() | ||
tokens := Tokenize(log, test.SourceForTest(contents)) | ||
if len(tokens) > 0 { | ||
return tokens[0].Kind | ||
} | ||
return TEndOfFile | ||
} | ||
|
||
// TestTokens checks that each input lexes to the expected token kind. The
// third column records the token kind's human-readable name; it documents the
// table but is not asserted here.
func TestTokens(t *testing.T) {
	expected := []struct {
		contents string
		token    T
		text     string
	}{
		{"", TEndOfFile, "end of file"},
		{"@media", TAtKeyword, "@-keyword"},
		{"url(x y", TBadURL, "bad URL token"},
		{"-->", TCDC, "\"-->\""},
		{"<!--", TCDO, "\"<!--\""},
		{"}", TCloseBrace, "\"}\""},
		{"]", TCloseBracket, "\"]\""},
		{")", TCloseParen, "\")\""},
		{":", TColon, "\":\""},
		{",", TComma, "\",\""},
		{"?", TDelim, "delimiter"},
		{"&", TDelimAmpersand, "\"&\""},
		{"*", TDelimAsterisk, "\"*\""},
		{"|", TDelimBar, "\"|\""},
		{"^", TDelimCaret, "\"^\""},
		{"$", TDelimDollar, "\"$\""},
		{".", TDelimDot, "\".\""},
		{"=", TDelimEquals, "\"=\""},
		{"!", TDelimExclamation, "\"!\""},
		{">", TDelimGreaterThan, "\">\""},
		{"+", TDelimPlus, "\"+\""},
		{"~", TDelimTilde, "\"~\""},
		{"1px", TDimension, "dimension"},
		{"max(", TFunction, "function token"},
		{"#0", THash, "hash token"},
		{"#id", THashID, "hash token"},
		{"name", TIdent, "identifier"},
		{"123", TNumber, "number"},
		{"{", TOpenBrace, "\"{\""},
		{"[", TOpenBracket, "\"[\""},
		{"(", TOpenParen, "\"(\""},
		{"50%", TPercentage, "percentage"},
		{";", TSemicolon, "\";\""},
		{"'abc'", TString, "string token"},
		{"url(test)", TURL, "URL token"},
		{" ", TWhitespace, "whitespace"},
	}

	// One subtest per table entry, named after the input
	for _, it := range expected {
		contents := it.contents
		token := it.token
		t.Run(contents, func(t *testing.T) {
			test.AssertEqual(t, lexToken(t, contents), token)
		})
	}
}
|
||
func TestStringParsing(t *testing.T) { | ||
test.AssertEqual(t, ContentsOfStringToken("\"foo\""), "foo") | ||
test.AssertEqual(t, ContentsOfStringToken("\"f\\oo\""), "foo") | ||
test.AssertEqual(t, ContentsOfStringToken("\"f\\\"o\""), "f\"o") | ||
test.AssertEqual(t, ContentsOfStringToken("\"f\\\\o\""), "f\\o") | ||
test.AssertEqual(t, ContentsOfStringToken("\"f\\\no\""), "f\no") | ||
test.AssertEqual(t, ContentsOfStringToken("\"f\\\ro\""), "f\ro") | ||
test.AssertEqual(t, ContentsOfStringToken("\"f\\\vo\""), "f\vo") | ||
test.AssertEqual(t, ContentsOfStringToken("\"f\\6fo\""), "foo") | ||
test.AssertEqual(t, ContentsOfStringToken("\"f\\6f o\""), "foo") | ||
test.AssertEqual(t, ContentsOfStringToken("\"f\\6f o\""), "fo o") | ||
} | ||
|
||
func TestURLParsing(t *testing.T) { | ||
test.AssertEqual(t, ContentsOfURLToken("url(foo)"), "foo") | ||
test.AssertEqual(t, ContentsOfURLToken("url( foo\t\t)"), "foo") | ||
test.AssertEqual(t, ContentsOfURLToken("url(f\\oo)"), "foo") | ||
test.AssertEqual(t, ContentsOfURLToken("url(f\\\"o)"), "f\"o") | ||
test.AssertEqual(t, ContentsOfURLToken("url(f\\'o)"), "f'o") | ||
test.AssertEqual(t, ContentsOfURLToken("url(f\\)o)"), "f)o") | ||
test.AssertEqual(t, ContentsOfURLToken("url(f\\6fo)"), "foo") | ||
test.AssertEqual(t, ContentsOfURLToken("url(f\\6f o)"), "foo") | ||
test.AssertEqual(t, ContentsOfURLToken("url(f\\6f o)"), "fo o") | ||
} | ||
|
||
func TestStringQuoting(t *testing.T) { | ||
test.AssertEqual(t, QuoteForStringToken("foo"), "\"foo\"") | ||
test.AssertEqual(t, QuoteForStringToken("f\"o"), "\"f\\\"o\"") | ||
test.AssertEqual(t, QuoteForStringToken("f\\o"), "\"f\\\\o\"") | ||
test.AssertEqual(t, QuoteForStringToken("f\no"), "\"f\\\no\"") | ||
test.AssertEqual(t, QuoteForStringToken("f\ro"), "\"f\\\ro\"") | ||
test.AssertEqual(t, QuoteForStringToken("f\fo"), "\"f\\\fo\"") | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,410 @@ | ||
package css_parser | ||
|
||
import ( | ||
"fmt" | ||
"strings" | ||
|
||
"github.com/evanw/esbuild/internal/css_ast" | ||
"github.com/evanw/esbuild/internal/css_lexer" | ||
"github.com/evanw/esbuild/internal/logger" | ||
) | ||
|
||
// parser holds the state for a single CSS parse. All tokens are produced up
// front by the lexer; the parser then walks the token slice by index.
type parser struct {
	log       logger.Log        // destination for diagnostics
	source    logger.Source     // the file being parsed
	tokens    []css_lexer.Token // all tokens from the lexer
	stack     []css_lexer.T     // scratch stack of expected closing tokens (see parseAnyValue)
	index     int               // index of the current token
	end       int               // number of usable tokens (may be less than len(tokens))
	prevError logger.Loc        // location of the last reported error, to avoid duplicate reports
}
|
||
// Parse tokenizes and parses an entire CSS source file into an AST. Errors
// are reported to the log; parsing continues past them.
func Parse(log logger.Log, source logger.Source) css_ast.AST {
	p := parser{
		log:       log,
		source:    source,
		tokens:    css_lexer.Tokenize(log, source),
		prevError: logger.Loc{Start: -1}, // -1 so an error at offset 0 is still reported
	}
	p.end = len(p.tokens)
	tree := css_ast.AST{}
	tree.Rules = p.parseListOfRules(ruleContext{
		isTopLevel:     true,
		parseSelectors: true,
	})
	// Everything should have been consumed
	p.expect(css_lexer.TEndOfFile)
	return tree
}
|
||
// advance moves to the next token, stopping at the end of the usable range.
func (p *parser) advance() {
	if p.index < p.end {
		p.index++
	}
}

// at returns the token at the given index, synthesizing an end-of-file token
// (located at the cutoff token or at the end of the source) when the index is
// past the usable range.
func (p *parser) at(index int) css_lexer.Token {
	if index < p.end {
		return p.tokens[index]
	}
	if p.end < len(p.tokens) {
		return css_lexer.Token{
			Kind:  css_lexer.TEndOfFile,
			Range: logger.Range{Loc: p.tokens[p.end].Range.Loc},
		}
	}
	return css_lexer.Token{
		Kind:  css_lexer.TEndOfFile,
		Range: logger.Range{Loc: logger.Loc{Start: int32(len(p.source.Contents))}},
	}
}

// current returns the token at the current position.
func (p *parser) current() css_lexer.Token {
	return p.at(p.index)
}

// next returns the token after the current one (one-token lookahead).
func (p *parser) next() css_lexer.Token {
	return p.at(p.index + 1)
}

// text returns the raw source text of the current token.
func (p *parser) text() string {
	return p.current().Raw(p.source.Contents)
}

// peek reports whether the current token has the given kind.
func (p *parser) peek(kind css_lexer.T) bool {
	return kind == p.current().Kind
}

// eat advances past the current token if it has the given kind, reporting
// whether it did so.
func (p *parser) eat(kind css_lexer.T) bool {
	if p.peek(kind) {
		p.advance()
		return true
	}
	return false
}

// expect is like eat, but also logs an error when the current token does not
// have the expected kind. At most one error is reported per source location.
func (p *parser) expect(kind css_lexer.T) bool {
	if p.eat(kind) {
		return true
	}
	var text string
	if p.peek(css_lexer.TEndOfFile) {
		text = fmt.Sprintf("Expected %s but found end of file", kind.String())
	} else {
		text = fmt.Sprintf("Expected %s but found %q", kind.String(), p.text())
	}
	r := p.current().Range
	// Only report one error per location to avoid error cascades
	if r.Loc.Start > p.prevError.Start {
		p.log.AddRangeError(&p.source, r, text)
		p.prevError = r.Loc
	}
	return false
}

// unexpected logs an error about the current token without consuming it.
// At most one error is reported per source location.
func (p *parser) unexpected() {
	var text string
	if p.peek(css_lexer.TEndOfFile) {
		text = "Unexpected end of file"
	} else {
		text = fmt.Sprintf("Unexpected %q", p.text())
	}
	r := p.current().Range
	// Only report one error per location to avoid error cascades
	if r.Loc.Start > p.prevError.Start {
		p.log.AddRangeError(&p.source, r, text)
		p.prevError = r.Loc
	}
}
|
||
// ruleContext controls how a list of rules is parsed.
type ruleContext struct {
	isTopLevel     bool // top level skips HTML comment tokens ("<!--" and "-->")
	parseSelectors bool // try to parse qualified rules as selector rules
}

// parseListOfRules parses rules until end-of-file or a closing "}".
func (p *parser) parseListOfRules(context ruleContext) []css_ast.R {
	rules := []css_ast.R{}
	for {
		kind := p.current().Kind
		switch {
		case kind == css_lexer.TEndOfFile || kind == css_lexer.TCloseBrace:
			return rules

		case kind == css_lexer.TWhitespace:
			p.advance()

		case kind == css_lexer.TAtKeyword:
			rules = append(rules, p.parseAtRule(atRuleContext{}))

		// HTML comment delimiters are only skipped at the top level
		case (kind == css_lexer.TCDO || kind == css_lexer.TCDC) && context.isTopLevel:
			p.advance()

		default:
			if context.parseSelectors {
				rules = append(rules, p.parseSelectorRule())
			} else {
				rules = append(rules, p.parseQualifiedRuleFrom(p.index))
			}
		}
	}
}
|
||
// parseListOfDeclarations parses the contents of a declaration block until
// end-of-file or the closing "}" (which is left for the caller to consume).
func (p *parser) parseListOfDeclarations() (list []css_ast.R) {
	for {
		switch p.current().Kind {
		case css_lexer.TWhitespace, css_lexer.TSemicolon:
			p.advance()

		case css_lexer.TEndOfFile, css_lexer.TCloseBrace:
			return

		case css_lexer.TAtKeyword:
			list = append(list, p.parseAtRule(atRuleContext{
				isDeclarationList: true,
			}))

		case css_lexer.TDelimAmpersand:
			// Reference: https://drafts.csswg.org/css-nesting-1/
			list = append(list, p.parseSelectorRule())

		default:
			list = append(list, p.parseDeclaration())
		}
	}
}
|
||
// atRuleKind classifies how a known at-rule's body should be parsed.
type atRuleKind uint8

const (
	atRuleUnknown        atRuleKind = iota // not a recognized at-rule
	atRuleQualifiedRules                   // block contains qualified rules (e.g. "@keyframes")
	atRuleInheritContext                   // block contains whatever the enclosing context contains
	atRuleEmpty                            // no block; terminated by a semicolon
)

// specialAtRules maps recognized at-rule names to how their bodies are
// parsed. Names absent from the map get the zero value, atRuleUnknown.
var specialAtRules = map[string]atRuleKind{
	"@keyframes": atRuleQualifiedRules,

	"@document": atRuleInheritContext,
	"@media":    atRuleInheritContext,
	"@scope":    atRuleInheritContext,
	"@supports": atRuleInheritContext,

	"@charset":   atRuleEmpty,
	"@import":    atRuleEmpty,
	"@namespace": atRuleEmpty,
}

// atRuleContext tells parseAtRule what kind of block it appears inside.
type atRuleContext struct {
	isDeclarationList bool // true when inside a declaration block instead of a rule list
}
|
||
func (p *parser) parseAtRule(context atRuleContext) css_ast.R { | ||
// Parse the name | ||
name := p.current() | ||
text := p.text() | ||
kind := specialAtRules[text] | ||
p.advance() | ||
|
||
// Parse the prelude | ||
preludeStart := p.index | ||
for !p.peek(css_lexer.TOpenBrace) { | ||
if p.peek(css_lexer.TSemicolon) || p.peek(css_lexer.TCloseBrace) { | ||
prelude := p.tokens[preludeStart:p.index] | ||
|
||
// Report an error for rules that should have blocks | ||
if kind != atRuleEmpty && kind != atRuleUnknown { | ||
p.expect(css_lexer.TOpenBrace) | ||
p.eat(css_lexer.TSemicolon) | ||
return &css_ast.RUnknownAt{Name: name, Prelude: prelude} | ||
} | ||
|
||
// Special-case certain rules | ||
if text == "@import" { | ||
tokens := trimWhitespace(prelude) | ||
if len(tokens) == 1 { | ||
t := tokens[0] | ||
switch t.Kind { | ||
case css_lexer.TString: | ||
path := css_lexer.ContentsOfStringToken(t.Raw(p.source.Contents)) | ||
p.eat(css_lexer.TSemicolon) | ||
return &css_ast.RAtImport{PathText: path, PathRange: t.Range} | ||
|
||
case css_lexer.TURL: | ||
path := css_lexer.ContentsOfURLToken(t.Raw(p.source.Contents)) | ||
p.eat(css_lexer.TSemicolon) | ||
return &css_ast.RAtImport{PathText: path, PathRange: t.Range} | ||
} | ||
} | ||
} | ||
|
||
p.eat(css_lexer.TSemicolon) | ||
return &css_ast.RKnownAt{Name: name, Prelude: prelude} | ||
} | ||
|
||
p.parseComponentValue() | ||
} | ||
prelude := p.tokens[preludeStart:p.index] | ||
blockStart := p.index | ||
|
||
// Report an error for rules that shouldn't have blocks | ||
if kind == atRuleEmpty { | ||
p.expect(css_lexer.TSemicolon) | ||
p.parseBlock(css_lexer.TCloseBrace) | ||
block := p.tokens[blockStart:p.index] | ||
return &css_ast.RUnknownAt{Name: name, Prelude: prelude, Block: block} | ||
} | ||
|
||
// Parse known rules whose blocks consist of qualified rules | ||
if kind == atRuleQualifiedRules { | ||
p.advance() | ||
rules := p.parseListOfRules(ruleContext{}) | ||
p.expect(css_lexer.TCloseBrace) | ||
return &css_ast.RKnownAt{Name: name, Prelude: prelude, Rules: rules} | ||
} | ||
|
||
// Parse known rules whose blocks consist of whatever the current context is | ||
if kind == atRuleInheritContext { | ||
p.advance() | ||
var rules []css_ast.R | ||
if context.isDeclarationList { | ||
rules = p.parseListOfDeclarations() | ||
} else { | ||
rules = p.parseListOfRules(ruleContext{ | ||
parseSelectors: true, | ||
}) | ||
} | ||
p.expect(css_lexer.TCloseBrace) | ||
return &css_ast.RKnownAt{Name: name, Prelude: prelude, Rules: rules} | ||
} | ||
|
||
// Otherwise, parse an unknown rule | ||
p.parseBlock(css_lexer.TCloseBrace) | ||
block := p.tokens[blockStart:p.index] | ||
return &css_ast.RUnknownAt{Name: name, Prelude: prelude, Block: block} | ||
} | ||
|
||
// parseSelectorRule parses a qualified rule, preferring to interpret the
// prelude as a selector list. If the prelude is not a valid selector list it
// falls back to a generic qualified rule starting from the saved position.
func (p *parser) parseSelectorRule() css_ast.R {
	// Remember where the prelude started so we can rewind the interpretation
	// (not the index) on failure
	preludeStart := p.index

	// Try parsing the prelude as a selector list
	if list, ok := p.parseSelectorList(); ok {
		rule := css_ast.RSelector{Selectors: list}
		if p.expect(css_lexer.TOpenBrace) {
			rule.Rules = p.parseListOfDeclarations()
			p.expect(css_lexer.TCloseBrace)
			return &rule
		}
	}

	// Otherwise, parse a generic qualified rule
	return p.parseQualifiedRuleFrom(preludeStart)
}
|
||
func (p *parser) parseQualifiedRuleFrom(preludeStart int) *css_ast.RQualified { | ||
for !p.peek(css_lexer.TOpenBrace) { | ||
p.parseComponentValue() | ||
} | ||
rule := css_ast.RQualified{ | ||
Prelude: p.tokens[preludeStart:p.index], | ||
} | ||
if p.expect(css_lexer.TOpenBrace) { | ||
rule.Rules = p.parseListOfDeclarations() | ||
p.expect(css_lexer.TCloseBrace) | ||
} | ||
return &rule | ||
} | ||
|
||
// parseDeclaration parses one "key: value" declaration, stopping at ";", "}",
// or end-of-file. If the leading "ident ':'" is malformed it still consumes
// the rest of the declaration and returns an RBadDeclaration so the raw
// tokens can be printed back out.
func (p *parser) parseDeclaration() css_ast.R {
	// Parse the key
	keyStart := p.index
	ok := false
	if p.expect(css_lexer.TIdent) {
		p.eat(css_lexer.TWhitespace)
		if p.expect(css_lexer.TColon) {
			ok = true
		}
	} else {
		// Skip the bad token so the value loop below makes progress
		p.advance()
	}

	// Parse the value
	valueStart := p.index
stop:
	for {
		switch p.current().Kind {
		case css_lexer.TEndOfFile, css_lexer.TSemicolon, css_lexer.TCloseBrace:
			break stop

		default:
			p.parseComponentValue()
		}
	}

	// Stop now if this is not a valid declaration
	if !ok {
		return &css_ast.RBadDeclaration{
			Tokens: p.tokens[keyStart:p.index],
		}
	}

	// Remove leading and trailing whitespace from the value
	value := trimWhitespace(p.tokens[valueStart:p.index])

	// Remove trailing "!important" (case-insensitive)
	important := false
	if i := len(value) - 2; i >= 0 && value[i].Kind == css_lexer.TDelimExclamation {
		if t := value[i+1]; t.Kind == css_lexer.TIdent && strings.EqualFold(t.Raw(p.source.Contents), "important") {
			value = value[:i]
			important = true
		}
	}

	return &css_ast.RDeclaration{
		Key:       p.tokens[keyStart],
		Value:     value,
		Important: important,
	}
}
|
||
func (p *parser) parseComponentValue() { | ||
switch p.current().Kind { | ||
case css_lexer.TFunction: | ||
p.parseBlock(css_lexer.TCloseParen) | ||
|
||
case css_lexer.TOpenParen: | ||
p.parseBlock(css_lexer.TCloseParen) | ||
|
||
case css_lexer.TOpenBrace: | ||
p.parseBlock(css_lexer.TCloseBrace) | ||
|
||
case css_lexer.TOpenBracket: | ||
p.parseBlock(css_lexer.TCloseBracket) | ||
|
||
case css_lexer.TEndOfFile: | ||
p.unexpected() | ||
|
||
default: | ||
p.advance() | ||
} | ||
} | ||
|
||
// parseBlock skips a balanced block: it consumes the current (opening) token,
// then component values until the matching closing token is consumed. An
// unterminated block at end-of-file is reported as an error.
func (p *parser) parseBlock(close css_lexer.T) {
	p.advance() // consume the opening token

	for !p.eat(close) {
		if p.peek(css_lexer.TEndOfFile) {
			p.expect(close) // report the missing closing token
			return
		}

		p.parseComponentValue()
	}
}
|
||
func trimWhitespace(tokens []css_lexer.Token) []css_lexer.Token { | ||
if len(tokens) > 0 && tokens[0].Kind == css_lexer.TWhitespace { | ||
tokens = tokens[1:] | ||
} | ||
if i := len(tokens) - 1; i >= 0 && tokens[i].Kind == css_lexer.TWhitespace { | ||
tokens = tokens[:i] | ||
} | ||
return tokens | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,322 @@ | ||
package css_parser | ||
|
||
import ( | ||
"strings" | ||
|
||
"github.com/evanw/esbuild/internal/css_ast" | ||
"github.com/evanw/esbuild/internal/css_lexer" | ||
) | ||
|
||
// parseSelectorList parses a comma-separated list of complex selectors. It
// reports ok=false as soon as any selector fails to parse.
func (p *parser) parseSelectorList() (list []css_ast.ComplexSelector, ok bool) {
	// Parse the first selector
	p.eat(css_lexer.TWhitespace)
	sel, good := p.parseComplexSelector()
	if !good {
		return
	}
	list = append(list, sel)

	// Parse the remaining selectors
	for {
		p.eat(css_lexer.TWhitespace)
		if !p.eat(css_lexer.TComma) {
			break
		}
		p.eat(css_lexer.TWhitespace)
		sel, good := p.parseComplexSelector()
		if !good {
			return
		}
		list = append(list, sel)
	}

	ok = true
	return
}
|
||
// parseComplexSelector parses a sequence of compound selectors separated by
// optional combinators, stopping before ",", "{", or end-of-file.
func (p *parser) parseComplexSelector() (result css_ast.ComplexSelector, ok bool) {
	// Parent
	sel, good := p.parseCompoundSelector()
	if !good {
		return
	}
	result.Selectors = append(result.Selectors, sel)

	for {
		p.eat(css_lexer.TWhitespace)
		if p.peek(css_lexer.TEndOfFile) || p.peek(css_lexer.TComma) || p.peek(css_lexer.TOpenBrace) {
			break
		}

		// Optional combinator
		combinator := p.parseCombinator()
		if combinator != "" {
			p.eat(css_lexer.TWhitespace)
		}

		// Child; the combinator is stored on the selector that follows it
		sel, good := p.parseCompoundSelector()
		if !good {
			return
		}
		sel.Combinator = combinator
		result.Selectors = append(result.Selectors, sel)
	}

	ok = true
	return
}
|
||
// parseCompoundSelector parses one compound selector: an optional (possibly
// namespaced) type selector, then subclass selectors (#id, .class, [attr],
// :pseudo), then pseudo-element selectors after "::". At least one part must
// be present or an error is reported.
func (p *parser) parseCompoundSelector() (sel css_ast.CompoundSelector, ok bool) {
	// Parse the type selector
	switch p.current().Kind {
	case css_lexer.TDelimAmpersand:
		// This is an extension: https://drafts.csswg.org/css-nesting-1/
		sel.TypeSelector = &css_ast.NamespacedName{Name: "&"}
		p.advance()

	case css_lexer.TDelimBar, css_lexer.TIdent, css_lexer.TDelimAsterisk:
		nsName := css_ast.NamespacedName{}
		if !p.peek(css_lexer.TDelimBar) {
			nsName.Name = p.text()
			p.advance()
		}
		// Handle "ns|name", "*|name", and "|name" (empty prefix)
		if p.eat(css_lexer.TDelimBar) {
			if !p.peek(css_lexer.TIdent) && !p.peek(css_lexer.TDelimAsterisk) {
				p.expect(css_lexer.TIdent)
				return
			}
			prefix := nsName.Name
			nsName.NamespacePrefix = &prefix
			nsName.Name = p.text()
			p.advance()
		}
		sel.TypeSelector = &nsName
	}

	// Parse the subclass selectors
subclassSelectors:
	for {
		switch p.current().Kind {
		case css_lexer.THashID:
			name := p.text()[1:] // drop the leading "#"
			sel.SubclassSelectors = append(sel.SubclassSelectors, &css_ast.SSHash{Name: name})
			p.advance()

		case css_lexer.TDelimDot:
			p.advance()
			name := p.text()
			sel.SubclassSelectors = append(sel.SubclassSelectors, &css_ast.SSClass{Name: name})
			p.expect(css_lexer.TIdent)

		case css_lexer.TOpenBracket:
			p.advance()
			attr, good := p.parseAttributeSelector()
			if !good {
				return
			}
			sel.SubclassSelectors = append(sel.SubclassSelectors, &attr)

		case css_lexer.TColon:
			if p.next().Kind == css_lexer.TColon {
				// Stop if this is the start of the pseudo-element selector section
				break subclassSelectors
			}
			pseudo := p.parsePseudoElementSelector()
			sel.SubclassSelectors = append(sel.SubclassSelectors, &pseudo)

		default:
			break subclassSelectors
		}
	}

	// Parse the pseudo-element selectors (after the first ":" of a "::")
	if p.eat(css_lexer.TColon) {
		pseudo := p.parsePseudoElementSelector()
		sel.PseudoClassSelectors = append(sel.PseudoClassSelectors, pseudo)
		for p.peek(css_lexer.TColon) {
			pseudo := p.parsePseudoElementSelector()
			sel.PseudoClassSelectors = append(sel.PseudoClassSelectors, pseudo)
		}
	}

	// The compound selector must be non-empty
	if sel.TypeSelector == nil && len(sel.SubclassSelectors) == 0 && len(sel.PseudoClassSelectors) == 0 {
		p.unexpected()
		return
	}

	ok = true
	return
}
|
||
// parseAttributeSelector parses the interior of "[...]" (the "[" has already
// been consumed): a possibly-namespaced attribute name, an optional matcher
// operator and value, and an optional single-letter case modifier.
func (p *parser) parseAttributeSelector() (attr css_ast.SSAttribute, ok bool) {
	// Parse the namespaced name
	switch p.current().Kind {
	case css_lexer.TDelimBar, css_lexer.TDelimAsterisk:
		// "[|x]"
		// "[*|x]"
		prefix := ""
		if p.peek(css_lexer.TDelimAsterisk) {
			prefix = "*"
			p.advance()
		}
		attr.NamespacedName.NamespacePrefix = &prefix
		if !p.expect(css_lexer.TDelimBar) {
			return
		}
		if !p.peek(css_lexer.TIdent) {
			p.expect(css_lexer.TIdent)
			return
		}
		attr.NamespacedName.Name = p.text()
		p.advance()

	case css_lexer.TIdent:
		// "[x]"
		// "[x|y]"
		attr.NamespacedName.Name = p.text()
		p.advance()
		if p.eat(css_lexer.TDelimBar) {
			if !p.peek(css_lexer.TIdent) {
				p.expect(css_lexer.TIdent)
				return
			}
			prefix := attr.NamespacedName.Name
			attr.NamespacedName.NamespacePrefix = &prefix
			attr.NamespacedName.Name = p.text()
			p.advance()
		}

	default:
		p.expect(css_lexer.TIdent)
		return
	}

	// Parse the optional matcher operator
	if p.eat(css_lexer.TDelimEquals) {
		attr.MatcherOp = "="
	} else if p.next().Kind == css_lexer.TDelimEquals {
		// Two-character operators: the first token selects the flavor and the
		// second must be "="
		switch p.current().Kind {
		case css_lexer.TDelimTilde:
			attr.MatcherOp = "~="
		case css_lexer.TDelimBar:
			attr.MatcherOp = "|="
		case css_lexer.TDelimCaret:
			attr.MatcherOp = "^="
		case css_lexer.TDelimDollar:
			attr.MatcherOp = "$="
		case css_lexer.TDelimAsterisk:
			attr.MatcherOp = "*="
		}
		if attr.MatcherOp != "" {
			p.advance()
			p.advance()
		}
	}

	// Parse the optional matcher value
	if attr.MatcherOp != "" {
		if !p.peek(css_lexer.TString) && !p.peek(css_lexer.TIdent) {
			p.unexpected()
		}
		attr.MatcherValue = p.text()
		p.advance()
		p.eat(css_lexer.TWhitespace)
		// Optional case-sensitivity modifier, e.g. "[a=b i]"
		if p.peek(css_lexer.TIdent) {
			if modifier := p.text(); len(modifier) == 1 {
				if c := modifier[0]; strings.ContainsRune("iIsS", rune(c)) {
					attr.MatcherModifier = c
					p.advance()
				}
			}
		}
	}

	p.expect(css_lexer.TCloseBracket)
	ok = true
	return
}
|
||
// parsePseudoElementSelector parses the name (and optional parenthesized
// arguments) of a pseudo selector. The caller positions the index on a ":"
// token, which is consumed here.
func (p *parser) parsePseudoElementSelector() css_ast.SSPseudoClass {
	p.advance() // consume the ":" token

	if p.peek(css_lexer.TFunction) {
		// A function token includes the trailing "(", e.g. ":not(" — strip it
		// from the stored name and keep the arguments as raw tokens
		text := p.text()
		p.advance()
		args := p.parseAnyValue()
		p.expect(css_lexer.TCloseParen)
		return css_ast.SSPseudoClass{Name: text[:len(text)-1], Args: args}
	}

	sel := css_ast.SSPseudoClass{Name: p.text()}
	p.expect(css_lexer.TIdent)
	return sel
}
|
||
func (p *parser) parseAnyValue() []css_lexer.Token { | ||
// Reference: https://drafts.csswg.org/css-syntax-3/#typedef-declaration-value | ||
|
||
p.stack = p.stack[:0] // Reuse allocated memory | ||
start := p.index | ||
|
||
loop: | ||
for { | ||
switch p.current().Kind { | ||
case css_lexer.TCloseParen, css_lexer.TCloseBracket, css_lexer.TCloseBrace: | ||
last := len(p.stack) - 1 | ||
if last < 0 || !p.peek(p.stack[last]) { | ||
break loop | ||
} | ||
p.stack = p.stack[:last] | ||
|
||
case css_lexer.TSemicolon, css_lexer.TDelimExclamation: | ||
if len(p.stack) == 0 { | ||
break loop | ||
} | ||
|
||
case css_lexer.TOpenParen, css_lexer.TFunction: | ||
p.stack = append(p.stack, css_lexer.TCloseParen) | ||
|
||
case css_lexer.TOpenBracket: | ||
p.stack = append(p.stack, css_lexer.TCloseBracket) | ||
|
||
case css_lexer.TOpenBrace: | ||
p.stack = append(p.stack, css_lexer.TCloseBrace) | ||
} | ||
|
||
p.advance() | ||
} | ||
|
||
tokens := p.tokens[start:p.index] | ||
if len(tokens) == 0 { | ||
p.unexpected() | ||
} | ||
return tokens | ||
} | ||
|
||
func (p *parser) parseCombinator() string { | ||
switch p.current().Kind { | ||
case css_lexer.TDelimGreaterThan: | ||
p.advance() | ||
return ">" | ||
|
||
case css_lexer.TDelimPlus: | ||
p.advance() | ||
return "+" | ||
|
||
case css_lexer.TDelimTilde: | ||
p.advance() | ||
return "~" | ||
|
||
case css_lexer.TDelimBar: | ||
if p.next().Kind == css_lexer.TDelimBar { | ||
p.advance() | ||
p.advance() | ||
} | ||
return "||" | ||
|
||
default: | ||
return "" | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,231 @@ | ||
package css_printer | ||
|
||
import ( | ||
"strings" | ||
|
||
"github.com/evanw/esbuild/internal/css_ast" | ||
"github.com/evanw/esbuild/internal/css_lexer" | ||
) | ||
|
||
// printer accumulates the output text for one AST.
type printer struct {
	Options
	sb strings.Builder // the output buffer
}

// Options controls how the AST is printed.
type Options struct {
	Contents         string // original source text (not referenced in this file — TODO confirm intended use)
	RemoveWhitespace bool   // true to print minified output (no indentation or newlines)
}
|
||
// Print renders a parsed CSS AST back to CSS text.
func Print(tree css_ast.AST, options Options) string {
	p := printer{
		Options: options,
	}
	// Top-level rules start at indent 0 and always keep their semicolons
	for _, rule := range tree.Rules {
		p.printRule(rule, 0, false)
	}
	return p.sb.String()
}
|
||
func (p *printer) printRule(rule css_ast.R, indent int, omitTrailingSemicolon bool) { | ||
if !p.RemoveWhitespace { | ||
p.printIndent(indent) | ||
} | ||
switch r := rule.(type) { | ||
case *css_ast.RAtImport: | ||
if p.RemoveWhitespace { | ||
p.print("@import") | ||
} else { | ||
p.print("@import ") | ||
} | ||
p.print(css_lexer.QuoteForStringToken(r.PathText)) | ||
p.print(";") | ||
|
||
case *css_ast.RKnownAt: | ||
p.printToken(r.Name) | ||
p.printTokens(r.Prelude) | ||
if r.Rules == nil { | ||
p.print(";") | ||
} else { | ||
p.printRuleBlock(r.Rules, indent) | ||
} | ||
|
||
case *css_ast.RUnknownAt: | ||
p.printToken(r.Name) | ||
p.printTokens(r.Prelude) | ||
if r.Block == nil { | ||
p.print(";") | ||
} else { | ||
p.printTokens(r.Block) | ||
} | ||
|
||
case *css_ast.RSelector: | ||
p.printComplexSelectors(r.Selectors, indent) | ||
if !p.RemoveWhitespace { | ||
p.print(" ") | ||
} | ||
p.printRuleBlock(r.Rules, indent) | ||
|
||
case *css_ast.RQualified: | ||
p.printTokens(r.Prelude) | ||
p.printRuleBlock(r.Rules, indent) | ||
|
||
case *css_ast.RDeclaration: | ||
p.printToken(r.Key) | ||
if p.RemoveWhitespace { | ||
p.print(":") | ||
} else { | ||
p.print(": ") | ||
} | ||
p.printTokens(r.Value) | ||
if r.Important { | ||
p.print("!important") | ||
} | ||
if !omitTrailingSemicolon { | ||
p.print(";") | ||
} | ||
|
||
case *css_ast.RBadDeclaration: | ||
p.printTokens(r.Tokens) | ||
if !omitTrailingSemicolon { | ||
p.print(";") | ||
} | ||
|
||
default: | ||
panic("Internal error") | ||
} | ||
if !p.RemoveWhitespace { | ||
p.print("\n") | ||
} | ||
} | ||
|
||
func (p *printer) printRuleBlock(rules []css_ast.R, indent int) { | ||
if p.RemoveWhitespace { | ||
p.print("{") | ||
} else { | ||
p.print("{\n") | ||
} | ||
for i, decl := range rules { | ||
omitTrailingSemicolon := p.RemoveWhitespace && i+1 == len(rules) | ||
p.printRule(decl, indent+1, omitTrailingSemicolon) | ||
} | ||
if !p.RemoveWhitespace { | ||
p.printIndent(indent) | ||
} | ||
p.print("}") | ||
} | ||
|
||
func (p *printer) printComplexSelectors(selectors []css_ast.ComplexSelector, indent int) { | ||
for i, complex := range selectors { | ||
if i > 0 { | ||
if p.RemoveWhitespace { | ||
p.print(",") | ||
} else { | ||
p.print(",\n") | ||
p.printIndent(indent) | ||
} | ||
} | ||
for j, compound := range complex.Selectors { | ||
p.printCompoundSelector(compound, j == 0) | ||
} | ||
} | ||
} | ||
|
||
func (p *printer) printCompoundSelector(sel css_ast.CompoundSelector, isFirst bool) { | ||
if sel.Combinator != "" { | ||
if !p.RemoveWhitespace { | ||
p.print(" ") | ||
} | ||
p.print(sel.Combinator) | ||
if !p.RemoveWhitespace { | ||
p.print(" ") | ||
} | ||
} else if !isFirst { | ||
p.print(" ") | ||
} | ||
|
||
if sel.TypeSelector != nil { | ||
p.printNamespacedName(*sel.TypeSelector) | ||
} | ||
|
||
for _, sub := range sel.SubclassSelectors { | ||
switch s := sub.(type) { | ||
case *css_ast.SSHash: | ||
p.print("#") | ||
p.print(s.Name) | ||
|
||
case *css_ast.SSClass: | ||
p.print(".") | ||
p.print(s.Name) | ||
|
||
case *css_ast.SSAttribute: | ||
p.print("[") | ||
p.printNamespacedName(s.NamespacedName) | ||
p.print(s.MatcherOp) | ||
p.print(s.MatcherValue) | ||
if s.MatcherModifier != 0 { | ||
p.print(" ") | ||
p.print(string(rune(s.MatcherModifier))) | ||
} | ||
p.print("]") | ||
|
||
case *css_ast.SSPseudoClass: | ||
p.printPseudoClassSelector(*s) | ||
} | ||
} | ||
|
||
if len(sel.PseudoClassSelectors) > 0 { | ||
p.print(":") | ||
for _, pseudo := range sel.PseudoClassSelectors { | ||
p.printPseudoClassSelector(pseudo) | ||
} | ||
} | ||
} | ||
|
||
func (p *printer) printNamespacedName(nsName css_ast.NamespacedName) { | ||
if nsName.NamespacePrefix != nil { | ||
p.print(*nsName.NamespacePrefix) | ||
if p.RemoveWhitespace { | ||
p.print("|") | ||
} else { | ||
p.print(" | ") | ||
} | ||
} | ||
p.print(nsName.Name) | ||
} | ||
|
||
func (p *printer) printPseudoClassSelector(pseudo css_ast.SSPseudoClass) { | ||
p.print(":") | ||
p.print(pseudo.Name) | ||
if len(pseudo.Args) > 0 { | ||
p.print("(") | ||
for _, arg := range pseudo.Args { | ||
p.printToken(arg) | ||
} | ||
p.print(")") | ||
} | ||
} | ||
|
||
// print appends text verbatim to the output.
func (p *printer) print(text string) {
	p.sb.WriteString(text)
}
|
||
func (p *printer) printIndent(indent int) { | ||
for i := 0; i < indent; i++ { | ||
p.sb.WriteString(" ") | ||
} | ||
} | ||
|
||
func (p *printer) printToken(token css_lexer.Token) { | ||
if token.Kind == css_lexer.TWhitespace { | ||
p.print(" ") | ||
} else { | ||
p.print(token.Raw(p.Contents)) | ||
} | ||
} | ||
|
||
func (p *printer) printTokens(tokens []css_lexer.Token) { | ||
for _, t := range tokens { | ||
p.printToken(t) | ||
} | ||
} |