Skip to content

Commit

Permalink
add an initial css lexer+parser+printer (#20)
Browse files Browse the repository at this point in the history
evanw committed Sep 12, 2020

Verified

This commit was created on GitHub.com and signed with GitHub’s verified signature. The key has expired.
1 parent f9858fc commit 99bafd0
Showing 6 changed files with 1,964 additions and 0 deletions.
111 changes: 111 additions & 0 deletions internal/css_ast/css_ast.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,111 @@
package css_ast

import (
"github.com/evanw/esbuild/internal/css_lexer"
"github.com/evanw/esbuild/internal/logger"
)

// AST is the result of parsing one CSS file: a flat list of top-level rules.
type AST struct {
	Rules []R
}

// This interface is never called. Its purpose is to encode a variant type in
// Go's type system.
type R interface {
	isRule()
}

// RAtImport is an "@import" rule whose single string/url argument has
// already been decoded into PathText.
type RAtImport struct {
	PathText  string
	PathRange logger.Range
}

// RKnownAt is an at-rule the parser recognizes (see specialAtRules in the
// parser); its block, if any, has been parsed into child rules.
type RKnownAt struct {
	Name    css_lexer.Token
	Prelude []css_lexer.Token
	Rules   []R
}

// RUnknownAt is an at-rule the parser does not recognize (or one that was
// malformed); the prelude and block are kept as raw token slices so they can
// be printed back verbatim.
type RUnknownAt struct {
	Name    css_lexer.Token
	Prelude []css_lexer.Token
	Block   []css_lexer.Token
}

// RSelector is a style rule: a selector list followed by a block of
// declarations (and possibly nested rules).
type RSelector struct {
	Selectors []ComplexSelector
	Rules     []R
}

// RQualified is a qualified rule whose prelude could not be parsed as a
// selector list; the prelude is preserved as raw tokens.
type RQualified struct {
	Prelude []css_lexer.Token
	Rules   []R
}

// RDeclaration is a "key: value" declaration. A trailing "!important" is
// stripped from Value and recorded in Important instead.
type RDeclaration struct {
	Key       css_lexer.Token
	Value     []css_lexer.Token
	Important bool
}

// RBadDeclaration holds the raw tokens of a declaration that failed to
// parse, so the output can still reproduce the input.
type RBadDeclaration struct {
	Tokens []css_lexer.Token
}

func (*RAtImport) isRule()       {}
func (*RKnownAt) isRule()        {}
func (*RUnknownAt) isRule()      {}
func (*RSelector) isRule()       {}
func (*RQualified) isRule()      {}
func (*RDeclaration) isRule()    {}
func (*RBadDeclaration) isRule() {}

// ComplexSelector is a chain of compound selectors joined by combinators.
// Each combinator is stored on the right-hand CompoundSelector of its pair.
type ComplexSelector struct {
	Selectors []CompoundSelector
}

// CompoundSelector is one selector unit: an optional type selector followed
// by subclass selectors (#id, .class, [attr], :pseudo) and then any
// pseudo-element selectors.
type CompoundSelector struct {
	Combinator           string // Optional, may be ""
	TypeSelector         *NamespacedName
	SubclassSelectors    []SS
	PseudoClassSelectors []SSPseudoClass // If present, these follow a ":" character
}

// NamespacedName is a name with an optional CSS namespace prefix.
type NamespacedName struct {
	// If present, this is an identifier or "*" or "" and is followed by a "|" character
	NamespacePrefix *string

	// This is an identifier or "*" or "&"
	Name string
}

// This interface is never called. Its purpose is to encode a variant type in
// Go's type system.
type SS interface {
	isSubclassSelector()
}

// SSHash is an ID selector; Name excludes the leading "#".
type SSHash struct {
	Name string
}

// SSClass is a class selector; Name excludes the leading ".".
type SSClass struct {
	Name string
}

// SSAttribute is an attribute selector such as `[ns|href^="x" i]`.
type SSAttribute struct {
	NamespacedName  NamespacedName
	MatcherOp       string
	MatcherValue    string
	MatcherModifier byte // 0 if absent, otherwise one of 'i', 'I', 's', 'S'
}

// SSPseudoClass is a pseudo-class or pseudo-element name, optionally with
// function arguments (e.g. ":hover" or ":nth-child(2n)").
type SSPseudoClass struct {
	Name string
	Args []css_lexer.Token
}

func (*SSHash) isSubclassSelector()        {}
func (*SSClass) isSubclassSelector()       {}
func (*SSAttribute) isSubclassSelector()   {}
func (*SSPseudoClass) isSubclassSelector() {}
786 changes: 786 additions & 0 deletions internal/css_lexer/css_lexer.go

Large diffs are not rendered by default.

104 changes: 104 additions & 0 deletions internal/css_lexer/css_lexer_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
package css_lexer

import (
"testing"

"github.com/evanw/esbuild/internal/logger"
"github.com/evanw/esbuild/internal/test"
)

// lexToken tokenizes "contents" and returns the kind of the first token,
// or TEndOfFile when the lexer produced no tokens at all.
func lexToken(t *testing.T, contents string) T {
	log := logger.NewDeferLog()
	tokens := Tokenize(log, test.SourceForTest(contents))
	if len(tokens) == 0 {
		return TEndOfFile
	}
	return tokens[0].Kind
}

// TestTokens checks that each input lexes to the expected first token kind.
// NOTE(review): the "text" column (human-readable token description) is never
// asserted in the loop below — confirm whether it should be checked against
// the token kind's String() or removed.
func TestTokens(t *testing.T) {
	expected := []struct {
		contents string
		token    T
		text     string
	}{
		{"", TEndOfFile, "end of file"},
		{"@media", TAtKeyword, "@-keyword"},
		{"url(x y", TBadURL, "bad URL token"},
		{"-->", TCDC, "\"-->\""},
		{"<!--", TCDO, "\"<!--\""},
		{"}", TCloseBrace, "\"}\""},
		{"]", TCloseBracket, "\"]\""},
		{")", TCloseParen, "\")\""},
		{":", TColon, "\":\""},
		{",", TComma, "\",\""},
		{"?", TDelim, "delimiter"},
		{"&", TDelimAmpersand, "\"&\""},
		{"*", TDelimAsterisk, "\"*\""},
		{"|", TDelimBar, "\"|\""},
		{"^", TDelimCaret, "\"^\""},
		{"$", TDelimDollar, "\"$\""},
		{".", TDelimDot, "\".\""},
		{"=", TDelimEquals, "\"=\""},
		{"!", TDelimExclamation, "\"!\""},
		{">", TDelimGreaterThan, "\">\""},
		{"+", TDelimPlus, "\"+\""},
		{"~", TDelimTilde, "\"~\""},
		{"1px", TDimension, "dimension"},
		{"max(", TFunction, "function token"},
		{"#0", THash, "hash token"},
		{"#id", THashID, "hash token"},
		{"name", TIdent, "identifier"},
		{"123", TNumber, "number"},
		{"{", TOpenBrace, "\"{\""},
		{"[", TOpenBracket, "\"[\""},
		{"(", TOpenParen, "\"(\""},
		{"50%", TPercentage, "percentage"},
		{";", TSemicolon, "\";\""},
		{"'abc'", TString, "string token"},
		{"url(test)", TURL, "URL token"},
		{" ", TWhitespace, "whitespace"},
	}

	for _, it := range expected {
		// Copy loop values so the subtest closure captures stable variables
		contents := it.contents
		token := it.token
		t.Run(contents, func(t *testing.T) {
			test.AssertEqual(t, lexToken(t, contents), token)
		})
	}
}

// TestStringParsing checks decoding of CSS string token contents: simple
// backslash escapes and hex escapes, where a hex escape consumes at most one
// whitespace character that terminates it (so "\6f " decodes to just "o").
func TestStringParsing(t *testing.T) {
	test.AssertEqual(t, ContentsOfStringToken("\"foo\""), "foo")
	test.AssertEqual(t, ContentsOfStringToken("\"f\\oo\""), "foo")
	test.AssertEqual(t, ContentsOfStringToken("\"f\\\"o\""), "f\"o")
	test.AssertEqual(t, ContentsOfStringToken("\"f\\\\o\""), "f\\o")
	test.AssertEqual(t, ContentsOfStringToken("\"f\\\no\""), "f\no")
	test.AssertEqual(t, ContentsOfStringToken("\"f\\\ro\""), "f\ro")
	test.AssertEqual(t, ContentsOfStringToken("\"f\\\vo\""), "f\vo")
	test.AssertEqual(t, ContentsOfStringToken("\"f\\6fo\""), "foo")
	test.AssertEqual(t, ContentsOfStringToken("\"f\\6f o\""), "foo")
	// Two spaces after the hex escape: the escape consumes only one, so one
	// space survives. (The previous line duplicated the single-space input
	// while expecting a different result, which is contradictory.)
	test.AssertEqual(t, ContentsOfStringToken("\"f\\6f  o\""), "fo o")
}

// TestURLParsing checks decoding of url(...) token contents: surrounding
// whitespace is trimmed and backslash/hex escapes are decoded, with a hex
// escape consuming at most one terminating whitespace character.
func TestURLParsing(t *testing.T) {
	test.AssertEqual(t, ContentsOfURLToken("url(foo)"), "foo")
	test.AssertEqual(t, ContentsOfURLToken("url( foo\t\t)"), "foo")
	test.AssertEqual(t, ContentsOfURLToken("url(f\\oo)"), "foo")
	test.AssertEqual(t, ContentsOfURLToken("url(f\\\"o)"), "f\"o")
	test.AssertEqual(t, ContentsOfURLToken("url(f\\'o)"), "f'o")
	test.AssertEqual(t, ContentsOfURLToken("url(f\\)o)"), "f)o")
	test.AssertEqual(t, ContentsOfURLToken("url(f\\6fo)"), "foo")
	test.AssertEqual(t, ContentsOfURLToken("url(f\\6f o)"), "foo")
	// Two spaces after the hex escape: one is consumed by the escape, one
	// remains. (The previous line duplicated the single-space input while
	// expecting a different result, which is contradictory.)
	test.AssertEqual(t, ContentsOfURLToken("url(f\\6f  o)"), "fo o")
}

// TestStringQuoting checks that QuoteForStringToken wraps contents in double
// quotes and backslash-escapes quotes, backslashes, and newline-class
// characters.
//
// NOTE(review): a backslash followed by a literal newline inside a CSS string
// is a line continuation (it produces no character), so these quoted forms
// may not round-trip through ContentsOfStringToken — confirm against the
// lexer's escape handling.
func TestStringQuoting(t *testing.T) {
	test.AssertEqual(t, QuoteForStringToken("foo"), "\"foo\"")
	test.AssertEqual(t, QuoteForStringToken("f\"o"), "\"f\\\"o\"")
	test.AssertEqual(t, QuoteForStringToken("f\\o"), "\"f\\\\o\"")
	test.AssertEqual(t, QuoteForStringToken("f\no"), "\"f\\\no\"")
	test.AssertEqual(t, QuoteForStringToken("f\ro"), "\"f\\\ro\"")
	test.AssertEqual(t, QuoteForStringToken("f\fo"), "\"f\\\fo\"")
}
410 changes: 410 additions & 0 deletions internal/css_parser/css_parser.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,410 @@
package css_parser

import (
"fmt"
"strings"

"github.com/evanw/esbuild/internal/css_ast"
"github.com/evanw/esbuild/internal/css_lexer"
"github.com/evanw/esbuild/internal/logger"
)

// parser holds the token stream and mutable cursor state for one parse.
type parser struct {
	log       logger.Log
	source    logger.Source
	tokens    []css_lexer.Token
	stack     []css_lexer.T // scratch stack of expected closing tokens, reused by parseAnyValue
	index     int           // current cursor position in "tokens"
	end       int           // parsing stops at this token index
	prevError logger.Loc    // location of the last reported error, used to suppress duplicates
}

// Parse tokenizes the source and parses it into a CSS AST, reporting any
// syntax errors to the provided log.
func Parse(log logger.Log, source logger.Source) css_ast.AST {
	p := parser{
		log:       log,
		source:    source,
		tokens:    css_lexer.Tokenize(log, source),
		prevError: logger.Loc{Start: -1},
	}
	p.end = len(p.tokens)

	rules := p.parseListOfRules(ruleContext{
		isTopLevel:     true,
		parseSelectors: true,
	})

	// Everything should have been consumed by the rule list
	p.expect(css_lexer.TEndOfFile)
	return css_ast.AST{Rules: rules}
}

// advance moves the cursor past the current token, clamping at the end of
// the stream so repeated calls at EOF are safe.
func (p *parser) advance() {
	if p.index >= p.end {
		return
	}
	p.index++
}

// at returns the token at the given index, synthesizing an end-of-file token
// when the index is at or past the parser's end marker.
func (p *parser) at(index int) css_lexer.Token {
	switch {
	case index < p.end:
		return p.tokens[index]

	case p.end < len(p.tokens):
		// The stream was truncated early: locate EOF at the cutoff token
		return css_lexer.Token{
			Kind:  css_lexer.TEndOfFile,
			Range: logger.Range{Loc: p.tokens[p.end].Range.Loc},
		}

	default:
		// Locate EOF at the end of the source text
		return css_lexer.Token{
			Kind:  css_lexer.TEndOfFile,
			Range: logger.Range{Loc: logger.Loc{Start: int32(len(p.source.Contents))}},
		}
	}
}

// current returns the token at the cursor without consuming it.
func (p *parser) current() css_lexer.Token {
	return p.at(p.index)
}

// next returns the token just after the cursor without consuming anything.
func (p *parser) next() css_lexer.Token {
	return p.at(p.index + 1)
}

// text returns the raw source text of the current token.
func (p *parser) text() string {
	return p.current().Raw(p.source.Contents)
}

// peek reports whether the current token has the given kind.
func (p *parser) peek(kind css_lexer.T) bool {
	return p.current().Kind == kind
}

// eat consumes the current token if it has the given kind, reporting whether
// a token was consumed.
func (p *parser) eat(kind css_lexer.T) bool {
	if !p.peek(kind) {
		return false
	}
	p.advance()
	return true
}

// expect consumes the current token if it has the given kind; otherwise it
// reports a syntax error (at most once per source location) and consumes
// nothing. Returns whether the expected token was found.
func (p *parser) expect(kind css_lexer.T) bool {
	if p.eat(kind) {
		return true
	}

	var msg string
	if p.peek(css_lexer.TEndOfFile) {
		msg = fmt.Sprintf("Expected %s but found end of file", kind.String())
	} else {
		msg = fmt.Sprintf("Expected %s but found %q", kind.String(), p.text())
	}

	// Only report one error per location to avoid cascading messages
	if r := p.current().Range; r.Loc.Start > p.prevError.Start {
		p.log.AddRangeError(&p.source, r, msg)
		p.prevError = r.Loc
	}
	return false
}

// unexpected reports a generic syntax error for the current token without
// consuming it, suppressing duplicate reports at the same location.
func (p *parser) unexpected() {
	msg := "Unexpected end of file"
	if !p.peek(css_lexer.TEndOfFile) {
		msg = fmt.Sprintf("Unexpected %q", p.text())
	}

	if r := p.current().Range; r.Loc.Start > p.prevError.Start {
		p.log.AddRangeError(&p.source, r, msg)
		p.prevError = r.Loc
	}
}

// ruleContext controls how parseListOfRules interprets its input.
type ruleContext struct {
	isTopLevel     bool // skip the HTML comment tokens "<!--" and "-->"
	parseSelectors bool // try to parse rule preludes as selector lists
}

// parseListOfRules parses rules until a closing "}" or end of file. At the
// top level the HTML comment tokens "<!--" and "-->" are skipped for
// historical compatibility.
func (p *parser) parseListOfRules(context ruleContext) []css_ast.R {
	rules := []css_ast.R{}

	for {
		kind := p.current().Kind

		if kind == css_lexer.TEndOfFile || kind == css_lexer.TCloseBrace {
			return rules
		}

		// Skip whitespace, plus HTML comment tokens at the top level only
		if kind == css_lexer.TWhitespace ||
			(context.isTopLevel && (kind == css_lexer.TCDO || kind == css_lexer.TCDC)) {
			p.advance()
			continue
		}

		if kind == css_lexer.TAtKeyword {
			rules = append(rules, p.parseAtRule(atRuleContext{}))
			continue
		}

		if context.parseSelectors {
			rules = append(rules, p.parseSelectorRule())
		} else {
			rules = append(rules, p.parseQualifiedRuleFrom(p.index))
		}
	}
}

// parseListOfDeclarations parses the interior of a declaration block until
// the closing "}" (or end of file), returning declarations plus any nested
// at-rules or "&"-prefixed nested selector rules.
func (p *parser) parseListOfDeclarations() (list []css_ast.R) {
	for {
		switch p.current().Kind {
		case css_lexer.TWhitespace, css_lexer.TSemicolon:
			p.advance()

		case css_lexer.TEndOfFile, css_lexer.TCloseBrace:
			return

		case css_lexer.TAtKeyword:
			list = append(list, p.parseAtRule(atRuleContext{
				isDeclarationList: true,
			}))

		case css_lexer.TDelimAmpersand:
			// Reference: https://drafts.csswg.org/css-nesting-1/
			list = append(list, p.parseSelectorRule())

		default:
			list = append(list, p.parseDeclaration())
		}
	}
}

// atRuleKind classifies how a given at-rule's body should be parsed.
type atRuleKind uint8

const (
	atRuleUnknown        atRuleKind = iota // not in specialAtRules; body kept as raw tokens
	atRuleQualifiedRules                   // block contains qualified rules (e.g. "@keyframes")
	atRuleInheritContext                   // block parsed like the surrounding context (e.g. "@media")
	atRuleEmpty                            // no block; the rule ends with ";" (e.g. "@import")
)

// specialAtRules maps recognized at-rule names to their body parsing mode.
// Anything missing from this map gets the zero value, atRuleUnknown.
var specialAtRules = map[string]atRuleKind{
	"@keyframes": atRuleQualifiedRules,

	"@document": atRuleInheritContext,
	"@media":    atRuleInheritContext,
	"@scope":    atRuleInheritContext,
	"@supports": atRuleInheritContext,

	"@charset":   atRuleEmpty,
	"@import":    atRuleEmpty,
	"@namespace": atRuleEmpty,
}

// atRuleContext tells parseAtRule whether it was reached from inside a
// declaration block, which changes how an inherit-context block is parsed.
type atRuleContext struct {
	isDeclarationList bool
}

// parseAtRule parses one at-rule starting at its "@keyword" token. The rule
// is terminated either by ";" (or an enclosing "}") or by a "{...}" block,
// and the block's contents are parsed according to specialAtRules. Malformed
// combinations (a block where none is allowed, or vice versa) are reported
// and preserved as RUnknownAt so they can be printed back verbatim.
func (p *parser) parseAtRule(context atRuleContext) css_ast.R {
	// Parse the name
	name := p.current()
	text := p.text()
	kind := specialAtRules[text] // atRuleUnknown when the name is not special
	p.advance()

	// Parse the prelude
	preludeStart := p.index
	for !p.peek(css_lexer.TOpenBrace) {
		if p.peek(css_lexer.TSemicolon) || p.peek(css_lexer.TCloseBrace) {
			prelude := p.tokens[preludeStart:p.index]

			// Report an error for rules that should have blocks
			if kind != atRuleEmpty && kind != atRuleUnknown {
				p.expect(css_lexer.TOpenBrace)
				p.eat(css_lexer.TSemicolon)
				return &css_ast.RUnknownAt{Name: name, Prelude: prelude}
			}

			// Special-case certain rules
			if text == "@import" {
				tokens := trimWhitespace(prelude)
				if len(tokens) == 1 {
					t := tokens[0]
					switch t.Kind {
					case css_lexer.TString:
						path := css_lexer.ContentsOfStringToken(t.Raw(p.source.Contents))
						p.eat(css_lexer.TSemicolon)
						return &css_ast.RAtImport{PathText: path, PathRange: t.Range}

					case css_lexer.TURL:
						path := css_lexer.ContentsOfURLToken(t.Raw(p.source.Contents))
						p.eat(css_lexer.TSemicolon)
						return &css_ast.RAtImport{PathText: path, PathRange: t.Range}
					}
				}
			}

			p.eat(css_lexer.TSemicolon)
			return &css_ast.RKnownAt{Name: name, Prelude: prelude}
		}

		p.parseComponentValue()
	}
	prelude := p.tokens[preludeStart:p.index]
	blockStart := p.index

	// Report an error for rules that shouldn't have blocks
	if kind == atRuleEmpty {
		p.expect(css_lexer.TSemicolon)
		p.parseBlock(css_lexer.TCloseBrace)
		block := p.tokens[blockStart:p.index]
		return &css_ast.RUnknownAt{Name: name, Prelude: prelude, Block: block}
	}

	// Parse known rules whose blocks consist of qualified rules
	if kind == atRuleQualifiedRules {
		p.advance()
		rules := p.parseListOfRules(ruleContext{})
		p.expect(css_lexer.TCloseBrace)
		return &css_ast.RKnownAt{Name: name, Prelude: prelude, Rules: rules}
	}

	// Parse known rules whose blocks consist of whatever the current context is
	if kind == atRuleInheritContext {
		p.advance()
		var rules []css_ast.R
		if context.isDeclarationList {
			rules = p.parseListOfDeclarations()
		} else {
			rules = p.parseListOfRules(ruleContext{
				parseSelectors: true,
			})
		}
		p.expect(css_lexer.TCloseBrace)
		return &css_ast.RKnownAt{Name: name, Prelude: prelude, Rules: rules}
	}

	// Otherwise, parse an unknown rule
	p.parseBlock(css_lexer.TCloseBrace)
	block := p.tokens[blockStart:p.index]
	return &css_ast.RUnknownAt{Name: name, Prelude: prelude, Block: block}
}

// parseSelectorRule parses a style rule. It first attempts to parse the
// prelude as a selector list; on failure it falls back to a generic
// qualified rule whose prelude starts at the saved token index. (The cursor
// is not rewound — the fallback continues from wherever selector parsing
// stopped, but the prelude slice still covers everything from preludeStart.)
func (p *parser) parseSelectorRule() css_ast.R {
	preludeStart := p.index

	// Try parsing the prelude as a selector list
	if list, ok := p.parseSelectorList(); ok {
		rule := css_ast.RSelector{Selectors: list}
		if p.expect(css_lexer.TOpenBrace) {
			rule.Rules = p.parseListOfDeclarations()
			p.expect(css_lexer.TCloseBrace)
			return &rule
		}
	}

	// Otherwise, parse a generic qualified rule
	return p.parseQualifiedRuleFrom(preludeStart)
}

// parseQualifiedRuleFrom parses a generic qualified rule whose prelude began
// at the given token index: component values up to a "{", then a block of
// declarations.
func (p *parser) parseQualifiedRuleFrom(preludeStart int) *css_ast.RQualified {
	// Consume component values until the block opens
	for !p.peek(css_lexer.TOpenBrace) {
		p.parseComponentValue()
	}

	rule := &css_ast.RQualified{Prelude: p.tokens[preludeStart:p.index]}
	if p.expect(css_lexer.TOpenBrace) {
		rule.Rules = p.parseListOfDeclarations()
		p.expect(css_lexer.TCloseBrace)
	}
	return rule
}

// parseDeclaration parses one "key: value" declaration up to (but not
// including) the terminating ";", "}", or end of file. A malformed "ident:"
// prefix produces an RBadDeclaration holding the raw tokens instead.
func (p *parser) parseDeclaration() css_ast.R {
	// Parse the key
	keyStart := p.index
	ok := false
	if p.expect(css_lexer.TIdent) {
		p.eat(css_lexer.TWhitespace)
		if p.expect(css_lexer.TColon) {
			ok = true
		}
	} else {
		// Skip the bad token so the value loop below still makes progress
		p.advance()
	}

	// Parse the value
	valueStart := p.index
stop:
	for {
		switch p.current().Kind {
		case css_lexer.TEndOfFile, css_lexer.TSemicolon, css_lexer.TCloseBrace:
			break stop

		default:
			p.parseComponentValue()
		}
	}

	// Stop now if this is not a valid declaration
	if !ok {
		return &css_ast.RBadDeclaration{
			Tokens: p.tokens[keyStart:p.index],
		}
	}

	// Remove leading and trailing whitespace from the value
	value := trimWhitespace(p.tokens[valueStart:p.index])

	// Remove a trailing "!important". CSS allows whitespace between the "!"
	// and the identifier (e.g. "color: red ! important"), so tolerate one
	// whitespace token in that position as well.
	important := false
	if n := len(value); n >= 2 {
		if last := value[n-1]; last.Kind == css_lexer.TIdent &&
			strings.EqualFold(last.Raw(p.source.Contents), "important") {
			i := n - 2
			if value[i].Kind == css_lexer.TWhitespace && i > 0 {
				i--
			}
			if value[i].Kind == css_lexer.TDelimExclamation {
				// Also drop any whitespace left just before the "!"
				value = trimWhitespace(value[:i])
				important = true
			}
		}
	}

	return &css_ast.RDeclaration{
		Key:       p.tokens[keyStart],
		Value:     value,
		Important: important,
	}
}

// parseComponentValue consumes one component value: either a single token or
// an entire balanced (), [], or {} block (a function token opens a () block).
// End of file is reported as an error without consuming anything.
func (p *parser) parseComponentValue() {
	switch p.current().Kind {
	case css_lexer.TFunction, css_lexer.TOpenParen:
		p.parseBlock(css_lexer.TCloseParen)

	case css_lexer.TOpenBrace:
		p.parseBlock(css_lexer.TCloseBrace)

	case css_lexer.TOpenBracket:
		p.parseBlock(css_lexer.TCloseBracket)

	case css_lexer.TEndOfFile:
		p.unexpected()

	default:
		p.advance()
	}
}

// parseBlock consumes the current opening token and then component values up
// to and including the matching closing token, reporting an error if the
// file ends before the block is closed.
func (p *parser) parseBlock(close css_lexer.T) {
	p.advance() // skip over the opening token

	for {
		if p.eat(close) {
			return
		}
		if p.peek(css_lexer.TEndOfFile) {
			p.expect(close) // report the unbalanced block
			return
		}
		p.parseComponentValue()
	}
}

// trimWhitespace strips at most one leading and one trailing whitespace
// token from the slice (the lexer coalesces runs of whitespace into a single
// token, so one on each side is sufficient).
func trimWhitespace(tokens []css_lexer.Token) []css_lexer.Token {
	if len(tokens) > 0 && tokens[0].Kind == css_lexer.TWhitespace {
		tokens = tokens[1:]
	}
	if n := len(tokens); n > 0 && tokens[n-1].Kind == css_lexer.TWhitespace {
		tokens = tokens[:n-1]
	}
	return tokens
}
322 changes: 322 additions & 0 deletions internal/css_parser/css_parser_selector.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,322 @@
package css_parser

import (
"strings"

"github.com/evanw/esbuild/internal/css_ast"
"github.com/evanw/esbuild/internal/css_lexer"
)

// parseSelectorList parses one or more comma-separated complex selectors.
// On a parse failure it returns ok == false with whatever was collected.
func (p *parser) parseSelectorList() (list []css_ast.ComplexSelector, ok bool) {
	for {
		p.eat(css_lexer.TWhitespace)

		sel, good := p.parseComplexSelector()
		if !good {
			return
		}
		list = append(list, sel)

		p.eat(css_lexer.TWhitespace)
		if !p.eat(css_lexer.TComma) {
			break
		}
	}

	ok = true
	return
}

// parseComplexSelector parses one complex selector: a compound selector
// optionally followed by (combinator, compound selector) pairs, stopping at
// ",", "{", or end of file. An empty combinator string means the descendant
// combinator (plain whitespace).
func (p *parser) parseComplexSelector() (result css_ast.ComplexSelector, ok bool) {
	// Parent
	sel, good := p.parseCompoundSelector()
	if !good {
		return
	}
	result.Selectors = append(result.Selectors, sel)

	for {
		p.eat(css_lexer.TWhitespace)
		if p.peek(css_lexer.TEndOfFile) || p.peek(css_lexer.TComma) || p.peek(css_lexer.TOpenBrace) {
			break
		}

		// Optional combinator
		combinator := p.parseCombinator()
		if combinator != "" {
			p.eat(css_lexer.TWhitespace)
		}

		// Child
		sel, good := p.parseCompoundSelector()
		if !good {
			return
		}
		sel.Combinator = combinator
		result.Selectors = append(result.Selectors, sel)
	}

	ok = true
	return
}

// parseCompoundSelector parses one compound selector: an optional type
// selector (possibly namespaced, or the nesting selector "&") followed by
// any number of subclass selectors and then any pseudo-element selectors.
// The compound selector must be non-empty; an empty one reports an error
// and returns ok == false.
func (p *parser) parseCompoundSelector() (sel css_ast.CompoundSelector, ok bool) {
	// Parse the type selector
	switch p.current().Kind {
	case css_lexer.TDelimAmpersand:
		// This is an extension: https://drafts.csswg.org/css-nesting-1/
		sel.TypeSelector = &css_ast.NamespacedName{Name: "&"}
		p.advance()

	case css_lexer.TDelimBar, css_lexer.TIdent, css_lexer.TDelimAsterisk:
		nsName := css_ast.NamespacedName{}
		if !p.peek(css_lexer.TDelimBar) {
			nsName.Name = p.text()
			p.advance()
		}
		// A "|" means the name so far was a namespace prefix ("" when absent)
		if p.eat(css_lexer.TDelimBar) {
			if !p.peek(css_lexer.TIdent) && !p.peek(css_lexer.TDelimAsterisk) {
				p.expect(css_lexer.TIdent)
				return
			}
			prefix := nsName.Name
			nsName.NamespacePrefix = &prefix
			nsName.Name = p.text()
			p.advance()
		}
		sel.TypeSelector = &nsName
	}

	// Parse the subclass selectors
subclassSelectors:
	for {
		switch p.current().Kind {
		case css_lexer.THashID:
			name := p.text()[1:] // omit the leading "#"
			sel.SubclassSelectors = append(sel.SubclassSelectors, &css_ast.SSHash{Name: name})
			p.advance()

		case css_lexer.TDelimDot:
			p.advance()
			name := p.text()
			sel.SubclassSelectors = append(sel.SubclassSelectors, &css_ast.SSClass{Name: name})
			p.expect(css_lexer.TIdent)

		case css_lexer.TOpenBracket:
			p.advance()
			attr, good := p.parseAttributeSelector()
			if !good {
				return
			}
			sel.SubclassSelectors = append(sel.SubclassSelectors, &attr)

		case css_lexer.TColon:
			if p.next().Kind == css_lexer.TColon {
				// Stop if this is the start of the pseudo-element selector section
				break subclassSelectors
			}
			pseudo := p.parsePseudoElementSelector()
			sel.SubclassSelectors = append(sel.SubclassSelectors, &pseudo)

		default:
			break subclassSelectors
		}
	}

	// Parse the pseudo-element selectors (the leading ":" of "::" is consumed
	// here; parsePseudoElementSelector consumes the second one)
	if p.eat(css_lexer.TColon) {
		pseudo := p.parsePseudoElementSelector()
		sel.PseudoClassSelectors = append(sel.PseudoClassSelectors, pseudo)
		for p.peek(css_lexer.TColon) {
			pseudo := p.parsePseudoElementSelector()
			sel.PseudoClassSelectors = append(sel.PseudoClassSelectors, pseudo)
		}
	}

	// The compound selector must be non-empty
	if sel.TypeSelector == nil && len(sel.SubclassSelectors) == 0 && len(sel.PseudoClassSelectors) == 0 {
		p.unexpected()
		return
	}

	ok = true
	return
}

// parseAttributeSelector parses the interior of "[...]" (the "[" has already
// been consumed): a possibly-namespaced attribute name, an optional matcher
// operator and value, and an optional single-character case modifier.
func (p *parser) parseAttributeSelector() (attr css_ast.SSAttribute, ok bool) {
	// Parse the namespaced name
	switch p.current().Kind {
	case css_lexer.TDelimBar, css_lexer.TDelimAsterisk:
		// "[|x]"
		// "[*|x]"
		prefix := ""
		if p.peek(css_lexer.TDelimAsterisk) {
			prefix = "*"
			p.advance()
		}
		attr.NamespacedName.NamespacePrefix = &prefix
		if !p.expect(css_lexer.TDelimBar) {
			return
		}
		if !p.peek(css_lexer.TIdent) {
			p.expect(css_lexer.TIdent)
			return
		}
		attr.NamespacedName.Name = p.text()
		p.advance()

	case css_lexer.TIdent:
		// "[x]"
		// "[x|y]"
		attr.NamespacedName.Name = p.text()
		p.advance()
		if p.eat(css_lexer.TDelimBar) {
			if !p.peek(css_lexer.TIdent) {
				p.expect(css_lexer.TIdent)
				return
			}
			prefix := attr.NamespacedName.Name
			attr.NamespacedName.NamespacePrefix = &prefix
			attr.NamespacedName.Name = p.text()
			p.advance()
		}

	default:
		p.expect(css_lexer.TIdent)
		return
	}

	// Parse the optional matcher operator ("=", or a two-character operator
	// formed by a delimiter followed by "=")
	if p.eat(css_lexer.TDelimEquals) {
		attr.MatcherOp = "="
	} else if p.next().Kind == css_lexer.TDelimEquals {
		switch p.current().Kind {
		case css_lexer.TDelimTilde:
			attr.MatcherOp = "~="
		case css_lexer.TDelimBar:
			attr.MatcherOp = "|="
		case css_lexer.TDelimCaret:
			attr.MatcherOp = "^="
		case css_lexer.TDelimDollar:
			attr.MatcherOp = "$="
		case css_lexer.TDelimAsterisk:
			attr.MatcherOp = "*="
		}
		if attr.MatcherOp != "" {
			// Skip both the delimiter and the "="
			p.advance()
			p.advance()
		}
	}

	// Parse the optional matcher value
	if attr.MatcherOp != "" {
		// NOTE(review): on a non-string/non-ident token this reports an error
		// but still consumes the token as the value — confirm that is the
		// intended recovery behavior.
		if !p.peek(css_lexer.TString) && !p.peek(css_lexer.TIdent) {
			p.unexpected()
		}
		attr.MatcherValue = p.text()
		p.advance()
		p.eat(css_lexer.TWhitespace)
		// Optional case-sensitivity modifier: "i" or "s" in either case
		if p.peek(css_lexer.TIdent) {
			if modifier := p.text(); len(modifier) == 1 {
				if c := modifier[0]; strings.ContainsRune("iIsS", rune(c)) {
					attr.MatcherModifier = c
					p.advance()
				}
			}
		}
	}

	p.expect(css_lexer.TCloseBracket)
	ok = true
	return
}

// parsePseudoElementSelector parses the name (and optional function
// arguments) of a pseudo-class/pseudo-element. The ":" before it has already
// been consumed by the caller, and this consumes the current token first.
func (p *parser) parsePseudoElementSelector() css_ast.SSPseudoClass {
	p.advance()

	if !p.peek(css_lexer.TFunction) {
		// Plain name, e.g. ":hover"
		sel := css_ast.SSPseudoClass{Name: p.text()}
		p.expect(css_lexer.TIdent)
		return sel
	}

	// Functional form, e.g. ":nth-child(2n)" — the function token includes
	// the trailing "(", which is dropped from the name
	name := p.text()
	p.advance()
	args := p.parseAnyValue()
	p.expect(css_lexer.TCloseParen)
	return css_ast.SSPseudoClass{Name: name[:len(name)-1], Args: args}
}

// parseAnyValue consumes a run of tokens balanced with respect to (), [],
// and {} brackets, stopping at an unbalanced closing bracket or at a
// top-level ";" or "!". It returns the consumed token slice and reports an
// error if that slice is empty.
func (p *parser) parseAnyValue() []css_lexer.Token {
	// Reference: https://drafts.csswg.org/css-syntax-3/#typedef-declaration-value

	p.stack = p.stack[:0] // Reuse allocated memory
	start := p.index

loop:
	for {
		switch p.current().Kind {
		case css_lexer.TCloseParen, css_lexer.TCloseBracket, css_lexer.TCloseBrace:
			// Stop on a closer that doesn't match the innermost open bracket
			last := len(p.stack) - 1
			if last < 0 || !p.peek(p.stack[last]) {
				break loop
			}
			p.stack = p.stack[:last]

		case css_lexer.TSemicolon, css_lexer.TDelimExclamation:
			// These only terminate the value at the top nesting level
			if len(p.stack) == 0 {
				break loop
			}

		case css_lexer.TOpenParen, css_lexer.TFunction:
			p.stack = append(p.stack, css_lexer.TCloseParen)

		case css_lexer.TOpenBracket:
			p.stack = append(p.stack, css_lexer.TCloseBracket)

		case css_lexer.TOpenBrace:
			p.stack = append(p.stack, css_lexer.TCloseBrace)
		}

		p.advance()
	}

	tokens := p.tokens[start:p.index]
	if len(tokens) == 0 {
		p.unexpected()
	}
	return tokens
}

// parseCombinator consumes and returns an explicit combinator (">", "+",
// "~", or the column combinator "||"), or returns "" without consuming
// anything when none is present ("" means the descendant combinator).
func (p *parser) parseCombinator() string {
	switch p.current().Kind {
	case css_lexer.TDelimGreaterThan:
		p.advance()
		return ">"

	case css_lexer.TDelimPlus:
		p.advance()
		return "+"

	case css_lexer.TDelimTilde:
		p.advance()
		return "~"

	case css_lexer.TDelimBar:
		// Only "||" (the column combinator) is a combinator. A single "|" is
		// the namespace separator of a type selector, so leave it for
		// parseCompoundSelector to consume. (Previously a lone "|" was
		// mislabeled as "||" without being consumed.)
		if p.next().Kind == css_lexer.TDelimBar {
			p.advance()
			p.advance()
			return "||"
		}
		return ""

	default:
		return ""
	}
}
231 changes: 231 additions & 0 deletions internal/css_printer/css_printer.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,231 @@
package css_printer

import (
"strings"

"github.com/evanw/esbuild/internal/css_ast"
"github.com/evanw/esbuild/internal/css_lexer"
)

// printer accumulates the printed output for one AST.
type printer struct {
	Options
	sb strings.Builder
}

// Options configures printing.
type Options struct {
	// Contents is the original source text; tokens print their raw text from it.
	Contents string
	// RemoveWhitespace enables minified output (no indentation or newlines).
	RemoveWhitespace bool
}

// Print renders the AST back to CSS text according to the given options.
func Print(tree css_ast.AST, options Options) string {
	p := printer{Options: options}
	for _, r := range tree.Rules {
		p.printRule(r, 0, false)
	}
	return p.sb.String()
}

// printRule prints a single rule at the given indentation level.
// omitTrailingSemicolon is set when minifying for the last declaration in a
// block, where the ";" separator is unnecessary.
func (p *printer) printRule(rule css_ast.R, indent int, omitTrailingSemicolon bool) {
	if !p.RemoveWhitespace {
		p.printIndent(indent)
	}
	switch r := rule.(type) {
	case *css_ast.RAtImport:
		if p.RemoveWhitespace {
			p.print("@import")
		} else {
			p.print("@import ")
		}
		p.print(css_lexer.QuoteForStringToken(r.PathText))
		p.print(";")

	case *css_ast.RKnownAt:
		p.printToken(r.Name)
		p.printTokens(r.Prelude)
		// A nil rule list means the at-rule had no block
		if r.Rules == nil {
			p.print(";")
		} else {
			p.printRuleBlock(r.Rules, indent)
		}

	case *css_ast.RUnknownAt:
		p.printToken(r.Name)
		p.printTokens(r.Prelude)
		// The raw block tokens include their own braces
		if r.Block == nil {
			p.print(";")
		} else {
			p.printTokens(r.Block)
		}

	case *css_ast.RSelector:
		p.printComplexSelectors(r.Selectors, indent)
		if !p.RemoveWhitespace {
			p.print(" ")
		}
		p.printRuleBlock(r.Rules, indent)

	case *css_ast.RQualified:
		p.printTokens(r.Prelude)
		p.printRuleBlock(r.Rules, indent)

	case *css_ast.RDeclaration:
		p.printToken(r.Key)
		if p.RemoveWhitespace {
			p.print(":")
		} else {
			p.print(": ")
		}
		p.printTokens(r.Value)
		if r.Important {
			p.print("!important")
		}
		if !omitTrailingSemicolon {
			p.print(";")
		}

	case *css_ast.RBadDeclaration:
		p.printTokens(r.Tokens)
		if !omitTrailingSemicolon {
			p.print(";")
		}

	default:
		panic("Internal error")
	}
	if !p.RemoveWhitespace {
		p.print("\n")
	}
}

// printRuleBlock prints "{...}" around a list of rules, one per line when
// not minifying. When minifying, the last rule's trailing ";" is omitted.
func (p *printer) printRuleBlock(rules []css_ast.R, indent int) {
	p.print("{")
	if !p.RemoveWhitespace {
		p.print("\n")
	}

	last := len(rules) - 1
	for i, rule := range rules {
		p.printRule(rule, indent+1, p.RemoveWhitespace && i == last)
	}

	if !p.RemoveWhitespace {
		p.printIndent(indent)
	}
	p.print("}")
}

// printComplexSelectors prints a comma-separated selector list, placing each
// selector on its own indented line when not minifying.
func (p *printer) printComplexSelectors(selectors []css_ast.ComplexSelector, indent int) {
	for i, complex := range selectors {
		if i > 0 {
			p.print(",")
			if !p.RemoveWhitespace {
				p.print("\n")
				p.printIndent(indent)
			}
		}
		for j, compound := range complex.Selectors {
			p.printCompoundSelector(compound, j == 0)
		}
	}
}

// printCompoundSelector prints one compound selector, preceded by its
// combinator (or by a single space — the descendant combinator — when it is
// not the first compound selector of the complex selector).
func (p *printer) printCompoundSelector(sel css_ast.CompoundSelector, isFirst bool) {
	if sel.Combinator != "" {
		if !p.RemoveWhitespace {
			p.print(" ")
		}
		p.print(sel.Combinator)
		if !p.RemoveWhitespace {
			p.print(" ")
		}
	} else if !isFirst {
		// The descendant combinator is a single space and cannot be removed
		p.print(" ")
	}

	if sel.TypeSelector != nil {
		p.printNamespacedName(*sel.TypeSelector)
	}

	for _, sub := range sel.SubclassSelectors {
		switch s := sub.(type) {
		case *css_ast.SSHash:
			p.print("#")
			p.print(s.Name)

		case *css_ast.SSClass:
			p.print(".")
			p.print(s.Name)

		case *css_ast.SSAttribute:
			p.print("[")
			p.printNamespacedName(s.NamespacedName)
			p.print(s.MatcherOp)
			p.print(s.MatcherValue)
			if s.MatcherModifier != 0 {
				p.print(" ")
				p.print(string(rune(s.MatcherModifier)))
			}
			p.print("]")

		case *css_ast.SSPseudoClass:
			p.printPseudoClassSelector(*s)
		}
	}

	// The parser consumed the first ":" of the pseudo-element section
	// separately, so restore it here before the pseudo-element names
	if len(sel.PseudoClassSelectors) > 0 {
		p.print(":")
		for _, pseudo := range sel.PseudoClassSelectors {
			p.printPseudoClassSelector(pseudo)
		}
	}
}

// printNamespacedName prints an optionally-namespaced name. The namespace
// separator "|" must not have whitespace around it — "a | b" is not a valid
// type selector — so the compact form is printed regardless of the
// whitespace-removal setting. (Previously pretty mode printed " | ".)
func (p *printer) printNamespacedName(nsName css_ast.NamespacedName) {
	if nsName.NamespacePrefix != nil {
		p.print(*nsName.NamespacePrefix)
		p.print("|")
	}
	p.print(nsName.Name)
}

// printPseudoClassSelector prints ":name" plus "(args)" when function
// arguments are present.
func (p *printer) printPseudoClassSelector(pseudo css_ast.SSPseudoClass) {
	p.print(":")
	p.print(pseudo.Name)

	if len(pseudo.Args) == 0 {
		return
	}
	p.print("(")
	p.printTokens(pseudo.Args)
	p.print(")")
}

// print appends raw text to the output buffer.
func (p *printer) print(text string) {
	p.sb.WriteString(text)
}

// printIndent writes two spaces per indentation level.
func (p *printer) printIndent(indent int) {
	p.sb.WriteString(strings.Repeat("  ", indent))
}

// printToken prints one token's raw source text, collapsing any whitespace
// token to a single space.
func (p *printer) printToken(token css_lexer.Token) {
	if token.Kind == css_lexer.TWhitespace {
		p.print(" ")
		return
	}
	p.print(token.Raw(p.Contents))
}

// printTokens prints a token slice in order (see printToken for how
// whitespace tokens are collapsed).
func (p *printer) printTokens(tokens []css_lexer.Token) {
	for _, t := range tokens {
		p.printToken(t)
	}
}

0 comments on commit 99bafd0

Please sign in to comment.