diff --git a/README.md b/README.md
index dbf8e41d34d..d30ddbb5e3c 100644
--- a/README.md
+++ b/README.md
@@ -385,7 +385,7 @@ Advanced options:
   --log-level=...           Disable logging (info | warning | error | silent,
                             default info)
   --resolve-extensions=...  A comma-separated list of implicit extensions
-                            (default ".tsx,.ts,.jsx,.mjs,.cjs,.js,.json")
+                            (default ".tsx,.ts,.jsx,.mjs,.cjs,.js,.css,.json")
   --metafile=...            Write metadata about the build to a JSON file
   --strict                  Transforms handle edge cases but have more overhead
                             (enable individually using --strict:X where X is
diff --git a/cmd/esbuild/main.go b/cmd/esbuild/main.go
index 9dae579e5dc..d0e7cf91a6b 100644
--- a/cmd/esbuild/main.go
+++ b/cmd/esbuild/main.go
@@ -53,7 +53,7 @@ Advanced options:
   --log-level=...           Disable logging (info | warning | error | silent,
                             default info)
   --resolve-extensions=...  A comma-separated list of implicit extensions
-                            (default ".tsx,.ts,.jsx,.mjs,.cjs,.js,.json")
+                            (default ".tsx,.ts,.jsx,.mjs,.cjs,.js,.css,.json")
   --metafile=...            Write metadata about the build to a JSON file
   --strict                  Transforms handle edge cases but have more overhead
                             (enable individually using --strict:X where X is
diff --git a/internal/bundler/bundler.go b/internal/bundler/bundler.go
index 93501bef10b..e80408c86a3 100644
--- a/internal/bundler/bundler.go
+++ b/internal/bundler/bundler.go
@@ -17,6 +17,8 @@ import (
 	"unicode/utf8"
 
 	"github.com/evanw/esbuild/internal/config"
+	"github.com/evanw/esbuild/internal/css_ast"
+	"github.com/evanw/esbuild/internal/css_parser"
 	"github.com/evanw/esbuild/internal/fs"
 	"github.com/evanw/esbuild/internal/js_ast"
 	"github.com/evanw/esbuild/internal/js_lexer"
@@ -77,6 +79,19 @@ func (repr *reprJS) importRecords() []js_ast.ImportRecord {
 	return repr.ast.ImportRecords
 }
 
+type reprCSS struct {
+	ast css_ast.AST
+
+	// If present, this is the JavaScript stub corresponding to this CSS file.
+	// A JavaScript stub is automatically generated for a CSS file when it's
+	// imported from a JavaScript file.
+	jsSourceIndex *uint32
+}
+
+func (repr *reprCSS) importRecords() []js_ast.ImportRecord {
+	return nil
+}
+
 type Bundle struct {
 	fs  fs.FS
 	res resolver.Resolver
@@ -197,6 +212,11 @@ func parseFile(args parseArgs) {
 		result.file.repr = &reprJS{ast: ast}
 		result.ok = ok
 
+	case config.LoaderCSS:
+		ast := css_parser.Parse(args.log, source)
+		result.file.repr = &reprCSS{ast: ast}
+		result.ok = true
+
 	case config.LoaderJSON:
 		expr, ok := js_parser.ParseJSON(args.log, source, js_parser.ParseJSONOptions{})
 		ast := js_parser.LazyExportAST(args.log, source, args.options, expr, "")
@@ -671,6 +691,31 @@ func ScanBundle(log logger.Log, fs fs.FS, res resolver.Resolver, entryPaths []st
 				}
 			}
 
+			// If an import from a JavaScript file targets a CSS file, generate a
+			// JavaScript stub to ensure that JavaScript files only ever import
+			// other JavaScript files.
+			if _, ok := result.file.repr.(*reprJS); ok {
+				otherFile := &results[*record.SourceIndex].file
+				if css, ok := otherFile.repr.(*reprCSS); ok {
+					if css.jsSourceIndex == nil {
+						sourceIndex := uint32(len(files))
+						source := logger.Source{
+							Index:      sourceIndex,
+							PrettyPath: otherFile.source.PrettyPath,
+						}
+						ast := js_parser.LazyExportAST(log, source, options, js_ast.Expr{Data: &js_ast.EObject{}}, "")
+						f := file{
+							repr:   &reprJS{ast: ast},
+							source: source,
+						}
+						files = append(files, f)
+						results = append(results, parseResult{file: f})
+						css.jsSourceIndex = &sourceIndex
+					}
+					record.SourceIndex = css.jsSourceIndex
+				}
+			}
+
 			// Generate metadata about each import
 			if options.AbsMetadataFile != "" {
 				if isFirstImport {
@@ -713,6 +758,7 @@ func DefaultExtensionToLoaderMap() map[string]config.Loader {
 		".jsx":  config.LoaderJSX,
 		".ts":   config.LoaderTS,
 		".tsx":  config.LoaderTSX,
+		".css":  config.LoaderCSS,
 		".json": config.LoaderJSON,
 		".txt":  config.LoaderText,
 	}
diff --git a/internal/bundler/linker.go b/internal/bundler/linker.go
index eb37b7315b3..9cd41e334e7 100644
--- a/internal/bundler/linker.go
+++ b/internal/bundler/linker.go
@@ -11,6 +11,7 @@ import (
 	"github.com/evanw/esbuild/internal/compat"
 	"github.com/evanw/esbuild/internal/config"
+	"github.com/evanw/esbuild/internal/css_printer"
 	"github.com/evanw/esbuild/internal/fs"
 	"github.com/evanw/esbuild/internal/js_ast"
 	"github.com/evanw/esbuild/internal/js_lexer"
@@ -275,6 +276,11 @@ type chunkReprJS struct {
 
 func (*chunkReprJS) fileExt() string { return ".js" }
 
+type chunkReprCSS struct {
+}
+
+func (*chunkReprCSS) fileExt() string { return ".css" }
+
 // Returns the path of this chunk relative to the output directory. Note:
 // this must have OS-independent path separators (i.e. '/' not '\').
 func (chunk *chunkInfo) relPath() string {
@@ -2274,6 +2280,8 @@ func (c *linkerContext) computeChunks() []chunkInfo {
 		switch file.repr.(type) {
 		case *reprJS:
 			repr = &chunkReprJS{}
+		case *reprCSS:
+			repr = &chunkReprCSS{}
 		}
 
 		if c.options.AbsOutputFile != "" {
@@ -2316,7 +2324,8 @@
 
 	// Figure out which files are in which chunk
 	for _, sourceIndex := range c.reachableFiles {
-		switch repr := c.files[sourceIndex].repr.(type) {
+		file := &c.files[sourceIndex]
+		switch repr := file.repr.(type) {
 		case *reprJS:
 			for _, partMeta := range repr.meta.partMeta {
 				key := string(partMeta.entryBits.entries)
@@ -2333,6 +2342,21 @@
 				}
 				chunk.filesWithPartsInChunk[uint32(sourceIndex)] = true
 			}
+
+		case *reprCSS:
+			key := string(file.entryBits.entries)
+			if key == neverReachedKey {
+				// Ignore this file if it was never reached
+				continue
+			}
+			chunk, ok := chunks[key]
+			if !ok {
+				chunk.entryBits = file.entryBits
+				chunk.filesWithPartsInChunk = make(map[uint32]bool)
+				chunk.repr = &chunkReprJS{}
+				chunks[key] = chunk
+			}
+			chunk.filesWithPartsInChunk[uint32(sourceIndex)] = true
 		}
 	}
 
@@ -2435,6 +2459,11 @@ func (c *linkerContext) chunkFileOrder(chunk *chunkInfo) []uint32 {
 					suffixOrder = append(suffixOrder, sourceIndex)
 				}
 			}
+
+		case *reprCSS:
+			if isFileInThisChunk {
+				suffixOrder = append(suffixOrder, sourceIndex)
+			}
 		}
 	}
 
@@ -3352,6 +3381,136 @@ func (repr *chunkReprJS) generate(c *linkerContext, chunk *chunkInfo) func([]js_
 	}
 }
 
+type compileResultCSS struct {
+	printedCSS  string
+	sourceIndex uint32
+}
+
+func (repr *chunkReprCSS) generate(c *linkerContext, chunk *chunkInfo) func([]js_ast.ImportRecord) []OutputFile {
+	var results []OutputFile
+	filesInChunkInOrder := c.chunkFileOrder(chunk)
+	compileResults := make([]compileResultCSS, 0, len(filesInChunkInOrder))
+
+	// Generate CSS for each file in parallel
+	waitGroup := sync.WaitGroup{}
+	for _, sourceIndex := range filesInChunkInOrder {
+		// Skip the runtime in test output
+		if sourceIndex == runtime.SourceIndex && c.options.OmitRuntimeForTests {
+			continue
+		}
+
+		// Each file may optionally contain an additional file to be copied to the
+		// output directory. This is used by the "file" loader.
+		if additionalFile := c.files[sourceIndex].additionalFile; additionalFile != nil {
+			results = append(results, *additionalFile)
+		}
+
+		// Create a goroutine for this file
+		compileResults = append(compileResults, compileResultCSS{})
+		compileResult := &compileResults[len(compileResults)-1]
+		waitGroup.Add(1)
+		go func(sourceIndex uint32, compileResult *compileResultCSS) {
+			file := &c.files[sourceIndex]
+			repr := file.repr.(*reprCSS)
+			css := css_printer.Print(repr.ast, css_printer.Options{
+				Contents:         file.source.Contents,
+				RemoveWhitespace: c.options.RemoveWhitespace,
+			})
+			*compileResult = compileResultCSS{
+				printedCSS:  css,
+				sourceIndex: sourceIndex,
+			}
+			waitGroup.Done()
+		}(sourceIndex, compileResult)
+	}
+
+	// Wait for cross-chunk import records before continuing
+	return func(crossChunkImportRecords []js_ast.ImportRecord) []OutputFile {
+		waitGroup.Wait()
+		j := js_printer.Joiner{}
+
+		// Start the metadata
+		jMeta := js_printer.Joiner{}
+		if c.options.AbsMetadataFile != "" {
+			isFirstMeta := true
+			jMeta.AddString("{\n \"imports\": [")
+			for _, record := range crossChunkImportRecords {
+				if isFirstMeta {
+					isFirstMeta = false
+				} else {
+					jMeta.AddString(",")
+				}
+				importAbsPath := c.fs.Join(c.options.AbsOutputDir, chunk.relDir, record.Path.Text)
+				jMeta.AddString(fmt.Sprintf("\n {\n \"path\": %s\n }",
+					js_printer.QuoteForJSON(c.res.PrettyPath(logger.Path{Text: importAbsPath, Namespace: "file"}))))
+			}
+			if !isFirstMeta {
+				jMeta.AddString("\n ")
+			}
+			jMeta.AddString("],\n \"inputs\": {")
+		}
+		isFirstMeta := true
+
+		// Concatenate the generated CSS chunks together
+		newlineBeforeComment := false
+		for _, compileResult := range compileResults {
+			if c.options.Mode == config.ModeBundle && !c.options.RemoveWhitespace {
+				if newlineBeforeComment {
+					j.AddString("\n")
+				}
+				j.AddString(fmt.Sprintf("// %s\n", c.files[compileResult.sourceIndex].source.PrettyPath))
+			}
+			if len(compileResult.printedCSS) > 0 {
+				newlineBeforeComment = true
+			}
+			j.AddString(compileResult.printedCSS)
+
+			// Include this file in the metadata
+			if c.options.AbsMetadataFile != "" {
+				if isFirstMeta {
+					isFirstMeta = false
+				} else {
+					jMeta.AddString(",")
+				}
+				jMeta.AddString(fmt.Sprintf("\n %s: {\n \"bytesInOutput\": %d\n }",
+					js_printer.QuoteForJSON(c.files[compileResult.sourceIndex].source.PrettyPath),
+					len(compileResult.printedCSS)))
+			}
+		}
+
+		// Make sure the file ends with a newline
+		if j.Length() > 0 && j.LastByte() != '\n' {
+			j.AddString("\n")
+		}
+
+		// The CSS contents are done now that the source map comment is in
+		cssContents := j.Done()
+
+		// Figure out the base name for this chunk now that the content hash is known
+		if chunk.baseNameOrEmpty == "" {
+			hash := hashForFileName(cssContents)
+			chunk.baseNameOrEmpty = "chunk." + hash + c.options.OutputExtensionFor(".css")
+		}
+
+		// End the metadata
+		var jsonMetadataChunk []byte
+		if c.options.AbsMetadataFile != "" {
+			if !isFirstMeta {
+				jMeta.AddString("\n ")
+			}
+			jMeta.AddString(fmt.Sprintf("},\n \"bytes\": %d\n }", len(cssContents)))
+			jsonMetadataChunk = jMeta.Done()
+		}
+
+		results = append(results, OutputFile{
+			AbsPath:           c.fs.Join(c.options.AbsOutputDir, chunk.relPath()),
+			Contents:          cssContents,
+			jsonMetadataChunk: jsonMetadataChunk,
+		})
+		return results
+	}
+}
+
 func (offset *lineColumnOffset) advanceBytes(bytes []byte) {
 	for i, n := 0, len(bytes); i < n; i++ {
 		if bytes[i] == '\n' {
diff --git a/internal/config/config.go b/internal/config/config.go
index 24d98d6b94d..59b4f8bed69 100644
--- a/internal/config/config.go
+++ b/internal/config/config.go
@@ -77,6 +77,7 @@ const (
 	LoaderDataURL
 	LoaderFile
 	LoaderBinary
+	LoaderCSS
 )
 
 func (loader Loader) IsTypeScript() bool {
diff --git a/pkg/api/api.go b/pkg/api/api.go
index 49ff205a89a..da25aa4ac8d 100644
--- a/pkg/api/api.go
+++ b/pkg/api/api.go
@@ -113,6 +113,7 @@ const (
 	LoaderDataURL
 	LoaderFile
 	LoaderBinary
+	LoaderCSS
 )
 
 type Platform uint8
diff --git a/pkg/api/api_impl.go b/pkg/api/api_impl.go
index aa683d8e09a..ace94fd8eda 100644
--- a/pkg/api/api_impl.go
+++ b/pkg/api/api_impl.go
@@ -118,6 +118,8 @@ func validateLoader(value Loader) config.Loader {
 		return config.LoaderFile
 	case LoaderBinary:
 		return config.LoaderBinary
+	case LoaderCSS:
+		return config.LoaderCSS
 	default:
 		panic("Invalid loader")
 	}
@@ -224,7 +226,7 @@ func isValidExtension(ext string) bool {
 
 func validateResolveExtensions(log logger.Log, order []string) []string {
 	if order == nil {
-		return []string{".tsx", ".ts", ".jsx", ".mjs", ".cjs", ".js", ".json"}
+		return []string{".tsx", ".ts", ".jsx", ".mjs", ".cjs", ".js", ".css", ".json"}
 	}
 	for _, ext := range order {
 		if !isValidExtension(ext) {
diff --git a/pkg/cli/cli_impl.go b/pkg/cli/cli_impl.go
index fbb93282b4f..e1ac1b80e62 100644
--- a/pkg/cli/cli_impl.go
+++ b/pkg/cli/cli_impl.go
@@ -424,6 +424,8 @@ func parseLoader(text string) (api.Loader, error) {
 		return api.LoaderTS, nil
 	case "tsx":
 		return api.LoaderTSX, nil
+	case "css":
+		return api.LoaderCSS, nil
 	case "json":
 		return api.LoaderJSON, nil
 	case "text":
@@ -438,7 +440,7 @@ func parseLoader(text string) (api.Loader, error) {
 		return api.LoaderBinary, nil
 	default:
 		return 0, fmt.Errorf("Invalid loader: %q (valid: "+
-			"js, jsx, ts, tsx, json, text, base64, dataurl, file, binary)", text)
+			"js, jsx, ts, tsx, css, json, text, base64, dataurl, file, binary)", text)
 	}
 }
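
Taken together, the patch registers ".css" as a default resolve extension, maps it to the new LoaderCSS, parses stylesheets with css_parser, generates a JavaScript stub whenever a CSS file is imported from JavaScript, and lets the linker emit a separate .css chunk through chunkReprCSS.generate. The sketch below is a rough illustration of exercising the feature from esbuild's public Go API; the BuildOptions fields used (EntryPoints, Bundle, Outfile, Write) come from pkg/api, while the file names and error handling are hypothetical and not part of this change.

package main

import (
	"fmt"
	"os"

	"github.com/evanw/esbuild/pkg/api"
)

func main() {
	// app.js (hypothetical) contains: import "./styles.css"
	// With the CSS loader wired in, the import resolves via the new ".css"
	// default extension, the stylesheet is parsed by the CSS parser, and the
	// linker emits a separate .css output chunk next to the JavaScript bundle.
	result := api.Build(api.BuildOptions{
		EntryPoints: []string{"app.js"},
		Bundle:      true,
		Outfile:     "out/app.js",
		Write:       true,
	})
	if len(result.Errors) > 0 {
		fmt.Fprintf(os.Stderr, "build failed with %d error(s)\n", len(result.Errors))
		os.Exit(1)
	}
}

On the command line the rough equivalent is `esbuild app.js --bundle --outfile=out/app.js`, since ".css" now participates in default extension resolution and needs no explicit --loader flag.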