add a basic css loader (#20)
evanw committed Sep 27, 2020
1 parent e5c80e6 commit 108db32
Showing 8 changed files with 216 additions and 5 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -385,7 +385,7 @@ Advanced options:
--log-level=... Disable logging (info | warning | error | silent,
default info)
--resolve-extensions=... A comma-separated list of implicit extensions
(default ".tsx,.ts,.jsx,.mjs,.cjs,.js,.json")
(default ".tsx,.ts,.jsx,.mjs,.cjs,.js,.css,.json")
--metafile=... Write metadata about the build to a JSON file
--strict Transforms handle edge cases but have more overhead
(enable individually using --strict:X where X is
2 changes: 1 addition & 1 deletion cmd/esbuild/main.go
@@ -53,7 +53,7 @@ Advanced options:
--log-level=... Disable logging (info | warning | error | silent,
default info)
--resolve-extensions=... A comma-separated list of implicit extensions
(default ".tsx,.ts,.jsx,.mjs,.cjs,.js,.json")
(default ".tsx,.ts,.jsx,.mjs,.cjs,.js,.css,.json")
--metafile=... Write metadata about the build to a JSON file
--strict Transforms handle edge cases but have more overhead
(enable individually using --strict:X where X is
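The change above appends ".css" to the implicit extension list, after ".js" and before ".json". For illustration, here is a standalone sketch (not esbuild's actual resolver; names are made up) of how a resolution order like this behaves:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// Illustrative only: given an extensionless import path, try each implicit
// extension in the configured order and return the first regular file found.
var resolveExtensions = []string{".tsx", ".ts", ".jsx", ".mjs", ".cjs", ".js", ".css", ".json"}

func resolveImplicitExtension(base string) (string, bool) {
	for _, ext := range resolveExtensions {
		candidate := base + ext
		if info, err := os.Stat(candidate); err == nil && !info.IsDir() {
			return candidate, true
		}
	}
	return "", false
}

func main() {
	// ".js" comes before ".css", so "./styles" prefers styles.js over styles.css.
	if path, ok := resolveImplicitExtension(filepath.FromSlash("./styles")); ok {
		fmt.Println("resolved to", path)
	}
}
```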
46 changes: 46 additions & 0 deletions internal/bundler/bundler.go
@@ -17,6 +17,8 @@ import (
"unicode/utf8"

"github.com/evanw/esbuild/internal/config"
"github.com/evanw/esbuild/internal/css_ast"
"github.com/evanw/esbuild/internal/css_parser"
"github.com/evanw/esbuild/internal/fs"
"github.com/evanw/esbuild/internal/js_ast"
"github.com/evanw/esbuild/internal/js_lexer"
@@ -77,6 +79,19 @@ func (repr *reprJS) importRecords() []js_ast.ImportRecord {
return repr.ast.ImportRecords
}

type reprCSS struct {
ast css_ast.AST

// If present, this is the JavaScript stub corresponding to this CSS file.
// A JavaScript stub is automatically generated for a CSS file when it's
// imported from a JavaScript file.
jsSourceIndex *uint32
}

func (repr *reprCSS) importRecords() []js_ast.ImportRecord {
return nil
}

type Bundle struct {
fs fs.FS
res resolver.Resolver
@@ -197,6 +212,11 @@ func parseFile(args parseArgs) {
result.file.repr = &reprJS{ast: ast}
result.ok = ok

case config.LoaderCSS:
ast := css_parser.Parse(args.log, source)
result.file.repr = &reprCSS{ast: ast}
result.ok = true

case config.LoaderJSON:
expr, ok := js_parser.ParseJSON(args.log, source, js_parser.ParseJSONOptions{})
ast := js_parser.LazyExportAST(args.log, source, args.options, expr, "")
@@ -671,6 +691,31 @@ func ScanBundle(log logger.Log, fs fs.FS, res resolver.Resolver, entryPaths []st
}
}

// If an import from a JavaScript file targets a CSS file, generate a
// JavaScript stub to ensure that JavaScript files only ever import
// other JavaScript files.
if _, ok := result.file.repr.(*reprJS); ok {
otherFile := &results[*record.SourceIndex].file
if css, ok := otherFile.repr.(*reprCSS); ok {
if css.jsSourceIndex == nil {
sourceIndex := uint32(len(files))
source := logger.Source{
Index: sourceIndex,
PrettyPath: otherFile.source.PrettyPath,
}
ast := js_parser.LazyExportAST(log, source, options, js_ast.Expr{Data: &js_ast.EObject{}}, "")
f := file{
repr: &reprJS{ast: ast},
source: source,
}
files = append(files, f)
results = append(results, parseResult{file: f})
css.jsSourceIndex = &sourceIndex
}
record.SourceIndex = css.jsSourceIndex
}
}

// Generate metadata about each import
if options.AbsMetadataFile != "" {
if isFirstImport {
@@ -713,6 +758,7 @@ func DefaultExtensionToLoaderMap() map[string]config.Loader {
".jsx": config.LoaderJSX,
".ts": config.LoaderTS,
".tsx": config.LoaderTSX,
".css": config.LoaderCSS,
".json": config.LoaderJSON,
".txt": config.LoaderText,
}
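The ScanBundle change above points JavaScript-to-CSS import records at a generated JavaScript stub, and the reprCSS.jsSourceIndex pointer makes sure each CSS file gets at most one stub. A standalone sketch of just that bookkeeping, with illustrative types rather than esbuild's:

```go
package main

import "fmt"

// Illustrative types, not esbuild's: when JavaScript imports a CSS file, the
// import is redirected to a generated JS stub so that JS files only ever
// import other JS files. The pointer below plays the role of jsSourceIndex,
// caching the stub's source index after the first request.
type cssFile struct {
	path          string
	jsSourceIndex *uint32 // source index of the generated stub, once it exists
}

func stubFor(css *cssFile, nextSourceIndex *uint32) uint32 {
	if css.jsSourceIndex == nil {
		index := *nextSourceIndex // allocate a fresh source index for the stub
		*nextSourceIndex = index + 1
		css.jsSourceIndex = &index
	}
	return *css.jsSourceIndex
}

func main() {
	next := uint32(10) // pretend indices 0-9 are already taken by parsed files
	css := &cssFile{path: "styles.css"}

	// Two JS importers of the same CSS file end up sharing one stub
	fmt.Println(stubFor(css, &next)) // 10
	fmt.Println(stubFor(css, &next)) // still 10
}
```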
161 changes: 160 additions & 1 deletion internal/bundler/linker.go
@@ -11,6 +11,7 @@ import (

"github.com/evanw/esbuild/internal/compat"
"github.com/evanw/esbuild/internal/config"
"github.com/evanw/esbuild/internal/css_printer"
"github.com/evanw/esbuild/internal/fs"
"github.com/evanw/esbuild/internal/js_ast"
"github.com/evanw/esbuild/internal/js_lexer"
@@ -275,6 +276,11 @@ type chunkReprJS struct {

func (*chunkReprJS) fileExt() string { return ".js" }

type chunkReprCSS struct {
}

func (*chunkReprCSS) fileExt() string { return ".css" }

// Returns the path of this chunk relative to the output directory. Note:
// this must have OS-independent path separators (i.e. '/' not '\').
func (chunk *chunkInfo) relPath() string {
@@ -2274,6 +2280,8 @@ func (c *linkerContext) computeChunks() []chunkInfo {
switch file.repr.(type) {
case *reprJS:
repr = &chunkReprJS{}
case *reprCSS:
repr = &chunkReprCSS{}
}

if c.options.AbsOutputFile != "" {
@@ -2316,7 +2324,8 @@ func (c *linkerContext) computeChunks() []chunkInfo {

// Figure out which files are in which chunk
for _, sourceIndex := range c.reachableFiles {
switch repr := c.files[sourceIndex].repr.(type) {
file := &c.files[sourceIndex]
switch repr := file.repr.(type) {
case *reprJS:
for _, partMeta := range repr.meta.partMeta {
key := string(partMeta.entryBits.entries)
@@ -2333,6 +2342,21 @@
}
chunk.filesWithPartsInChunk[uint32(sourceIndex)] = true
}

case *reprCSS:
key := string(file.entryBits.entries)
if key == neverReachedKey {
// Ignore this file if it was never reached
continue
}
chunk, ok := chunks[key]
if !ok {
chunk.entryBits = file.entryBits
chunk.filesWithPartsInChunk = make(map[uint32]bool)
chunk.repr = &chunkReprJS{}
chunks[key] = chunk
}
chunk.filesWithPartsInChunk[uint32(sourceIndex)] = true
}
}
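In the reprCSS case above, a file joins whichever chunk shares its entry-point bitset, creating the chunk on first use. A self-contained sketch of that grouping idea, with made-up types:

```go
package main

import "fmt"

// Illustrative sketch of the grouping above: every reachable file carries a
// bitset recording which entry points reach it, and files whose serialized
// bitsets are equal end up in the same chunk.
type sketchChunk struct {
	entryBits string          // serialized entry-point bitset shared by the chunk
	files     map[uint32]bool // source indices assigned to this chunk
}

func groupByEntryBits(entryBitsPerFile []string) map[string]*sketchChunk {
	chunks := map[string]*sketchChunk{}
	for sourceIndex, bits := range entryBitsPerFile {
		if bits == "" {
			continue // never reached from any entry point
		}
		chunk, ok := chunks[bits]
		if !ok {
			chunk = &sketchChunk{entryBits: bits, files: map[uint32]bool{}}
			chunks[bits] = chunk
		}
		chunk.files[uint32(sourceIndex)] = true
	}
	return chunks
}

func main() {
	// Files 0 and 2 are reached only by entry A ("10"); file 1 by A and B ("11").
	chunks := groupByEntryBits([]string{"10", "11", "10"})
	fmt.Println(len(chunks)) // 2
}
```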

@@ -2435,6 +2459,11 @@ func (c *linkerContext) chunkFileOrder(chunk *chunkInfo) []uint32 {
suffixOrder = append(suffixOrder, sourceIndex)
}
}

case *reprCSS:
if isFileInThisChunk {
suffixOrder = append(suffixOrder, sourceIndex)
}
}
}

@@ -3352,6 +3381,136 @@ func (repr *chunkReprJS) generate(c *linkerContext, chunk *chunkInfo) func([]js_
}
}

type compileResultCSS struct {
printedCSS string
sourceIndex uint32
}
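The generate method below prints each CSS file on its own goroutine, writing into a result slot reserved up front and waiting on a sync.WaitGroup before joining the pieces. A minimal standalone sketch of that fan-out pattern (names are illustrative):

```go
package main

import (
	"fmt"
	"strings"
	"sync"
)

// Illustrative only: one pre-allocated slot per file, one goroutine per file,
// and a WaitGroup to make sure every slot is filled before the results are
// read. Writing to distinct slice elements from different goroutines is safe.
func printAllInParallel(sources []string) []string {
	results := make([]string, len(sources))
	waitGroup := sync.WaitGroup{}
	for i, source := range sources {
		waitGroup.Add(1)
		go func(i int, source string) {
			defer waitGroup.Done()
			results[i] = strings.TrimSpace(source) + "\n" // stand-in for css_printer.Print
		}(i, source)
	}
	waitGroup.Wait()
	return results
}

func main() {
	printed := printAllInParallel([]string{"a { color: red }", "b { color: blue }"})
	fmt.Print(strings.Join(printed, ""))
}
```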

func (repr *chunkReprCSS) generate(c *linkerContext, chunk *chunkInfo) func([]js_ast.ImportRecord) []OutputFile {
var results []OutputFile
filesInChunkInOrder := c.chunkFileOrder(chunk)
compileResults := make([]compileResultCSS, 0, len(filesInChunkInOrder))

// Generate CSS for each file in parallel
waitGroup := sync.WaitGroup{}
for _, sourceIndex := range filesInChunkInOrder {
// Skip the runtime in test output
if sourceIndex == runtime.SourceIndex && c.options.OmitRuntimeForTests {
continue
}

// Each file may optionally contain an additional file to be copied to the
// output directory. This is used by the "file" loader.
if additionalFile := c.files[sourceIndex].additionalFile; additionalFile != nil {
results = append(results, *additionalFile)
}

// Create a goroutine for this file
compileResults = append(compileResults, compileResultCSS{})
compileResult := &compileResults[len(compileResults)-1]
waitGroup.Add(1)
go func(sourceIndex uint32, compileResult *compileResultCSS) {
file := &c.files[sourceIndex]
repr := file.repr.(*reprCSS)
css := css_printer.Print(repr.ast, css_printer.Options{
Contents: file.source.Contents,
RemoveWhitespace: c.options.RemoveWhitespace,
})
*compileResult = compileResultCSS{
printedCSS: css,
sourceIndex: sourceIndex,
}
waitGroup.Done()
}(sourceIndex, compileResult)
}

// Wait for cross-chunk import records before continuing
return func(crossChunkImportRecords []js_ast.ImportRecord) []OutputFile {
waitGroup.Wait()
j := js_printer.Joiner{}

// Start the metadata
jMeta := js_printer.Joiner{}
if c.options.AbsMetadataFile != "" {
isFirstMeta := true
jMeta.AddString("{\n \"imports\": [")
for _, record := range crossChunkImportRecords {
if isFirstMeta {
isFirstMeta = false
} else {
jMeta.AddString(",")
}
importAbsPath := c.fs.Join(c.options.AbsOutputDir, chunk.relDir, record.Path.Text)
jMeta.AddString(fmt.Sprintf("\n {\n \"path\": %s\n }",
js_printer.QuoteForJSON(c.res.PrettyPath(logger.Path{Text: importAbsPath, Namespace: "file"}))))
}
if !isFirstMeta {
jMeta.AddString("\n ")
}
jMeta.AddString("],\n \"inputs\": {")
}
isFirstMeta := true

// Concatenate the generated CSS chunks together
newlineBeforeComment := false
for _, compileResult := range compileResults {
if c.options.Mode == config.ModeBundle && !c.options.RemoveWhitespace {
if newlineBeforeComment {
j.AddString("\n")
}
j.AddString(fmt.Sprintf("// %s\n", c.files[compileResult.sourceIndex].source.PrettyPath))
}
if len(compileResult.printedCSS) > 0 {
newlineBeforeComment = true
}
j.AddString(compileResult.printedCSS)

// Include this file in the metadata
if c.options.AbsMetadataFile != "" {
if isFirstMeta {
isFirstMeta = false
} else {
jMeta.AddString(",")
}
jMeta.AddString(fmt.Sprintf("\n %s: {\n \"bytesInOutput\": %d\n }",
js_printer.QuoteForJSON(c.files[compileResult.sourceIndex].source.PrettyPath),
len(compileResult.printedCSS)))
}
}

// Make sure the file ends with a newline
if j.Length() > 0 && j.LastByte() != '\n' {
j.AddString("\n")
}

// The CSS contents are done now that the source map comment is in
cssContents := j.Done()

// Figure out the base name for this chunk now that the content hash is known
if chunk.baseNameOrEmpty == "" {
hash := hashForFileName(cssContents)
chunk.baseNameOrEmpty = "chunk." + hash + c.options.OutputExtensionFor(".css")
}

// End the metadata
var jsonMetadataChunk []byte
if c.options.AbsMetadataFile != "" {
if !isFirstMeta {
jMeta.AddString("\n ")
}
jMeta.AddString(fmt.Sprintf("},\n \"bytes\": %d\n }", len(cssContents)))
jsonMetadataChunk = jMeta.Done()
}

results = append(results, OutputFile{
AbsPath: c.fs.Join(c.options.AbsOutputDir, chunk.relPath()),
Contents: cssContents,
jsonMetadataChunk: jsonMetadataChunk,
})
return results
}
}
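The metadata strings assembled by generate describe one output chunk: its cross-chunk imports, the inputs that contributed bytes to it, and its total size. A sketch of that per-chunk shape using plain structs and encoding/json; the layout is inferred from the formatting calls above, so treat it as an approximation rather than the definitive metafile schema:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Approximate per-chunk metadata shape, inferred from the string building above.
type metaImport struct {
	Path string `json:"path"`
}

type metaInput struct {
	BytesInOutput int `json:"bytesInOutput"`
}

type metaChunk struct {
	Imports []metaImport         `json:"imports"`
	Inputs  map[string]metaInput `json:"inputs"`
	Bytes   int                  `json:"bytes"`
}

func main() {
	chunk := metaChunk{
		Imports: []metaImport{},
		Inputs:  map[string]metaInput{"src/app.css": {BytesInOutput: 42}},
		Bytes:   43,
	}
	out, _ := json.MarshalIndent(chunk, "", "  ")
	fmt.Println(string(out))
}
```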

func (offset *lineColumnOffset) advanceBytes(bytes []byte) {
for i, n := 0, len(bytes); i < n; i++ {
if bytes[i] == '\n' {
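To summarize the concatenation step in generate: when bundling without whitespace removal, each file's printed CSS is preceded by a comment naming its source, non-empty outputs are separated by a blank line, and the chunk is forced to end with a newline. A standalone sketch of that joining logic (simplified from the loop above):

```go
package main

import (
	"fmt"
	"strings"
)

// Illustrative only: stitch per-file CSS into one chunk, mirroring the
// comment banners and trailing-newline handling in generate.
func joinCSSChunk(prettyPaths, printed []string) string {
	j := strings.Builder{}
	newlineBeforeComment := false
	for i, css := range printed {
		if newlineBeforeComment {
			j.WriteString("\n")
		}
		j.WriteString(fmt.Sprintf("// %s\n", prettyPaths[i]))
		if len(css) > 0 {
			newlineBeforeComment = true
		}
		j.WriteString(css)
	}
	out := j.String()
	if len(out) > 0 && !strings.HasSuffix(out, "\n") {
		out += "\n"
	}
	return out
}

func main() {
	fmt.Print(joinCSSChunk(
		[]string{"src/a.css", "src/b.css"},
		[]string{"a { color: red }\n", "b { color: blue }\n"},
	))
}
```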
1 change: 1 addition & 0 deletions internal/config/config.go
@@ -77,6 +77,7 @@ const (
LoaderDataURL
LoaderFile
LoaderBinary
LoaderCSS
)

func (loader Loader) IsTypeScript() bool {
1 change: 1 addition & 0 deletions pkg/api/api.go
@@ -113,6 +113,7 @@ const (
LoaderDataURL
LoaderFile
LoaderBinary
LoaderCSS
)

type Platform uint8
4 changes: 3 additions & 1 deletion pkg/api/api_impl.go
@@ -118,6 +118,8 @@ func validateLoader(value Loader) config.Loader {
return config.LoaderFile
case LoaderBinary:
return config.LoaderBinary
case LoaderCSS:
return config.LoaderCSS
default:
panic("Invalid loader")
}
@@ -224,7 +226,7 @@ func isValidExtension(ext string) bool {

func validateResolveExtensions(log logger.Log, order []string) []string {
if order == nil {
return []string{".tsx", ".ts", ".jsx", ".mjs", ".cjs", ".js", ".json"}
return []string{".tsx", ".ts", ".jsx", ".mjs", ".cjs", ".js", ".css", ".json"}
}
for _, ext := range order {
if !isValidExtension(ext) {
4 changes: 3 additions & 1 deletion pkg/cli/cli_impl.go
@@ -424,6 +424,8 @@ func parseLoader(text string) (api.Loader, error) {
return api.LoaderTS, nil
case "tsx":
return api.LoaderTSX, nil
case "css":
return api.LoaderCSS, nil
case "json":
return api.LoaderJSON, nil
case "text":
@@ -438,7 +440,7 @@
return api.LoaderBinary, nil
default:
return 0, fmt.Errorf("Invalid loader: %q (valid: "+
"js, jsx, ts, tsx, json, text, base64, dataurl, file, binary)", text)
"js, jsx, ts, tsx, css, json, text, base64, dataurl, file, binary)", text)
}
}

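parseLoader above maps a loader name from the command line to an api.Loader value, and DefaultExtensionToLoaderMap in bundler.go now maps ".css" to the new loader. A generic sketch of extension-based loader selection, with illustrative types and a hypothetical fallback rather than esbuild's actual dispatch:

```go
package main

import (
	"fmt"
	"path"
)

// Illustrative only: pick a loader for a file from its extension, falling
// back to a default when the extension is not mapped.
type loader int

const (
	loaderJS loader = iota
	loaderCSS
	loaderJSON
	loaderText
)

var extensionToLoader = map[string]loader{
	".js":   loaderJS,
	".css":  loaderCSS,
	".json": loaderJSON,
	".txt":  loaderText,
}

func loaderForPath(p string) loader {
	if l, ok := extensionToLoader[path.Ext(p)]; ok {
		return l
	}
	return loaderJS // hypothetical default for this sketch
}

func main() {
	fmt.Println(loaderForPath("src/app.css") == loaderCSS) // true
}
```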