diff --git a/.changeset/add_new_option_javascriptjsxeverywhere.md b/.changeset/add_new_option_javascriptjsxeverywhere.md new file mode 100644 index 000000000000..dd4844604b6f --- /dev/null +++ b/.changeset/add_new_option_javascriptjsxeverywhere.md @@ -0,0 +1,9 @@ +--- +"@biomejs/biome": minor +--- + +Add new option `javascript.parser.jsxEverywhere`. This new option allows you to control whether Biome should expect JSX syntax in `.js`/`.ts` files. + +When `jsxEverywhere` is set to `false`, having JSX syntax like `<div></div>
` inside `.js`/`.ts` files will result in a **parsing error**. + +This option defaults to `true`. diff --git a/.changeset/add_the_new_rule_nofloatingpromises.md b/.changeset/add_the_new_rule_nofloatingpromises.md new file mode 100644 index 000000000000..3a80e6da2c43 --- /dev/null +++ b/.changeset/add_the_new_rule_nofloatingpromises.md @@ -0,0 +1,5 @@ +--- +"@biomejs/biome": minor +--- + +Add the new rule [`noFloatingPromises`](https://biomejs.dev/linter/rules/no-floating-promises). diff --git a/.changeset/add_the_new_rule_noimportcycles.md b/.changeset/add_the_new_rule_noimportcycles.md new file mode 100644 index 000000000000..b0296c588606 --- /dev/null +++ b/.changeset/add_the_new_rule_noimportcycles.md @@ -0,0 +1,5 @@ +--- +"@biomejs/biome": minor +--- + +Add the new rule [`noImportCycles`](https://biomejs.dev/linter/rules/no-import-cycles). diff --git a/.changeset/add_the_new_rule_notsignorehttps.md b/.changeset/add_the_new_rule_notsignorehttps.md new file mode 100644 index 000000000000..6999a8a84732 --- /dev/null +++ b/.changeset/add_the_new_rule_notsignorehttps.md @@ -0,0 +1,5 @@ +--- +"@biomejs/biome": minor +--- + +Add the new rule [`noTsIgnore`](https://biomejs.dev/linter/rules/no-ts-ignore). diff --git a/.changeset/add_the_new_rule_nounwantedpolyfillio.md b/.changeset/add_the_new_rule_nounwantedpolyfillio.md new file mode 100644 index 000000000000..e8fe8d8c1ce9 --- /dev/null +++ b/.changeset/add_the_new_rule_nounwantedpolyfillio.md @@ -0,0 +1,5 @@ +--- +"@biomejs/biome": minor +--- + +Add the new rule [`noUnwantedPolyfillio`](https://biomejs.dev/linter/rules/no-unwanted-polyfillio). diff --git a/.changeset/add_whitespace_after_css_selecters_preceded_by_comment.md b/.changeset/add_whitespace_after_css_selecters_preceded_by_comment.md new file mode 100644 index 000000000000..ff9ad381b107 --- /dev/null +++ b/.changeset/add_whitespace_after_css_selecters_preceded_by_comment.md @@ -0,0 +1,5 @@ +--- +"@biomejs/biome": patch +--- + +Fix [#5001](https://github.com/biomejs/biome/issues/5001), where the CSS formatter removes whitespace from selector preceded by a comment diff --git a/.changeset/added_a_json_format_option_expandslists.md b/.changeset/added_a_json_format_option_expandslists.md new file mode 100644 index 000000000000..67f1081bbaf5 --- /dev/null +++ b/.changeset/added_a_json_format_option_expandslists.md @@ -0,0 +1,5 @@ +--- +"@biomejs/biome": minor +--- + +Added a JSON format option `expand`. The option `json.formatter.expand` allows to enforce the formatting of arrays and objects on multiple lines, regardless of their length. diff --git a/.changeset/better_control_over_linter_groups.md b/.changeset/better_control_over_linter_groups.md new file mode 100644 index 000000000000..367420e91364 --- /dev/null +++ b/.changeset/better_control_over_linter_groups.md @@ -0,0 +1,30 @@ +--- +"@biomejs/biome": minor +--- + +Linter groups now accept new options to enable/disable all rules that belong to a group, and control the severity +of the rules that belong to those groups. 
+ +For example, you can downgrade the severity of rules that belong to `"style"` to emit `"info"` diagnostics: + +```json +{ + "linter": { + "rules": { + "style": "info" + } + } +} +``` + +You can also enable all rules that belong to a group using the default severity of the rule using the `"on"` option: + +```json +{ + "linter": { + "rules": { + "complexity": "on" + } + } +} +``` diff --git a/.changeset/biome_assist.md b/.changeset/biome_assist.md new file mode 100644 index 000000000000..97cd20cd5b71 --- /dev/null +++ b/.changeset/biome_assist.md @@ -0,0 +1,53 @@ +--- +"@biomejs/biome": minor +--- + +Biome assist is a new feature of the Biome analyzer. The assist is meant to provide **actions**. Actions differ from linter rules in that they aren't meant to signal errors. + +The assist will provide code actions that users can opt into via configuration or via IDEs/editors, using the Language Server Protocol. + +The assist **is enabled by default**. However, you can turn if off via configuration: + +```json +{ + "assist": { + "enabled": false + } +} +``` + +You can turn on the actions that you want to use in your configuration. For example, you can enable the `useSortedKeys` action like this: + +```json +{ + "assist": { + "actions": { + "source": { + "useSortedKeys": "on" + } + } + } +} +``` + +Alternatively, IDE/editor users can decide which action to apply on save *directly from the editor settings*, as long as the assist is enabled. + +For example, in VS Code you can apply the `useSortedKeys` action when saving a file by adding the following snippet in `settings.json`: + +```json +{ + "editor.codeActionsOnSave": { + "source.biome.useSortedKeys": "explicit" + } +} +``` + +In Zed, you can achieve the same by adding the following snippet in `~/.config/zed/settings.json`: + +```json +{ + "code_actions_on_format": { + "source.biome.useSortedKeys": true + } +} +``` diff --git a/.changeset/biome_logs_a_warning_in_case_a_folder_contains_biomejson_and_biomejsonc_and_it_will_use_biomejson_by_default.md b/.changeset/biome_logs_a_warning_in_case_a_folder_contains_biomejson_and_biomejsonc_and_it_will_use_biomejson_by_default.md new file mode 100644 index 000000000000..ac4d506100b2 --- /dev/null +++ b/.changeset/biome_logs_a_warning_in_case_a_folder_contains_biomejson_and_biomejsonc_and_it_will_use_biomejson_by_default.md @@ -0,0 +1,5 @@ +--- +"@biomejs/biome": patch +--- + +Biome logs a warning in case a folder contains `biome.json` and `biome.jsonc`, and it will use `biome.json` by default. diff --git a/.changeset/biome_migrate_eslint_rule_overriding.md b/.changeset/biome_migrate_eslint_rule_overriding.md new file mode 100644 index 000000000000..91fc475b789f --- /dev/null +++ b/.changeset/biome_migrate_eslint_rule_overriding.md @@ -0,0 +1,38 @@ +--- +"@biomejs/biome": minor +--- + +Biome migrate eslint outputs a better overriding behavior. + +A Biome rule can have multiple ESLint equivalent rules. +For example, [useLiteralKeys](https://biomejs.dev/linter/rules/use-literal-keys/) has two ESLint equivalent rules: [dot-notation](https://eslint.org/docs/latest/rules/dot-notation) and [@typescript-eslint/dot-notation](https://typescript-eslint.io/rules/dot-notation/). + +Previously, Biome wouldn't always enable a Biome rule even if one of its equivalent rules was enabled. +Now Biome uses the higher severity level of all the equivalent ESLint rules to set the severity level of the Biome rule. + +The following ESLint configuration... 
+ +```json +{ + "rules": { + "@typescript-eslint/dot-notation": "error", + "dot-notation": "off" + } +} +``` + +...is now migrated to... + +```json +{ + "linter": { + "rules": { + "complexity": { + "useLiteralKeys": "error" + } + } + } +} +``` + +...because `error` is higher than `off`. diff --git a/.changeset/biome_now_resolves_globs_and_paths_from_the_configuration_before_paths_and_globs_were_resolved_from_the_working_directory.md b/.changeset/biome_now_resolves_globs_and_paths_from_the_configuration_before_paths_and_globs_were_resolved_from_the_working_directory.md new file mode 100644 index 000000000000..1e653d38b4a8 --- /dev/null +++ b/.changeset/biome_now_resolves_globs_and_paths_from_the_configuration_before_paths_and_globs_were_resolved_from_the_working_directory.md @@ -0,0 +1,5 @@ +--- +"@biomejs/biome": major +--- + +Biome now resolves globs and paths from the configuration. Before, paths and globs were resolved from the working directory. diff --git a/.changeset/code_actions_via_ideeditor.md b/.changeset/code_actions_via_ideeditor.md new file mode 100644 index 000000000000..030e1720ae60 --- /dev/null +++ b/.changeset/code_actions_via_ideeditor.md @@ -0,0 +1,35 @@ +--- +"@biomejs/biome": minor +--- + +Biome users can now configure code actions from linter rules as well as assist actions directly in the settings of their IDE/editor. + +For example, let's consider the lint rule [`noSwitchDeclarations`](https://biomejs.dev/linter/rules/no-switch-declarations/), which has an unsafe fix. +Previously, if you wanted to use this rule, you were "forced" to enable it via configuration, and if you wanted to apply its fix when you saved a file, you were forced to mark the fix as safe: + +```json +{ + "linter": { + "rules": { + "correctness": { + "noSwitchDeclarations": { + "level": "error", + "fix": "safe" + } + } + } + } +} +``` + +Now, you can benefit from the code action without making the fix safe for the entire project. IDEs and editors that are LSP compatible allow to list a series of "filters" or code actions that can be applied on save. In the case of VS Code, you will need to add the following snippet in the `settings.json`: + +```json +{ + "editor.codeActionsOnSave": { + "quickfix.biome.correctness.noSwitchDeclarations": "explicit" + } +} +``` + +Upon save, Biome will inform the editor the apply the code action of the rule `noSwitchDeclarations`. diff --git a/.changeset/config.json b/.changeset/config.json new file mode 100644 index 000000000000..f3c8d5e988a3 --- /dev/null +++ b/.changeset/config.json @@ -0,0 +1,21 @@ +{ + "$schema": "https://unpkg.com/@changesets/config@3.0.5/schema.json", + "changelog": ["@changesets/changelog-github", { "repo": "withastro/astro" }], + "commit": false, + "fixed": [ + [ + "@biomejs/biome", + "@biomejs/cli-*", + "@biomejs/wasm-*" + ] + ], + "linked": [], + "access": "public", + "baseBranch": "main", + "updateInternalDependencies": "patch", + "ignore": [ + "@biomejs/benchmark", + "@biomejs/aria-data", + "tailwindcss-config-analyzer" + ] +} diff --git a/.changeset/enable_rule_with_default_severity.md b/.changeset/enable_rule_with_default_severity.md new file mode 100644 index 000000000000..18224faae141 --- /dev/null +++ b/.changeset/enable_rule_with_default_severity.md @@ -0,0 +1,33 @@ +--- +"@biomejs/biome": minor +--- + +You can now enable lint rules using the default severity suggested by Biome using the new variant `"on"`, when enabling a rule. 
+ +For example, the default severity of the rule `style.noVar` is `error`, so you would use `"on"`, and then linting a code that uses `var`, will result in an error: + +```json +{ + "linter": { + "recommended": false, + "rules": { + "style": { + "noVar": "on" + } + } + } +} +``` + +```js +// main.js +var name = "tobias" +``` + +The command `biome lint main.js` will result in an error due to the default severity assigned to `noVar`. + +Refer to the documentation page of each rule to know their suggested diagnostic severity, or use the command `biome explain `: + +```shell +biome explain noVar +``` diff --git a/.changeset/export_named_type_parser.md b/.changeset/export_named_type_parser.md new file mode 100644 index 000000000000..01cb67b97d41 --- /dev/null +++ b/.changeset/export_named_type_parser.md @@ -0,0 +1,11 @@ +--- +"@biomejs/biome": patch +--- + +Export Named Type support `default` parser. + +The following code is now parsed successfully: + +```ts +export { type A as default } from './b.ts'; +``` diff --git a/.changeset/fix_1597_useexhaustivedependencies_now_consider_react_hooks_stable_within_parentheses_or_type_assertions.md b/.changeset/fix_1597_useexhaustivedependencies_now_consider_react_hooks_stable_within_parentheses_or_type_assertions.md new file mode 100644 index 000000000000..281f51da8692 --- /dev/null +++ b/.changeset/fix_1597_useexhaustivedependencies_now_consider_react_hooks_stable_within_parentheses_or_type_assertions.md @@ -0,0 +1,5 @@ +--- +"@biomejs/biome": patch +--- + +Fix #1597, useExhaustiveDependencies now consider React hooks stable within parentheses or type assertions. diff --git a/.changeset/fix_a_bug_where_config_path_accepted_configuration_files_with_unsupported_extensions.md b/.changeset/fix_a_bug_where_config_path_accepted_configuration_files_with_unsupported_extensions.md new file mode 100644 index 000000000000..8a8ab7bd34f1 --- /dev/null +++ b/.changeset/fix_a_bug_where_config_path_accepted_configuration_files_with_unsupported_extensions.md @@ -0,0 +1,5 @@ +--- +"@biomejs/biome": patch +--- + +Fix a bug where `--config-path` accepted configuration files with unsupported extensions. Now only `.json` and `.jsonc` are accepted, and an error is raised otherwise. diff --git a/.changeset/fix_fragament_4751.md b/.changeset/fix_fragament_4751.md new file mode 100644 index 000000000000..bf7e39366cf2 --- /dev/null +++ b/.changeset/fix_fragament_4751.md @@ -0,0 +1,24 @@ +--- +"@biomejs/biome": patch +--- + +Fix [#4751](https://github.com/biomejs/biome/issues/4751) by checking fragments inside `JSXElement` and conditional expressions. For example: + +The Case: + +```jsx +
<SomeComponent> + <> + <div /> + </> +</SomeComponent>
; +``` + +And: + +```jsx +showFullName ? <>{fullName} : <>{firstName}; +``` + +It will report. diff --git a/.changeset/fix_no_fallthrough_switch_case_panic.md b/.changeset/fix_no_fallthrough_switch_case_panic.md new file mode 100644 index 000000000000..0cd7d3327678 --- /dev/null +++ b/.changeset/fix_no_fallthrough_switch_case_panic.md @@ -0,0 +1,5 @@ +--- +"@biomejs/biome": patch +--- + +The rule `noFallthroughSwitchCase` no longer panics on some incomplete code snippets. diff --git a/.changeset/fix_nomissingvarfunction_false_positives_for_container_name.md b/.changeset/fix_nomissingvarfunction_false_positives_for_container_name.md new file mode 100644 index 000000000000..0df2beed5748 --- /dev/null +++ b/.changeset/fix_nomissingvarfunction_false_positives_for_container_name.md @@ -0,0 +1,5 @@ +--- +"@biomejs/biome": patch +--- + +Fix [#5007](https://github.com/biomejs/biome/issues/5007) `noMissingVarFunction` false positives for `container-name`. diff --git a/.changeset/fix_the_use_strict_directive_insertion_logic_for_shebang_and_top_leading_comments.md b/.changeset/fix_the_use_strict_directive_insertion_logic_for_shebang_and_top_leading_comments.md new file mode 100644 index 000000000000..55e7dff6a1e9 --- /dev/null +++ b/.changeset/fix_the_use_strict_directive_insertion_logic_for_shebang_and_top_leading_comments.md @@ -0,0 +1,47 @@ +--- +"@biomejs/biome": patch +--- + +Fix [#4841](https://github.com/biomejs/biome/issues/4841), shebang and top leading comments in cjs files are now handled correctly + +- shebang only (keep it as is) + +``` +#!/usr/bin/env node +``` + +- comments only (keep it as is) + +``` +// comment +``` + +- with shebang + +```diff +- #!/usr/bin/env node"use strict"; ++ #!/usr/bin/env node ++ "use strict"; +let some_variable = "some value"; +``` + +- with comment + +```diff +- // comment +- "use strict"; // comment ++ "use strict"; ++ // comment +let some_variable = "some value"; +``` + +- with shebang and comment + +```diff +- #!/usr/bin/env node"use strict"; +- // comment ++ #!/usr/bin/env node ++ "use strict"; ++ // comment +let some_variable = "some value"; +``` diff --git a/.changeset/fixanalyzer_suppression_comment_fails_with_inner_comments_in_functions.md b/.changeset/fixanalyzer_suppression_comment_fails_with_inner_comments_in_functions.md new file mode 100644 index 000000000000..b96089a50b0c --- /dev/null +++ b/.changeset/fixanalyzer_suppression_comment_fails_with_inner_comments_in_functions.md @@ -0,0 +1,17 @@ +--- +"@biomejs/biome": patch +--- + +Suppression comment should not fail with inner comments in functions. + +The following code: + +```ts +// biome-ignore lint/complexity/useArrowFunction: not work +const foo0 = function (bar: string) { + // biome-ignore lint/style/noParameterAssign: work + bar = "baz"; +}; +``` + +The suppression comment `// biome-ignore lint/style/noParameterAssign: work` will not be invalid. diff --git a/.changeset/introduce_includes.md b/.changeset/introduce_includes.md new file mode 100644 index 000000000000..753136395f26 --- /dev/null +++ b/.changeset/introduce_includes.md @@ -0,0 +1,44 @@ +--- +"@biomejs/biome": minor +--- + +Introduce `includes`. + +Biome allows users to `include` and `ignore` files in its configuration using glob patterns. + +For example, in the following configuration, all files of the `src/` directory are checked except the ones ending with the extension `.test.js`. 
+ +```json +{ + "files": { + "include": ["src/**"], + "ignore": ["**/*.test.js"] + } +} +``` + +Some Biome users have requested the ability to ignore a set of files except some of the files. +With the current system, this is not possible because `include` is always applied before `ignore`. + +Also, many Biome users [reported](https://github.com/biomejs/biome/issues/2421) [issues](https://github.com/biomejs/biome/issues/3345) with the behavior of the glob patterns. +Notably: + +- `src/**` is interpreted as `**/src/**` +- `*.js` is interpreted as `**/*.js` + +To solve all these issues, we introduce a new field `includes`, which replaces both `include` and `ignore`. +`includes` accepts an array of glob patterns with a stricter and more intuitive behavior than the previous glob pattern format. +A glob starting with a `!` is an exception. +This replaces `ignore` patterns. + +The previous configuration must be updated as follows: + +```json +{ + "files": { + "includes": ["src/**", "!**/*.test.js"] + } +} +``` + +You can run `biome migrate` to automatically convert from `include` and `ignore` to `includes`. diff --git a/.changeset/introduce_the_domains_linter_feature.md b/.changeset/introduce_the_domains_linter_feature.md new file mode 100644 index 000000000000..3f10fa1d8269 --- /dev/null +++ b/.changeset/introduce_the_domains_linter_feature.md @@ -0,0 +1,54 @@ +--- +"@biomejs/biome": minor +--- + +Introduce the `domains` linter feature. The Biome linter now has a new way to opt-in rules, with a concept called `domains`. + +Domains can be seen as concepts shared by different rules. + +You can enable and disable multiple rules that belong to a domain. When you assign `"all"`, Biome will enable all the rules, when you assign `"none"`, Biome will disable the rules, when you assign "recommended", Biome will enable all rules of the domain that are recommended. + +```json5 +// biome.jsonc +{ + "linter": { + "domains": { + "test": "all", // all rules that belong to this domain are enabled + "react": "recommended", // only the recommended rules from this domain are enabled + "solid": "none" // rules related to Solid are disabled + } + } +} +``` + +New domains introduced: + +- `test`: it will enable rules: + - `noExportsInTest` + - `noExcessiveNestedTestSuites` + - `noDuplicateTestHooks` + - `noFocusedTests` + And it will inject the following globals: + - `after` + - `afterAll` + - `afterEach` + - `before` + - `beforeEach` + - `beforeAll` + - `describe` + - `it` + - `expect` + - `test` +- `next`: it will enable rules for Next.js projects: + - `useExhaustiveDependencies` + - `useHookAtTopLevel` + - `noImgElement` + - `noHeadImportInDocument` + - `noHeadImportInDocument` +- `react`: it will enable rules for React projects: + - `useExhaustiveDependencies` + - `useHookAtTopLevel` +- `solid`: it will enable rules for Solid projects: + - `noReactSpecificProps` + +For more information regarding how Biome enables rules via domains, please refer to the documentation page of each rule. diff --git a/.changeset/mark_useselfclosingelements_as_safe_and_improve_error_message.md b/.changeset/mark_useselfclosingelements_as_safe_and_improve_error_message.md new file mode 100644 index 000000000000..0aecc868d2f2 --- /dev/null +++ b/.changeset/mark_useselfclosingelements_as_safe_and_improve_error_message.md @@ -0,0 +1,5 @@ +--- +"@biomejs/biome": patch +--- + +Mark `useSelfClosingElements` as safe and improve error message. 
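+ +For example, the now-safe fix rewrites an empty element into a self-closing one (an illustrative before/after): + +```diff +-<div></div>; ++<div />; +```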
diff --git a/.changeset/new_top_level_suppression_for_the_analyzer.md b/.changeset/new_top_level_suppression_for_the_analyzer.md new file mode 100644 index 000000000000..71417bb7d903 --- /dev/null +++ b/.changeset/new_top_level_suppression_for_the_analyzer.md @@ -0,0 +1,60 @@ +--- +"@biomejs/biome": minor +--- + +The Biome analyzer now supports a new top-level suppression. These suppression have to be placed at the top of the file, and they must be followed by two newlines (`\n\n\`). + +The analyzer rules specified inside the block comment will be suppressed for the whole file. + +In the example, we suppress the rules `lint/style/useConst` and `lint/suspicious/noDebugger` for the whole file: + +```js +// main.js +/** + * biome-ignore-all lint/style/useConst: i like let + * biome-ignore-all lint/suspicious/noDebugger: needed now + */ + +let path = "/path"; +let _tmp = undefined; +debugger +``` + +In this other example, we suppress `lint/suspicious/noEmptyBlock` for a whole CSS file: + +```css +/** +/* biome-ignore-all lint/suspicious/noEmptyBlock: it's fine to have empty blocks +*/ + +a {} +span {} +``` + +A new diagnostic is emitted if `biome-ignore-all` suppression isn't placed at the top of the file: + + +```block +file.js:3:1 suppressions/incorrect ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + + ! Top level suppressions can only be used at the beginning of the file. + + 2 │ let foo = 2; + > 3 │ /** + │ ^^^ + > 4 │ * biome-ignore-all lint/style/useConst: reason + > 5 │ */ + │ ^^ + 6 │ let bar = 33; + + i Rename this to biome-ignore + + 2 │ let foo = 2; + 3 │ /** + > 4 │ * biome-ignore-all lint/style/useConst: reason + │ ^^^^^^^^^^^^^^^^ + 5 │ */ + 6 │ let bar = 33; + + +``` diff --git a/.changeset/no_more_trailing_commas_in_json_files.md b/.changeset/no_more_trailing_commas_in_json_files.md new file mode 100644 index 000000000000..0aa45692cb88 --- /dev/null +++ b/.changeset/no_more_trailing_commas_in_json_files.md @@ -0,0 +1,5 @@ +--- +"@biomejs/biome": major +--- + +The Biome formatter doesn't add a trailing command in `.json` files, even when `json.formatter.trailingCommas` is set to `true`. diff --git a/.changeset/old-eels-help.md b/.changeset/old-eels-help.md new file mode 100644 index 000000000000..a6580f596987 --- /dev/null +++ b/.changeset/old-eels-help.md @@ -0,0 +1,6 @@ +--- +"@biomejs/biome": patch +--- + +Fix [#4875](https://github.com/biomejs/biome/issues/4875), where the Jetbrains IDE terminal would output not clickable, relative file path link to the diagnostic file. This does not fix paths without line and column numbers. + diff --git a/.changeset/reduced_accepted_values.md b/.changeset/reduced_accepted_values.md new file mode 100644 index 000000000000..f8ff953355f5 --- /dev/null +++ b/.changeset/reduced_accepted_values.md @@ -0,0 +1,11 @@ +--- +"@biomejs/biome": major +--- + +Reduced accepted values for formatter options: +- The option `--quote-style` doesn't accept `Single` and `Double` anymore. +- The option `--quote-properties` doesn't accept `AsNeeded` and `Preserve` anymore. +- The option `--semicolons` doesn't accept `AsNeeded` and `Always` anymore. +- The option `--arrow-parenthesis` doesn't accept `AsNeeded` and `Always` anymore. +- The option `--trailing-commas` doesn't accept `ES5`, `All` and `None` anymore. +- The option `--attribute-position` doesn't accept `Single` and `Multiline` anymore. 
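+ +For example, with `--quote-style` you now pass the lowercase variant (an illustrative migration): + +```diff +-biome format --quote-style=Single ++biome format --quote-style=single +```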
diff --git a/.changeset/remove_biome_log_dir.md b/.changeset/remove_biome_log_dir.md new file mode 100644 index 000000000000..724e1f6d3e18 --- /dev/null +++ b/.changeset/remove_biome_log_dir.md @@ -0,0 +1,9 @@ +--- +"@biomejs/biome": major +--- + +Remove `BIOME_LOG_DIR`. + +The environment variable `BIOME_LOG_DIR` isn't supported anymore. + +Use `BIOME_LOG_PATH` instead. diff --git a/.changeset/remove_deprecaterd_rules.md b/.changeset/remove_deprecaterd_rules.md new file mode 100644 index 000000000000..66b9574e52a3 --- /dev/null +++ b/.changeset/remove_deprecaterd_rules.md @@ -0,0 +1,15 @@ +--- +"@biomejs/biome": major +--- + +Remove deprecated rules. + +The following _deprecated_ rules have been deleted: + +- `noInvalidNewBuiltin` +- `noNewSymbol` +- `useShorthandArrayType` +- `useSingleCaseStatement` +- `noConsoleLog` + +Run the command `biome migrate --write` to update the configuration. diff --git a/.changeset/remove_indentsize_option.md b/.changeset/remove_indentsize_option.md new file mode 100644 index 000000000000..2f880776a38f --- /dev/null +++ b/.changeset/remove_indentsize_option.md @@ -0,0 +1,15 @@ +--- +"@biomejs/biome": major +--- + +Remove `indentSize` deprecated option. + +The deprecated option `indentSize`, and its relative CLI options, has been removed: +- Configuration file: `formatter.indentSize` +- Configuration file: `javascript.formatter.indentSize` +- Configuration file: `json.formatter.indentSize` +- CLI option `--indent-size` +- CLI option `--javascript-formatter-indent-size` +- CLI option `--json-formatter-indent-size` + +Use `indentWidth` and its relative CLI options instead. diff --git a/.changeset/remove_rome_binary.md b/.changeset/remove_rome_binary.md new file mode 100644 index 000000000000..1227e32d7790 --- /dev/null +++ b/.changeset/remove_rome_binary.md @@ -0,0 +1,7 @@ +--- +"@biomejs/biome": major +--- + +Remove `ROME_BINARY`. Use `BIOME_BINARY` instead. + + diff --git a/.changeset/remove_support_for_legacy_suppressions.md b/.changeset/remove_support_for_legacy_suppressions.md new file mode 100644 index 000000000000..4ba759773018 --- /dev/null +++ b/.changeset/remove_support_for_legacy_suppressions.md @@ -0,0 +1,13 @@ +--- +"@biomejs/biome": major +--- + +Remove support for legacy suppressions. + +Biome used to support "legacy suppressions" that looked like this: + +```js +// biome-ignore lint(complexity/useWhile): reason +``` + +This format is no longer supported. diff --git a/.changeset/remove_support_for_max_line_length_from_editorconfig_as_it_isnt_part_of_the_official_spec_anymore_.md b/.changeset/remove_support_for_max_line_length_from_editorconfig_as_it_isnt_part_of_the_official_spec_anymore_.md new file mode 100644 index 000000000000..6b3965e195ff --- /dev/null +++ b/.changeset/remove_support_for_max_line_length_from_editorconfig_as_it_isnt_part_of_the_official_spec_anymore_.md @@ -0,0 +1,5 @@ +--- +"@biomejs/biome": major +--- + +Remove support for `max_line_length` from `.editorconfig`, as it isn't part of the official spec anymore. diff --git a/.changeset/remove_support_for_rome_ignore_suppression_comment.md b/.changeset/remove_support_for_rome_ignore_suppression_comment.md new file mode 100644 index 000000000000..6d5fce844f8e --- /dev/null +++ b/.changeset/remove_support_for_rome_ignore_suppression_comment.md @@ -0,0 +1,7 @@ +--- +"@biomejs/biome": major +--- + +Remove support for `rome-ignore` suppression comment. + +Use the `biome-ignore` suppression comment instead. 
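+ +For example (illustrative, using `noDebugger` as the suppressed rule): + +```diff +-// rome-ignore lint/suspicious/noDebugger: reason ++// biome-ignore lint/suspicious/noDebugger: reason +```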
diff --git a/.changeset/remove_support_for_romejson.md b/.changeset/remove_support_for_romejson.md new file mode 100644 index 000000000000..817626dcd737 --- /dev/null +++ b/.changeset/remove_support_for_romejson.md @@ -0,0 +1,5 @@ +--- +"@biomejs/biome": major +--- + +Remove support for `rome.json`. diff --git a/.changeset/remove_the_option_all_from_the_linter.md b/.changeset/remove_the_option_all_from_the_linter.md new file mode 100644 index 000000000000..2dd50bc4fe36 --- /dev/null +++ b/.changeset/remove_the_option_all_from_the_linter.md @@ -0,0 +1,16 @@ +--- +"@biomejs/biome": major +--- + +Remove the option `all` from the linter. + +The options `linter.rules.all` and `linter.rules.<group>.all` have been removed. + +The number of rules in Biome has increased in scope and use cases, and sometimes some of them can conflict with each other. + +The option was useful at the beginning, but now it's deemed harmful, because it can cause unexpected behaviours in users' projects. + +To automatically remove it, run the following command: +```shell +biome migrate --write +``` diff --git a/.changeset/remove_trailingcomma.md b/.changeset/remove_trailingcomma.md new file mode 100644 index 000000000000..fecf99930666 --- /dev/null +++ b/.changeset/remove_trailingcomma.md @@ -0,0 +1,21 @@ +--- +"@biomejs/biome": major +--- + +Removed the option `trailingComma` from the configuration and the CLI. Use the option `trailingCommas` instead: + +```diff +{ + "javascript": { + "formatter": { +- "trailingComma": "es5" ++ "trailingCommas": "es5" + } + } +} +``` + +```diff +-biome format --trailing-comma=es5 ++biome format --trailing-commas=es5 +``` diff --git a/.changeset/removed_apply_and_apply_unsafe.md b/.changeset/removed_apply_and_apply_unsafe.md new file mode 100644 index 000000000000..58d6558e5156 --- /dev/null +++ b/.changeset/removed_apply_and_apply_unsafe.md @@ -0,0 +1,17 @@ +--- +"@biomejs/biome": major +--- + +Removed `--apply` and `--apply-unsafe`. + +The CLI options `--apply` and `--apply-unsafe` aren't accepted anymore. Use `--write` and `--write --unsafe` instead: + +```diff +-biome check --apply-unsafe ++biome check --write --unsafe +``` + +```diff +-biome check --apply ++biome check --write +``` diff --git a/.changeset/removed_support_for_assert.md b/.changeset/removed_support_for_assert.md new file mode 100644 index 000000000000..0763f0d03461 --- /dev/null +++ b/.changeset/removed_support_for_assert.md @@ -0,0 +1,15 @@ +--- +"@biomejs/biome": major +--- + +Removed support for `assert` syntax. + +Biome no longer supports the `assert` syntax; use the new `with` syntax instead: + +```diff +-import {test} from "foo.json" assert { for: "for" } +-export * from "mod" assert { type: "json" } ++import {test} from "foo.json" with { for: "for" } ++export * from "mod" with { type: "json" } +``` + diff --git a/.changeset/renamed_useimportrestrictions_to_nopackageprivateimports.md b/.changeset/renamed_useimportrestrictions_to_nopackageprivateimports.md new file mode 100644 index 000000000000..192fdd2cc5bd --- /dev/null +++ b/.changeset/renamed_useimportrestrictions_to_nopackageprivateimports.md @@ -0,0 +1,8 @@ +--- +"@biomejs/biome": major +--- + +The rule `useImportRestrictions` has been renamed to `noPackagePrivateImports`. + +To avoid confusion with `noRestrictedImports`, `useImportRestrictions` has been +renamed to `noPackagePrivateImports`.
diff --git a/.changeset/reworked_how_large_files_behave.md b/.changeset/reworked_how_large_files_behave.md new file mode 100644 index 000000000000..308064a8a3a9 --- /dev/null +++ b/.changeset/reworked_how_large_files_behave.md @@ -0,0 +1,7 @@ +--- +"@biomejs/biome": major +--- + +Previously, files that should exceed the configured size limit would throw an error, and the CLI would exit with an error code. + +Now, the CLI ignores the file, emits a *information* diagnostic and doesn't exit with an error code. diff --git a/.changeset/style_rules_arent_recommended_anymore_.md b/.changeset/style_rules_arent_recommended_anymore_.md new file mode 100644 index 000000000000..3e42bc5e259e --- /dev/null +++ b/.changeset/style_rules_arent_recommended_anymore_.md @@ -0,0 +1,34 @@ +--- +"@biomejs/biome": major +--- + +The `style` rules aren't recommended anymore. + +Linting rules that belong to the group `style` aren't recommended anymore. Here's the list of rules that aren't recommended anymore: + +- `useNumberNamespace` +- `noNonnullAssertion` +- `useAsConstAssertion` +- `noParameterAssign` +- `noInferrableTypes` +- `useNodejsImportProtocol` +- `useExportType` +- `useDefaultParameterLast` +- `noUnusedTemplateLiteral` +- `useExponentiationOperator` +- `useEnumInitializers` +- `useShorthandFunctionType` +- `useLiteralEnumMembers` +- `noVar` +- `noUselessElse` +- `useNumericLiterals` +- `noCommaOperator` +- `useConst` +- `noArguments` +- `useSelfClosingElements` +- `useImportType` +- `useTemplate` +- `useSingleVarDeclarator` +- `useWhile` + +Use `biome migrate` to enable these rules, to avoid breaking changes. diff --git a/.changeset/the_action_quickfixsuppressrule_is_removed.md b/.changeset/the_action_quickfixsuppressrule_is_removed.md new file mode 100644 index 000000000000..939471000bd0 --- /dev/null +++ b/.changeset/the_action_quickfixsuppressrule_is_removed.md @@ -0,0 +1,35 @@ +--- +"@biomejs/biome": major +--- + +Remove the code action `quickfix.suppressRule`. + +The code action `quickfix.suppressRule` was removed in favour of two new code actions: + +- `quickfix.suppressRule.inline.biome`: a code action that adds a suppression comment for each violation. +- `quickfix.suppressRule.topLevel.biome`: a code action that adds a suppression comment at the top of the file which suppresses a rule for the whole file. + + +Given the following code +```js +let foo = "one"; +debugger +``` + +The code action `quickfix.suppressRule.inline.biome` will result in the following code: +```js +// biome-ignore lint/style/useConst: +let foo = "one"; +// biome-ignore lint/suspicious/noDebugger: +debugger +``` + +The code action `quickfix.suppressRule.topLevel.biome`, instead, will result in the following code: +```js +/** biome-ignore lint/suspicious/noDebugger: */ +/** biome-ignore lint/style/useConst: */ + +let foo = "one"; +debugger; +``` + diff --git a/.changeset/the_file_packagejson.md b/.changeset/the_file_packagejson.md new file mode 100644 index 000000000000..ad79817a0abd --- /dev/null +++ b/.changeset/the_file_packagejson.md @@ -0,0 +1,17 @@ +--- +"@biomejs/biome": major +--- + +Changed default formatting of `package.json`. + +When Biome encounters a file called `package.json`, by default it will format the file with all objects and arrays expanded. 
+ +```diff +- { "name": "project", "dependencies": { "foo": "latest" } } ++ { ++ "projectName": "project", ++ "dependencies": { ++ "foo": "^1.0.0" ++ } ++ } +``` diff --git a/.changeset/the_organizeimports_is_now_part_of_biome_assist.md b/.changeset/the_organizeimports_is_now_part_of_biome_assist.md new file mode 100644 index 000000000000..53ca8bc37772 --- /dev/null +++ b/.changeset/the_organizeimports_is_now_part_of_biome_assist.md @@ -0,0 +1,5 @@ +--- +"@biomejs/biome": major +--- + +The `organizeImports` is now part of Biome Assist diff --git a/.changeset/the_rule_noconsolelog_has_been_removed.md b/.changeset/the_rule_noconsolelog_has_been_removed.md new file mode 100644 index 000000000000..fbbef0f6277d --- /dev/null +++ b/.changeset/the_rule_noconsolelog_has_been_removed.md @@ -0,0 +1,5 @@ +--- +"@biomejs/biome": major +--- + +The rule `noConsoleLog` has been removed diff --git a/.changeset/the_rule_novar_now_belongs_to_the_suspicious_group.md b/.changeset/the_rule_novar_now_belongs_to_the_suspicious_group.md new file mode 100644 index 000000000000..361307e82732 --- /dev/null +++ b/.changeset/the_rule_novar_now_belongs_to_the_suspicious_group.md @@ -0,0 +1,5 @@ +--- +"@biomejs/biome": major +--- + +The rule `noVar` now belongs to the `suspicious` group diff --git a/.changeset/the_rule_useexhaustivedependencies_isnt_recommended_anymore.md b/.changeset/the_rule_useexhaustivedependencies_isnt_recommended_anymore.md new file mode 100644 index 000000000000..67c95af30139 --- /dev/null +++ b/.changeset/the_rule_useexhaustivedependencies_isnt_recommended_anymore.md @@ -0,0 +1,19 @@ +--- +"@biomejs/biome": major +--- + +The rule `useExhaustiveDependencies` is not recommended anymore. If your codebase uses `react` and relies on that rule, you have to enable it: + + +```jsonc +// biome.json +{ + "linter": { + "rules": { + "correctness": { + "useExhaustiveDependencies": "error" + } + } + } +} +``` diff --git a/.changeset/the_rule_usewhile_now_belongs_to_the_complexity_group.md b/.changeset/the_rule_usewhile_now_belongs_to_the_complexity_group.md new file mode 100644 index 000000000000..600a2e8578b2 --- /dev/null +++ b/.changeset/the_rule_usewhile_now_belongs_to_the_complexity_group.md @@ -0,0 +1,5 @@ +--- +"@biomejs/biome": major +--- + +The rule `useWhile` now belongs to the `complexity` group diff --git a/.changeset/tsconfigjson_files_will_now_be_treated_the_same_as_tsconfigjson_files.md b/.changeset/tsconfigjson_files_will_now_be_treated_the_same_as_tsconfigjson_files.md new file mode 100644 index 000000000000..3bc44ea008da --- /dev/null +++ b/.changeset/tsconfigjson_files_will_now_be_treated_the_same_as_tsconfigjson_files.md @@ -0,0 +1,5 @@ +--- +"@biomejs/biome": patch +--- + +`tsconfig.*.json` files will now be treated the same as `tsconfig.json` files. diff --git a/.changeset/use_new_workspace_apis.md b/.changeset/use_new_workspace_apis.md new file mode 100644 index 000000000000..d71dfffcac13 --- /dev/null +++ b/.changeset/use_new_workspace_apis.md @@ -0,0 +1,6 @@ +--- +"@biomejs/js-api": minor +"@biomejs/biome": minor +--- + +The package now requires `v2` of the WebAssembly packages. The internal APIs of Workspace are now `camelCase`. 
diff --git a/.changeset/usefilenamingconvention_and_usenamingconvention_now_require_ascii_names_by_default.md b/.changeset/usefilenamingconvention_and_usenamingconvention_now_require_ascii_names_by_default.md new file mode 100644 index 000000000000..47e01c88929b --- /dev/null +++ b/.changeset/usefilenamingconvention_and_usenamingconvention_now_require_ascii_names_by_default.md @@ -0,0 +1,37 @@ +--- +"@biomejs/biome": major +--- + +Prior to Biome 2.0, non-ASCII names were accepted by default. +They are now rejected. + +For example, the following code is now reported as invalid by the `useNamingConvention` rule. + +```js +let johnCafé; +``` + +If you want to allow non-ASCII filenames and non-ASCII identifiers, you need to set the `requireAscii` option in your Biome configuration file to `false`: + +```json +{ + "linter": { + "rules": { + "style": { + "useFilenamingConvention": { + "level": "on", + "options": { + "requireAscii": false + } + }, + "useNamingConvention": { + "level": "on", + "options": { + "requireAscii": false + } + } + } + } + } +} +``` diff --git a/.changeset/usenamingconvention_preserves_capitalization.md b/.changeset/usenamingconvention_preserves_capitalization.md new file mode 100644 index 000000000000..8b827a3fa621 --- /dev/null +++ b/.changeset/usenamingconvention_preserves_capitalization.md @@ -0,0 +1,29 @@ +--- +"@biomejs/biome": patch +--- + +The `useNamingConvention` rule now suggests a rename that preserves uppercase if possible. + +For instance, Biome suggested renaming `HTMLWrapper` as `htmlWrapper`: + +```diff +- import HTMLWrapper from "HTMLWrapper.tsx"; ++ import htmlWrapper from "HTMLWrapper.tsx"; + + function component() { +- return <HTMLWrapper />; ++ return <htmlWrapper />; + } +``` + +Since both `PascalCase` and `camelCase` are accepted, Biome now suggests renaming `HTMLWrapper` as `HtmlWrapper`: + +```diff +- import HTMLWrapper from "HTMLWrapper.tsx"; ++ import HtmlWrapper from "HTMLWrapper.tsx"; + + function component() { +- return <HTMLWrapper />; ++ return <HtmlWrapper />; + } +``` diff --git a/.gitattributes b/.gitattributes index bbd8e76b00ad..e9bcd97d8d38 100644 --- a/.gitattributes +++ b/.gitattributes @@ -4,26 +4,26 @@ /crates/biome_cli/src/execute/migrate/eslint_any_rule_to_biome.rs linguist-generated=true text=auto eol=lf /crates/biome_configuration/src/generated.rs linguist-generated=true text=auto eol=lf /crates/biome_configuration/src/analyzer/linter/rules.rs linguist-generated=true text=auto eol=lf -/crates/biome_configuration/src/analyzer/assists/actions.rs linguist-generated=true text=auto eol=lf +/crates/biome_configuration/src/analyzer/assist/actions.rs linguist-generated=true text=auto eol=lf /crates/biome_configuration/src/analyzer/parse/rules.rs linguist-generated=true text=auto eol=lf # GraphQL -/crates/biome_graphql_analyze/src/{lint,assists,syntax}.rs linguist-generated=true text=auto eol=lf -/crates/biome_graphql_analyze/src/{lint,assists,syntax}/*.rs linguist-generated=true text=auto eol=lf +/crates/biome_graphql_analyze/src/{lint,assist,syntax}.rs linguist-generated=true text=auto eol=lf +/crates/biome_graphql_analyze/src/{lint,assist,syntax}/*.rs linguist-generated=true text=auto eol=lf /crates/biome_graphql_analyze/src/options.rs linguist-generated=true text=auto eol=lf /crates/biome_graphql_analyze/src/registry.rs linguist-generated=true text=auto eol=lf # CSS -/crates/biome_css_analyze/src/{lint,assists,syntax}.rs linguist-generated=true text=auto eol=lf -/crates/biome_css_analyze/src/{lint,assists,syntax}/*.rs linguist-generated=true text=auto eol=lf
+/crates/biome_css_analyze/src/{lint,assist,syntax}.rs linguist-generated=true text=auto eol=lf +/crates/biome_css_analyze/src/{lint,assist,syntax}/*.rs linguist-generated=true text=auto eol=lf /crates/biome_css_analyze/src/options.rs linguist-generated=true text=auto eol=lf /crates/biome_css_analyze/src/registry.rs linguist-generated=true text=auto eol=lf # JSON -/crates/biome_json_analyze/src/{lint,assists,syntax}.rs linguist-generated=true text=auto eol=lf -/crates/biome_json_analyze/src/{lint,assists,syntax}/*.rs linguist-generated=true text=auto eol=lf +/crates/biome_json_analyze/src/{lint,assist,syntax}.rs linguist-generated=true text=auto eol=lf +/crates/biome_json_analyze/src/{lint,assist,syntax}/*.rs linguist-generated=true text=auto eol=lf /crates/biome_js_analyze/src/options.rs linguist-generated=true text=auto eol=lf /crates/biome_js_analyze/src/registry.rs linguist-generated=true text=auto eol=lf # JS -/crates/biome_js_analyze/src/{lint,assists,syntax}.rs linguist-generated=true text=auto eol=lf -/crates/biome_js_analyze/src/{lint,assists,syntax}/*.rs linguist-generated=true text=auto eol=lf +/crates/biome_js_analyze/src/{lint,assist,syntax}.rs linguist-generated=true text=auto eol=lf +/crates/biome_js_analyze/src/{lint,assist,syntax}/*.rs linguist-generated=true text=auto eol=lf /crates/biome_js_analyze/src/options.rs linguist-generated=true text=auto eol=lf /crates/biome_js_analyze/src/registry.rs linguist-generated=true text=auto eol=lf # Grit diff --git a/CHANGELOG.md b/CHANGELOG.md index 0b30c11872c5..ef45e7f37f97 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -189,6 +189,8 @@ our [guidelines for writing a good changelog entry](https://github.com/biomejs/b - Add [noGlobalDirnameFilename](https://biomejs.dev/linter/rules/no-global-dirname-filename/). Contributed by @unvalley +- Add [noUnwantedPolyfillio](https://biomejs.dev/linter/rules/no-unwanted-polyfillio/). Contributed by @unvalley + - [noForEach](https://biomejs.dev/linter/rules/no-for-each/) now provides a new option `validIdentifiers` ([#3351](https://github.com/biomejs/biome/issues/3351)) to specify which variable names are allowed to call `forEach`. Identifiers containing dots (e.g., "lib._") or empty strings are not allowed. Invalid configurations will produce a diagnostic warning. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index a54e90022076..c88ded571ec3 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -3,39 +3,39 @@ We can use help in a bunch of areas and any help is greatly appreciated! 
## Table of Contents - - [🚀 Contributing](#-contributing) - - [Table of Contents](#table-of-contents) - - [Asking questions, making proposals](#asking-questions-making-proposals) - - [Reporting bugs](#reporting-bugs) - - [Getting Started](#getting-started) - - [Install the required tools](#install-the-required-tools) - - [Testing](#testing) - - [Debugging](#debugging) - - [Debug binaries](#debug-binaries) - - [Production binaries](#production-binaries) - - [Checks](#checks) - - [Crates development](#crates-development) - - [Create new crates](#create-new-crates) - - [Analyzers and lint rules](#analyzers-and-lint-rules) - - [Parser](#parser) - - [Formatter](#formatter) - - [Crate dependencies](#crate-dependencies) - - [Node.js development](#nodejs-development) - - [Translations](#translations) - - [Commit messages](#commit-messages) - - [Creating pull requests](#creating-pull-requests) - - [Changelog](#changelog) - - [Writing a changelog line](#writing-a-changelog-line) - - [Documentation](#documentation) - - [Versioning](#versioning) - - [Releasing](#releasing) - - [Resources](#resources) - - [Current Members](#current-members) - - [Lead team](#lead-team) - - [Core Contributors team](#core-contributors-team) - - [Maintainers team](#maintainers-team) - - [Past Maintainers](#past-maintainers) + * [Asking questions, making proposals](#asking-questions-making-proposals) + * [Reporting bugs](#reporting-bugs) + * [Getting Started](#getting-started) + * [Install the required tools](#install-the-required-tools) + * [Testing](#testing) + + [Debugging](#debugging) + * [Debug binaries](#debug-binaries) + * [Production binaries](#production-binaries) + * [Checks](#checks) + * [Crates development](#crates-development) + + [Create new crates](#create-new-crates) + + [Analyzers and lint rules](#analyzers-and-lint-rules) + + [Parser](#parser) + + [Formatter](#formatter) + * [Crate dependencies](#crate-dependencies) + * [Node.js development](#nodejs-development) + + [Translations](#translations) + * [Commit messages](#commit-messages) + * [Creating pull requests](#creating-pull-requests) + + [Changelog](#changelog) + - [Choose the correct packages](#choose-the-correct-packages) + - [Choose the correct type of change](#choose-the-correct-type-of-change) + - [Writing a changeset](#writing-a-changeset) + + [Documentation](#documentation) + + [Versioning](#versioning) + * [Releasing](#releasing) + * [Resources](#resources) + * [Current Members](#current-members) + + [Lead team](#lead-team) + + [Core Contributors team](#core-contributors-team) + + [Maintainers team](#maintainers-team) + + [Past Maintainers](#past-maintainers) ## Asking questions, making proposals @@ -249,7 +249,7 @@ things you would need to run and check: - `just f` (alias for `just format`), formats Rust and TOML files. - `just l` (alias for `just lint`), run the linter for the whole project. - Code generation. The code generation of the repository is spread in the different parts of the code base. Sometimes is needed and sometime it isn't: - - run `just gen-lint` when you're working on the **linter**; + - run `just gen-analyzer` when you're working on the **linter**; - run `just gen-bindings` in case you worked around the **workspace**. > [!NOTE] @@ -348,62 +348,50 @@ Please use the template provided. ### Changelog -If the PR you're about to open is a bugfix/feature visible to Biome users, you CAN add a new bullet point to [CHANGELOG.md](./CHANGELOG.md). Although **not required**, we appreciate the effort. 
- -At the top of the file you will see a `Unreleased` section. -The headings divide the sections by "scope"; you should be able to identify the scope that belongs to your change. If the change belongs to multiple scopes, you can copy the same sentence under those scopes. - -Here's a sample of the headings: - -```markdown -## Unreleased +This repository uses [changesets](https://github.com/changesets/changesets) to automate the releases of Biome's binaries, the JavaScript libraries and the creation of the `CHANGELOG.md` for each library. -### Analyzer +If the PR you're about to open is a bugfix/feature visible to users of the Biome toolchain or of the published Biome crates, you are encouraged to provide a **changeset**. To *create* a changeset, use the following command (*don't create it manually*): -### CLI +```shell +just new-changeset +``` +The command will present a prompt where you need to choose the libraries affected by the PR, the type of change (`major`, `minor` or `patch`) for each library, and a description of the change. The description will be used as the name of the file. -### Configuration +The command will create the changeset(s) in the `.changeset` folder. You're free to open the file and add more information to it. -### Editors +#### Choose the correct packages -### Formatter +In the vast majority of cases, you want to choose the `@biomejs/biome` package, which represents the main package. -### JavaScript APIs +The frontmatter of the changeset will look like this: -### Linter +```markdown +--- +"@biomejs/biome": patch +--- -### Parser +Description here... ``` -When you edit a blank section: - -- If your PR adds a **breaking change**, create a new heading called `#### BREAKING CHANGES` and add - bullet point that explains the breaking changes; provide a migration path if possible. - Read [how we version Biome](https://biomejs.dev/internals/versioning/) to determine if your change is breaking. A breaking change results in a major release. -- If your PR adds a new feature, enhances an existing feature, or fixes a bug, create a new heading called `#### New features`, `#### Enhancements`, or `#### Bug fixes`. Ultimately, add a bullet point that explains the change. - -Make sure that the created subsections are ordered in the following order: +#### Choose the correct type of change -```md -#### BREAKING CHANGES +We are very strict about `major` changes in the `@biomejs/biome` package. To better understand the type of your change *for this package*, please refer to our [versioning page](https://biomejs.dev/internals/versioning/). Generally: +- `patch`: any sort of change that fixes a bug. +- `minor`: new features available to the users. +- `major`: a change that breaks a user API. -#### New features - -#### Enhancements - -#### Bug fixes -``` +#### Writing a changeset -#### Writing a changelog line +The description of the changeset should follow these guidelines: - Use the present tense, e.g. "Add new feature", "Fix edge case". - If you fix a bug, please add the link to the issue, e.g. "Fix edge case [#4444]()". -- You can add a mention `@user` for every contributor of the change. - Whenever applicable, add a code block to show your new changes. For example, for a new rule you might want to show an invalid case, for the formatter you might want to show how the new formatting changes, and so on. +- End each sentence with a full stop. -If in doubt, take a look to existing changelog lines. +If in doubt, take a look at existing or past changesets.
### Documentation diff --git a/Cargo.lock b/Cargo.lock index 07f8028a060e..7ffb24af1d6c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -53,6 +53,12 @@ version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" +[[package]] +name = "append-only-vec" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7992085ec035cfe96992dd31bfd495a2ebd31969bb95f624471cb6c0b349e571" + [[package]] name = "ascii_table" version = "4.0.5" @@ -117,8 +123,12 @@ dependencies = [ "biome_deserialize", "biome_deserialize_macros", "biome_diagnostics", + "biome_parser", "biome_rowan", + "biome_suppression", + "camino", "enumflags2", + "indexmap", "rustc-hash 2.1.0", "schemars", "serde", @@ -162,6 +172,8 @@ dependencies = [ "biome_flags", "biome_formatter", "biome_fs", + "biome_glob", + "biome_grit_patterns", "biome_js_analyze", "biome_js_formatter", "biome_json_formatter", @@ -173,10 +185,9 @@ dependencies = [ "biome_service", "biome_text_edit", "bpaf", + "camino", "crossbeam", "dashmap 6.1.0", - "hdrhistogram", - "indexmap 2.7.1", "insta", "libc", "mimalloc", @@ -188,6 +199,7 @@ dependencies = [ "serde", "serde_json", "smallvec", + "terminal_size", "tikv-jemallocator", "tokio", "tracing", @@ -209,26 +221,27 @@ dependencies = [ "biome_diagnostics", "biome_flags", "biome_formatter", + "biome_glob", "biome_graphql_analyze", "biome_graphql_syntax", + "biome_html_formatter", "biome_html_syntax", "biome_js_analyze", "biome_js_formatter", - "biome_js_syntax", "biome_json_analyze", "biome_json_formatter", "biome_json_parser", "biome_json_syntax", "biome_rowan", "bpaf", - "indexmap 2.7.1", + "camino", "insta", "oxc_resolver", "rustc-hash 2.1.0", "schemars", "serde", "serde_ini", - "serde_json", + "tests_macros", ] [[package]] @@ -265,10 +278,13 @@ dependencies = [ "biome_deserialize", "biome_deserialize_macros", "biome_diagnostics", + "biome_fs", + "biome_plugin_loader", "biome_rowan", "biome_string_case", "biome_suppression", "biome_test_utils", + "camino", "insta", "regex", "rustc-hash 2.1.0", @@ -301,6 +317,7 @@ dependencies = [ "biome_service", "biome_string_case", "biome_suppression", + "camino", "countme", "serde", "serde_json", @@ -323,6 +340,7 @@ dependencies = [ "biome_service", "biome_test_utils", "biome_unicode_table", + "camino", "insta", "quickcheck", "quickcheck_macros", @@ -346,10 +364,34 @@ version = "0.5.7" dependencies = [ "biome_rowan", "biome_string_case", + "camino", "schemars", "serde", ] +[[package]] +name = "biome_dependency_graph" +version = "0.0.1" +dependencies = [ + "biome_deserialize", + "biome_fs", + "biome_js_parser", + "biome_js_syntax", + "biome_json_parser", + "biome_json_value", + "biome_package", + "biome_project_layout", + "biome_rowan", + "camino", + "cfg-if", + "once_cell", + "oxc_resolver", + "papaya", + "rustc-hash 2.1.0", + "seize", + "serde_json", +] + [[package]] name = "biome_deserialize" version = "0.6.0" @@ -360,9 +402,9 @@ dependencies = [ "biome_json_parser", "biome_json_syntax", "biome_rowan", + "camino", "enumflags2", - "indexmap 2.7.1", - "schemars", + "indexmap", "serde", "serde_json", "smallvec", @@ -391,6 +433,7 @@ dependencies = [ "biome_text_edit", "biome_text_size", "bpaf", + "camino", "enumflags2", "insta", "oxc_resolver", @@ -399,6 +442,7 @@ dependencies = [ "serde_ini", "serde_json", "termcolor", + "terminal_size", "trybuild", "unicode-width 0.1.12", ] @@ -441,10 +485,11 @@ dependencies = [ "biome_js_syntax", 
"biome_rowan", "biome_string_case", + "camino", "cfg-if", "countme", "drop_bomb", - "indexmap 2.7.1", + "indexmap", "insta", "rustc-hash 2.1.0", "schemars", @@ -466,6 +511,7 @@ dependencies = [ "biome_parser", "biome_rowan", "biome_service", + "camino", "insta", "serde", "serde_json", @@ -478,11 +524,12 @@ name = "biome_fs" version = "0.5.7" dependencies = [ "biome_diagnostics", + "camino", "crossbeam", "directories", "enumflags2", - "indexmap 2.7.1", "oxc_resolver", + "papaya", "parking_lot", "rayon", "rustc-hash 2.1.0", @@ -520,6 +567,7 @@ dependencies = [ "biome_string_case", "biome_suppression", "biome_test_utils", + "camino", "insta", "schemars", "serde", @@ -549,6 +597,7 @@ dependencies = [ "biome_rowan", "biome_service", "biome_suppression", + "camino", "countme", "serde", "serde_json", @@ -590,6 +639,7 @@ version = "0.1.0" dependencies = [ "biome_rowan", "biome_string_case", + "camino", "schemars", "serde", ] @@ -616,6 +666,7 @@ dependencies = [ "biome_parser", "biome_rowan", "biome_service", + "camino", "countme", "serde", "serde_json", @@ -648,7 +699,10 @@ dependencies = [ name = "biome_grit_patterns" version = "0.0.1" dependencies = [ + "biome_analyze", "biome_console", + "biome_css_parser", + "biome_css_syntax", "biome_diagnostics", "biome_grit_parser", "biome_grit_syntax", @@ -658,6 +712,7 @@ dependencies = [ "biome_rowan", "biome_string_case", "biome_test_utils", + "camino", "grit-pattern-matcher", "grit-util", "insta", @@ -665,7 +720,9 @@ dependencies = [ "rand 0.8.5", "regex", "rustc-hash 2.1.0", + "schemars", "serde", + "serde_json", "tests_macros", ] @@ -675,6 +732,7 @@ version = "0.5.7" dependencies = [ "biome_rowan", "biome_string_case", + "camino", "schemars", "serde", ] @@ -691,6 +749,8 @@ dependencies = [ name = "biome_html_formatter" version = "0.0.0" dependencies = [ + "biome_deserialize", + "biome_deserialize_macros", "biome_diagnostics_categories", "biome_formatter", "biome_formatter_test", @@ -701,7 +761,10 @@ dependencies = [ "biome_rowan", "biome_service", "biome_suppression", + "camino", "countme", + "schemars", + "serde", "tests_macros", ] @@ -730,6 +793,7 @@ version = "0.5.7" dependencies = [ "biome_rowan", "biome_string_case", + "camino", "schemars", "serde", ] @@ -743,21 +807,26 @@ dependencies = [ "biome_aria_metadata", "biome_console", "biome_control_flow", + "biome_dependency_graph", "biome_deserialize", "biome_deserialize_macros", "biome_diagnostics", + "biome_fs", "biome_glob", "biome_js_factory", "biome_js_parser", "biome_js_semantic", "biome_js_syntax", - "biome_project", + "biome_package", + "biome_plugin_loader", + "biome_project_layout", "biome_rowan", "biome_string_case", "biome_suppression", "biome_test_utils", "biome_unicode_table", "bitvec", + "camino", "enumflags2", "globset", "insta", @@ -799,6 +868,7 @@ dependencies = [ "biome_suppression", "biome_text_size", "biome_unicode_table", + "camino", "countme", "quickcheck", "schemars", @@ -825,9 +895,10 @@ dependencies = [ "biome_service", "biome_test_utils", "biome_unicode_table", + "camino", "drop_bomb", "enumflags2", - "indexmap 2.7.1", + "indexmap", "insta", "quickcheck", "quickcheck_macros", @@ -864,6 +935,7 @@ dependencies = [ "biome_js_parser", "biome_rowan", "biome_string_case", + "camino", "enumflags2", "schemars", "serde", @@ -881,6 +953,7 @@ dependencies = [ "biome_js_syntax", "biome_rowan", "biome_test_utils", + "camino", "insta", "tests_macros", ] @@ -896,7 +969,9 @@ dependencies = [ "biome_json_parser", "biome_json_syntax", "biome_rowan", + "biome_suppression", "biome_test_utils", 
+ "camino", "insta", "natord", "rustc-hash 2.1.0", @@ -927,6 +1002,8 @@ dependencies = [ "biome_rowan", "biome_service", "biome_suppression", + "biome_test_utils", + "camino", "countme", "schemars", "serde", @@ -959,10 +1036,26 @@ version = "0.5.7" dependencies = [ "biome_rowan", "biome_string_case", + "camino", "schemars", "serde", ] +[[package]] +name = "biome_json_value" +version = "0.1.0" +dependencies = [ + "biome_deserialize", + "biome_deserialize_macros", + "biome_json_parser", + "biome_json_syntax", + "biome_rowan", + "indexmap", + "oxc_resolver", + "rustc-hash 2.1.0", + "static_assertions", +] + [[package]] name = "biome_lsp" version = "0.0.0" @@ -978,7 +1071,9 @@ dependencies = [ "biome_rowan", "biome_service", "biome_text_edit", + "camino", "futures", + "papaya", "rustc-hash 2.1.0", "serde", "serde_json", @@ -1052,14 +1147,42 @@ dependencies = [ "biome_analyze", "biome_console", "biome_diagnostics", - "biome_json_analyze", + "biome_glob", "biome_json_factory", + "biome_json_formatter", "biome_json_parser", "biome_json_syntax", + "biome_package", "biome_rowan", "biome_test_utils", + "camino", + "insta", + "rustc-hash 2.1.0", + "tests_macros", +] + +[[package]] +name = "biome_package" +version = "0.5.7" +dependencies = [ + "biome_console", + "biome_deserialize", + "biome_deserialize_macros", + "biome_diagnostics", + "biome_json_parser", + "biome_json_syntax", + "biome_json_value", + "biome_parser", + "biome_rowan", + "biome_text_size", + "camino", + "indexmap", "insta", + "node-semver", + "oxc_resolver", "rustc-hash 2.1.0", + "serde", + "static_assertions", "tests_macros", ] @@ -1077,23 +1200,35 @@ dependencies = [ ] [[package]] -name = "biome_project" -version = "0.5.7" +name = "biome_plugin_loader" +version = "0.0.1" dependencies = [ + "biome_analyze", "biome_console", "biome_deserialize", "biome_deserialize_macros", "biome_diagnostics", + "biome_fs", + "biome_grit_patterns", "biome_json_parser", - "biome_json_syntax", "biome_parser", "biome_rowan", - "biome_text_size", + "camino", + "grit-pattern-matcher", + "grit-util", "insta", - "node-semver", - "rustc-hash 2.1.0", "serde", - "tests_macros", +] + +[[package]] +name = "biome_project_layout" +version = "0.0.1" +dependencies = [ + "biome_package", + "biome_parser", + "camino", + "papaya", + "rustc-hash 2.1.0", ] [[package]] @@ -1108,15 +1243,16 @@ dependencies = [ "quickcheck", "quickcheck_macros", "rustc-hash 2.1.0", + "schemars", "serde", "serde_json", - "tracing", ] [[package]] name = "biome_service" version = "0.0.0" dependencies = [ + "append-only-vec", "biome_analyze", "biome_configuration", "biome_console", @@ -1124,12 +1260,12 @@ dependencies = [ "biome_css_formatter", "biome_css_parser", "biome_css_syntax", + "biome_dependency_graph", "biome_deserialize", - "biome_deserialize_macros", "biome_diagnostics", - "biome_flags", "biome_formatter", "biome_fs", + "biome_glob", "biome_graphql_analyze", "biome_graphql_formatter", "biome_graphql_parser", @@ -1151,27 +1287,26 @@ dependencies = [ "biome_json_formatter", "biome_json_parser", "biome_json_syntax", + "biome_package", "biome_parser", - "biome_project", + "biome_project_layout", "biome_rowan", "biome_string_case", "biome_text_edit", - "bpaf", - "dashmap 6.1.0", + "camino", + "crossbeam", "enumflags2", "getrandom 0.2.15", "ignore", - "indexmap 2.7.1", "insta", - "oxc_resolver", + "papaya", + "rayon", "regex", "rustc-hash 2.1.0", "schemars", "serde", "serde_json", - "slotmap", "smallvec", - "tests_macros", "tracing", ] @@ -1186,6 +1321,7 @@ dependencies = [ "biome_console", 
"biome_diagnostics", "biome_rowan", + "log", ] [[package]] @@ -1206,12 +1342,18 @@ dependencies = [ "biome_analyze", "biome_configuration", "biome_console", + "biome_dependency_graph", "biome_deserialize", "biome_diagnostics", + "biome_formatter", + "biome_fs", + "biome_js_parser", "biome_json_parser", - "biome_project", + "biome_package", + "biome_project_layout", "biome_rowan", "biome_service", + "camino", "countme", "json_comments", "serde_json", @@ -1248,10 +1390,11 @@ version = "0.5.7" [[package]] name = "biome_wasm" -version = "1.7.3" +version = "1.9.4" dependencies = [ "biome_console", "biome_diagnostics", + "biome_fs", "biome_js_factory", "biome_js_formatter", "biome_rowan", @@ -1292,6 +1435,8 @@ name = "biome_yaml_syntax" version = "0.0.1" dependencies = [ "biome_rowan", + "biome_string_case", + "camino", "schemars", "serde", ] @@ -1383,6 +1528,15 @@ version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" +[[package]] +name = "camino" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3" +dependencies = [ + "serde", +] + [[package]] name = "case" version = "1.0.0" @@ -1557,7 +1711,7 @@ dependencies = [ "cookie", "document-features", "idna", - "indexmap 2.7.1", + "indexmap", "log", "serde", "serde_derive", @@ -2160,9 +2314,9 @@ dependencies = [ [[package]] name = "grit-pattern-matcher" -version = "0.4.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8430b130e086a1764789402b34685336f2e3f6ec37cd188535bede892f88e65b" +checksum = "4694b698b2b87b9ad1c2dfef1103de207b4e12821d338b153264c8058ea58fa4" dependencies = [ "elsa", "grit-util", @@ -2173,9 +2327,9 @@ dependencies = [ [[package]] name = "grit-util" -version = "0.4.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99b005c4c15ce0c47022554c41a01fe2441d3622e586a82614a6fe681833d5d4" +checksum = "0cde701c8427e7260b65e979bc90b34be2681b5af20908ff6c6dfff683ff6f02" dependencies = [ "derive_builder", "once_cell", @@ -2190,12 +2344,6 @@ version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" -[[package]] -name = "hashbrown" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" - [[package]] name = "hashbrown" version = "0.14.5" @@ -2208,16 +2356,6 @@ version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb" -[[package]] -name = "hdrhistogram" -version = "7.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "765c9198f173dd59ce26ff9f95ef0aafd0a0fe01fb9d72841bc5066a4c06511d" -dependencies = [ - "byteorder", - "num-traits", -] - [[package]] name = "hermit-abi" version = "0.3.9" @@ -2408,17 +2546,6 @@ dependencies = [ "winapi-util", ] -[[package]] -name = "indexmap" -version = "1.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" -dependencies = [ - "autocfg", - "hashbrown 0.12.3", - "serde", -] - [[package]] name = "indexmap" version = "2.7.1" @@ -2477,7 +2604,7 @@ 
checksum = "adcf93614601c8129ddf72e2d5633df827ba6551541c6d8c59520a371475be1f" dependencies = [ "hermit-abi", "io-lifetimes", - "rustix", + "rustix 0.37.7", "windows-sys 0.48.0", ] @@ -2601,6 +2728,12 @@ version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" +[[package]] +name = "linux-raw-sys" +version = "0.4.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" + [[package]] name = "litemap" version = "0.7.3" @@ -2841,16 +2974,17 @@ checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f" [[package]] name = "oxc_resolver" -version = "3.0.3" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bed381b6ab4bbfebfc7a011ad43b110ace8d201d02a39c0e09855f16b8f3f741" +checksum = "d0f82c2be3d07b2ac002fb4a414d6fab602b352a8d99ed9b59f6868a968c73ba" dependencies = [ "cfg-if", - "dashmap 6.1.0", - "indexmap 2.7.1", + "indexmap", "json-strip-comments", "once_cell", + "papaya", "rustc-hash 2.1.0", + "seize", "serde", "serde_json", "simdutf8", @@ -2858,6 +2992,16 @@ dependencies = [ "tracing", ] +[[package]] +name = "papaya" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc7c76487f7eaa00a0fc1d7f88dc6b295aec478d11b0fc79f857b62c2874124c" +dependencies = [ + "equivalent", + "seize", +] + [[package]] name = "parking_lot" version = "0.12.3" @@ -3074,7 +3218,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ed1a693391a16317257103ad06a88c6529ac640846021da7c435a06fffdacd7" dependencies = [ "chrono", - "indexmap 2.7.1", + "indexmap", "newtype-uuid", "quick-xml", "strip-ansi-escapes", @@ -3349,6 +3493,7 @@ dependencies = [ "biome_json_syntax", "biome_rowan", "biome_service", + "camino", "pulldown-cmark", ] @@ -3389,10 +3534,23 @@ dependencies = [ "errno", "io-lifetimes", "libc", - "linux-raw-sys", + "linux-raw-sys 0.3.8", "windows-sys 0.45.0", ] +[[package]] +name = "rustix" +version = "0.38.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc99bc2d4f1fed22595588a013687477aedf3cdcfb26558c559edb67b4d9b22e" +dependencies = [ + "bitflags 2.6.0", + "errno", + "libc", + "linux-raw-sys 0.4.15", + "windows-sys 0.48.0", +] + [[package]] name = "rustls" version = "0.23.19" @@ -3462,8 +3620,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09c024468a378b7e36765cd36702b7a90cc3cba11654f6685c8f233408e89e92" dependencies = [ "dyn-clone", - "indexmap 1.9.3", - "indexmap 2.7.1", + "indexmap", "schemars_derive", "serde", "serde_json", @@ -3488,6 +3645,16 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" +[[package]] +name = "seize" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d84b0c858bdd30cb56f5597f8b3bf702ec23829e652cc636a1e5a7b9de46ae93" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + [[package]] name = "serde" version = "1.0.217" @@ -3547,7 +3714,7 @@ version = "1.0.137" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "930cfb6e6abf99298aaad7d29abbef7a9999a9a8806a40088f55f0dcec03146b" dependencies = [ - "indexmap 2.7.1", + "indexmap", "itoa", "memchr", "ryu", @@ -3589,7 +3756,7 @@ version = 
"0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ - "indexmap 2.7.1", + "indexmap", "itoa", "ryu", "serde", @@ -3655,16 +3822,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "slotmap" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbff4acf519f630b3a3ddcfaea6c06b42174d9a44bc70c620e9ed1649d58b82a" -dependencies = [ - "serde", - "version_check", -] - [[package]] name = "smallvec" version = "1.13.2" @@ -3790,6 +3947,16 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "terminal_size" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5352447f921fda68cf61b4101566c0bdb5104eff6804d0678e5227580ab6a4e9" +dependencies = [ + "rustix 0.38.25", + "windows-sys 0.59.0", +] + [[package]] name = "tests_macros" version = "0.0.0" @@ -3993,7 +4160,7 @@ version = "0.22.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3328d4f68a705b2a4498da1d580585d39a6510f98318a2cec3018a7ec61ddef" dependencies = [ - "indexmap 2.7.1", + "indexmap", "serde", "serde_spanned", "toml_datetime", @@ -4795,6 +4962,7 @@ dependencies = [ "biome_json_syntax", "biome_parser", "biome_rowan", + "camino", "codspeed-criterion-compat", "criterion", "mimalloc", @@ -4856,6 +5024,7 @@ dependencies = [ "biome_parser", "biome_rowan", "biome_string_case", + "camino", "colored 3.0.0", "indicatif", "pico-args", diff --git a/Cargo.toml b/Cargo.toml index 453ddadfff9c..eb6c5aa70118 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -109,6 +109,7 @@ biome_css_formatter = { version = "0.5.7", path = "./crates/biome_css_f biome_css_parser = { version = "0.5.7", path = "./crates/biome_css_parser" } biome_css_semantic = { version = "0.0.0", path = "./crates/biome_css_semantic" } biome_css_syntax = { version = "0.5.7", path = "./crates/biome_css_syntax" } +biome_dependency_graph = { version = "0.0.1", path = "./crates/biome_dependency_graph" } biome_deserialize = { version = "0.6.0", path = "./crates/biome_deserialize" } biome_deserialize_macros = { version = "0.6.0", path = "./crates/biome_deserialize_macros" } biome_diagnostics = { version = "0.5.7", path = "./crates/biome_diagnostics" } @@ -143,18 +144,21 @@ biome_json_factory = { version = "0.5.7", path = "./crates/biome_json_ biome_json_formatter = { version = "0.5.7", path = "./crates/biome_json_formatter" } biome_json_parser = { version = "0.5.7", path = "./crates/biome_json_parser" } biome_json_syntax = { version = "0.5.7", path = "./crates/biome_json_syntax" } +biome_json_value = { version = "0.1.0", path = "./crates/biome_json_value" } biome_lsp_converters = { version = "0.1.0", path = "./crates/biome_lsp_converters" } biome_markdown_factory = { version = "0.0.1", path = "./crates/biome_markdown_factory" } biome_markdown_parser = { version = "0.0.1", path = "./crates/biome_markdown_parser" } biome_markdown_syntax = { version = "0.0.1", path = "./crates/biome_markdown_syntax" } +biome_plugin_loader = { version = "0.0.1", path = "./crates/biome_plugin_loader" } +biome_project_layout = { version = "0.0.1", path = "./crates/biome_project_layout" } biome_ungrammar = { version = "0.3.1", path = "./crates/biome_ungrammar" } biome_yaml_factory = { version = "0.0.1", path = "./crates/biome_yaml_factory" } biome_yaml_parser = { version = "0.0.1", path = "./crates/biome_yaml_parser" } biome_yaml_syntax = { version = "0.0.1", path = 
"./crates/biome_yaml_syntax" } biome_markup = { version = "0.5.7", path = "./crates/biome_markup" } +biome_package = { version = "0.5.7", path = "./crates/biome_package" } biome_parser = { version = "0.5.7", path = "./crates/biome_parser" } -biome_project = { version = "0.5.7", path = "./crates/biome_project" } biome_rowan = { version = "0.5.7", path = "./crates/biome_rowan" } biome_string_case = { version = "0.5.7", path = "./crates/biome_string_case" } biome_suppression = { version = "0.5.7", path = "./crates/biome_suppression" } @@ -174,40 +178,45 @@ biome_test_utils = { path = "./crates/biome_test_utils" } tests_macros = { path = "./crates/tests_macros" } # Crates needed in the workspace -anyhow = "1.0.95" -bpaf = { version = "0.9.16", features = ["derive"] } -countme = "3.0.1" -crossbeam = "0.8.4" -dashmap = "6.1.0" -enumflags2 = "0.7.11" -getrandom = "0.2.15" -globset = "0.4.15" -ignore = "0.4.23" -indexmap = { version = "2.7.1", features = ["serde"] } -insta = "1.42.1" -natord = "1.0.9" -oxc_resolver = "3.0.3" -proc-macro2 = "1.0.86" -quickcheck = "1.0.3" -quickcheck_macros = "1.0.0" -quote = "1.0.38" -rayon = "1.10.0" -regex = "1.11.1" -rustc-hash = "2.1.0" -schemars = { version = "0.8.21", features = ["indexmap2", "smallvec"] } -serde = { version = "1.0.217", features = ["derive"] } -serde_ini = "0.2.0" -serde_json = "1.0.137" -similar = "2.7.0" -slotmap = "1.0.7" -smallvec = { version = "1.13.2", features = ["union", "const_new", "serde"] } -syn = "1.0.109" -termcolor = "1.4.1" -tokio = "1.43.0" -tracing = { version = "0.1.41", default-features = false, features = ["std"] } -tracing-subscriber = "0.3.19" -unicode-bom = "2.0.3" -unicode-width = "0.1.12" +anyhow = "1.0.95" +bpaf = { version = "0.9.16", features = ["derive"] } +camino = "1.1.9" +cfg-if = "1" +countme = "3.0.1" +crossbeam = "0.8.4" +dashmap = "6.1.0" +enumflags2 = "0.7.11" +getrandom = "0.2.15" +globset = "0.4.15" +grit-pattern-matcher = "0.5" +grit-util = "0.5" +ignore = "0.4.23" +indexmap = { version = "2.7.1" } +insta = "1.42.1" +natord = "1.0.9" +oxc_resolver = "4.0" +papaya = "0.1.8" +proc-macro2 = "1.0.86" +quickcheck = "1.0.3" +quickcheck_macros = "1.0.0" +quote = "1.0.38" +rayon = "1.10.0" +regex = "1.11.1" +rustc-hash = "2.1.0" +schemars = { version = "0.8.21", features = ["indexmap2", "smallvec"] } +serde = { version = "1.0.217", features = ["derive"] } +serde_ini = "0.2.0" +serde_json = "1.0.137" +similar = "2.7.0" +smallvec = { version = "1.13.2", features = ["union", "const_new", "serde"] } +syn = "1.0.109" +termcolor = "1.4.1" +terminal_size = "0.4.1" +tokio = "1.43.0" +tracing = { version = "0.1.41", default-features = false, features = ["std", "attributes"] } +tracing-subscriber = "0.3.19" +unicode-bom = "2.0.3" +unicode-width = "0.1.12" [profile.dev.package.biome_wasm] debug = true opt-level = "s" diff --git a/benchmark/biome.json b/benchmark/biome.json index 84d8e7851659..77e7210b2b6c 100644 --- a/benchmark/biome.json +++ b/benchmark/biome.json @@ -66,7 +66,6 @@ "noArguments": "error", "noCommaOperator": "error", "noParameterAssign": "error", - "noVar": "error", "useConst": "error", "useCollapsedElseIf": "error", "useDefaultParameterLast": "error", @@ -105,7 +104,8 @@ "useAwait": "error", "useDefaultSwitchClauseLast": "error", "useGetterReturn": "error", - "useValidTypeof": "error" + "useValidTypeof": "error", + "noVar": "error" } } } diff --git a/benchmark/package.json b/benchmark/package.json index 1b650334c87c..62778faa1b7f 100644 --- a/benchmark/package.json +++ b/benchmark/package.json @@ -12,6 
+12,7 @@ }, "devDependencies": { "@typescript-eslint/eslint-plugin": "8.20.0", + "@typescript-eslint/parser": "8.3.0", "dprint": "0.48.0", "eslint": "9.17.0", "prettier": "3.4.2" diff --git a/biome.json b/biome.json index f2311cb3aca5..98fb7eae05d2 100644 --- a/biome.json +++ b/biome.json @@ -1,6 +1,6 @@ { "$schema": "./packages/@biomejs/biome/configuration_schema.json", - "assists": { + "assist": { "enabled": true, "ignore": [ "./packages/@biomejs/biome/configuration_schema.json" @@ -17,29 +17,28 @@ } }, "files": { - "ignore": [ - "crates/**", - "dist/**", - ".astro/**", - "assets/**", - "packages/@biomejs/backend-jsonrpc/src/workspace.ts", - "public/**", - "**/__snapshots__", - "**/undefined/**", - "_fonts/**", - "packages/@biomejs/wasm-*", - "benchmark/target/**" - ], - "include": [ - "packages/aria-data/*.js", - "packages/@biomejs/**", - "packages/tailwindcss-config-analyzer/**", - "benchmark/**" + "includes": [ + "**/packages/aria-data/*.js", + "**/packages/@biomejs/**", + "**/packages/tailwindcss-config-analyzer/**", + "**/benchmark/**", + "!**/crates/**", + "!**/dist/**", + "!**/.astro/**", + "!**/assets/**", + "!**/packages/@biomejs/backend-jsonrpc/src/workspace.ts", + "!**/public/**", + "!**/__snapshots__", + "!**/undefined/**", + "!**/_fonts/**", + "!**/packages/@biomejs/wasm-*", + "!**/benchmark/target/**" ] }, "formatter": { - "ignore": [ - "configuration_schema.json" + "includes": [ + "**", + "!**/configuration_schema.json" ] }, "json": { @@ -51,15 +50,38 @@ "linter": { "enabled": true, "rules": { - "recommended": true, "style": { - "noNonNullAssertion": "off" + "noNonNullAssertion": "off", + "useNodejsImportProtocol": "error", + "useLiteralEnumMembers": "error", + "noArguments": "error", + "noParameterAssign": "error", + "useShorthandFunctionType": "error", + "useExportType": "error", + "useDefaultParameterLast": "error", + "noCommaOperator": "error", + "useSingleVarDeclarator": "error", + "useConst": "error", + "noInferrableTypes": "error", + "useExponentiationOperator": "error", + "noUselessElse": "error", + "useSelfClosingElements": "error", + "useImportType": "error", + "useNumberNamespace": "error", + "useAsConstAssertion": "error", + "noUnusedTemplateLiteral": "error", + "useNumericLiterals": "error", + "useTemplate": "error", + "useEnumInitializers": "error" + }, + "correctness": { + "noUndeclaredDependencies": "error" + }, + "suspicious": { + "noVar": "on" } } }, - "organizeImports": { - "enabled": true - }, "vcs": { "clientKind": "git", "enabled": true, diff --git a/crates/biome_analyze/CONTRIBUTING.md b/crates/biome_analyze/CONTRIBUTING.md index db3d73195e50..10b44a2c2312 100644 --- a/crates/biome_analyze/CONTRIBUTING.md +++ b/crates/biome_analyze/CONTRIBUTING.md @@ -276,7 +276,6 @@ Don't forget to format your code with `just f` and lint with `just l`. That's it! Now, let's [test the rule](#testing-the-rule). - ### Coding Tips for Rules Below, there are many tips and guidelines on how to create a lint rule using Biome infrastructure. @@ -375,6 +374,59 @@ impl Rule for ExampleRule { } ``` +#### Rule severity + +The macro accepts a `severity` field, of type `biome_diagnostics::Severity`. By default, rules without `severity` will start with `Severity::Information`. + +If you want to change the default severity, you need to assign it: + +```diff ++ use biome_diagnostics::Severity; + +declare_lint_rule! 
{ + /// Documentation + pub(crate) ExampleRule { + version: "next", + name: "myRuleName", + language: "js", + recommended: false, ++ severity: Severity::Warning, + } +} +``` + +#### Rule domains + +Domains are very specific ways to collect rules that belong to the same "concept". Domains give users a way to opt in to or out of rules that belong to the same domain. + +Some examples of domains: testing, specific framework, specific runtime, specific library. A rule can belong to multiple domains. + +```diff ++ use biome_analyze::RuleDomain; + + +declare_lint_rule! { + /// Documentation + pub(crate) ExampleRule { + version: "next", + name: "myRuleName", + language: "js", + recommended: true, ++ domains: &[RuleDomain::Test], + } +} +``` + +Rule domains can unlock various perks in the Biome analyzer: +- A domain can define a number of `package.json` dependencies. When a user has one or more of these dependencies, Biome will automatically enable the recommended rules that belong to the domain. To add/update/remove dependencies to a domain, check the function `RuleDomain::manifest_dependencies`. +- A domain can define a number of "globals". These globals are used by other rules and improve their UX. To add/update/remove globals to a domain, check the function `RuleDomain::globals`. + +When a rule is **recommended** and _has domains_, the rule is enabled only when the user enables the relevant domains via `"recommended"` or `"all"`. +Instead, if the rule is **recommended** but _doesn't have domains_, the rule is always enabled by default. + +> [!NOTE] +> Before adding a new domain, please consult with the maintainers of the project. + #### Rule Options Some rules may allow customization [using per-rule options in `biome.json`](https://biomejs.dev/linter/#rule-options). @@ -1139,7 +1191,7 @@ declare_lint_rule! { For simplicity, use `just` to run all the commands with: ```shell -just gen-lint +just gen-analyzer ``` ### Commiting your work diff --git a/crates/biome_analyze/Cargo.toml b/crates/biome_analyze/Cargo.toml index 94b961b898e6..df83a2e465ec 100644 --- a/crates/biome_analyze/Cargo.toml +++ b/crates/biome_analyze/Cargo.toml @@ -17,16 +17,20 @@ biome_console = { workspace = true } biome_deserialize = { workspace = true, optional = true } biome_deserialize_macros = { workspace = true, optional = true } biome_diagnostics = { workspace = true } +biome_parser = { workspace = true } biome_rowan = { workspace = true } +biome_suppression = { workspace = true } +camino = { workspace = true } enumflags2 = { workspace = true } +indexmap = { workspace = true } rustc-hash = { workspace = true } schemars = { workspace = true, optional = true } serde = { workspace = true, features = ["derive"], optional = true } tracing = { workspace = true } - [features] -serde = ["dep:serde", "dep:schemars", "dep:biome_deserialize", "dep:biome_deserialize_macros"] +schema = ["dep:schemars", "biome_console/schema", "serde"] +serde = ["dep:serde", "dep:biome_deserialize", "dep:biome_deserialize_macros"] [lints] workspace = true diff --git a/crates/biome_analyze/src/analyzer_plugin.rs b/crates/biome_analyze/src/analyzer_plugin.rs new file mode 100644 index 000000000000..e7b59220ea27 --- /dev/null +++ b/crates/biome_analyze/src/analyzer_plugin.rs @@ -0,0 +1,13 @@ +use crate::RuleDiagnostic; +use biome_parser::AnyParse; +use camino::Utf8PathBuf; +use std::fmt::Debug; + +/// Definition of an analyzer plugin.
+pub trait AnalyzerPlugin: Debug { + fn evaluate(&self, root: AnyParse, path: Utf8PathBuf) -> Vec; + + fn supports_css(&self) -> bool; + + fn supports_js(&self) -> bool; +} diff --git a/crates/biome_analyze/src/categories.rs b/crates/biome_analyze/src/categories.rs index 25d6a060543e..1b0835a82e9e 100644 --- a/crates/biome_analyze/src/categories.rs +++ b/crates/biome_analyze/src/categories.rs @@ -1,11 +1,14 @@ use enumflags2::{bitflags, BitFlags}; use std::borrow::Cow; +use std::fmt::{Display, Formatter}; #[derive(Copy, Clone, Debug, Eq, PartialEq)] #[cfg_attr( feature = "serde", - derive(serde::Serialize, serde::Deserialize, schemars::JsonSchema) + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "camelCase") )] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub enum RuleCategory { /// This rule checks the syntax according to the language specification /// and emits error diagnostics accordingly @@ -21,8 +24,20 @@ pub enum RuleCategory { Transformation, } +impl Display for RuleCategory { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + RuleCategory::Syntax => write!(f, "Syntax"), + RuleCategory::Lint => write!(f, "Lint"), + RuleCategory::Action => write!(f, "Action"), + RuleCategory::Transformation => write!(f, "Transformation"), + } + } +} + /// Actions that suppress rules should start with this string -pub const SUPPRESSION_ACTION_CATEGORY: &str = "quickfix.suppressRule"; +pub const SUPPRESSION_INLINE_ACTION_CATEGORY: &str = "quickfix.suppressRule.inline"; +pub const SUPPRESSION_TOP_LEVEL_ACTION_CATEGORY: &str = "quickfix.suppressRule.topLevel"; /// The category of a code action, this type maps directly to the /// [CodeActionKind] type in the Language Server Protocol specification @@ -31,8 +46,10 @@ pub const SUPPRESSION_ACTION_CATEGORY: &str = "quickfix.suppressRule"; #[derive(Clone, Debug, PartialEq, Eq)] #[cfg_attr( feature = "serde", - derive(serde::Serialize, serde::Deserialize, schemars::JsonSchema) + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "camelCase") )] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub enum ActionCategory { /// Base kind for quickfix actions: 'quickfix'. 
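For orientation, here is a minimal sketch of what an implementor of the `AnalyzerPlugin` trait introduced above might look like. It is not part of this PR: the `NoopPlugin` type is hypothetical, and the sketch assumes the elided return type of `evaluate` is `Vec<RuleDiagnostic>`, matching the `use crate::RuleDiagnostic` import in `analyzer_plugin.rs`.

```rust
use biome_analyze::{AnalyzerPlugin, RuleDiagnostic};
use biome_parser::AnyParse;
use camino::Utf8PathBuf;

/// Hypothetical plugin, shown only to illustrate the shape of the trait.
#[derive(Debug)]
struct NoopPlugin;

impl AnalyzerPlugin for NoopPlugin {
    fn evaluate(&self, _root: AnyParse, _path: Utf8PathBuf) -> Vec<RuleDiagnostic> {
        // A real plugin would walk the parsed tree and collect diagnostics;
        // this sketch reports nothing.
        Vec::new()
    }

    /// This plugin does not run on CSS files.
    fn supports_css(&self) -> bool {
        false
    }

    /// This plugin runs on JavaScript files.
    fn supports_js(&self) -> bool {
        true
    }
}
```

A plugin like this would be registered through `Analyzer::add_plugin` (added later in this diff), and its diagnostics flow through the same signal handling, including suppression comments, as the built-in rules.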
/// @@ -48,7 +65,23 @@ pub enum ActionCategory { Source(SourceActionKind), /// This action is using a base kind not covered by any of the previous /// variants - Other(Cow<'static, str>), + Other(OtherActionCategory), +} + +#[derive(Clone, Debug, PartialEq, Eq)] +#[cfg_attr( + feature = "serde", + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "camelCase") +)] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] +pub enum OtherActionCategory { + /// Base kind for inline suppression actions: `quickfix.suppressRule.inline.biome` + InlineSuppression, + /// Base kind for top-level suppression actions: `quickfix.suppressRule.topLevel.biome` + ToplevelSuppression, + /// Generic action that can't be mapped + Generic(Cow<'static, str>), } impl ActionCategory { @@ -58,7 +91,7 @@ /// /// ``` /// use std::borrow::Cow; - /// use biome_analyze::{ActionCategory, RefactorKind}; + /// use biome_analyze::{ActionCategory, RefactorKind, OtherActionCategory}; /// /// assert!(ActionCategory::QuickFix(Cow::from("quickfix")).matches("quickfix")); /// @@ -67,6 +100,9 @@ impl ActionCategory { /// /// assert!(ActionCategory::Refactor(RefactorKind::Extract).matches("refactor")); /// assert!(ActionCategory::Refactor(RefactorKind::Extract).matches("refactor.extract")); + /// + /// assert!(ActionCategory::Other(OtherActionCategory::InlineSuppression).matches("quickfix.suppressRule.inline.biome")); + /// assert!(ActionCategory::Other(OtherActionCategory::ToplevelSuppression).matches("quickfix.suppressRule.topLevel.biome")); /// ``` pub fn matches(&self, filter: &str) -> bool { self.to_str().starts_with(filter) @@ -105,10 +141,18 @@ Cow::Borrowed("source.organizeImports.biome") } ActionCategory::Source(SourceActionKind::Other(tag)) => { - Cow::Owned(format!("source.{tag}.biome")) + Cow::Owned(format!("source.biome.{tag}")) } - ActionCategory::Other(tag) => Cow::Owned(format!("{tag}.biome")), + ActionCategory::Other(other_action) => match other_action { + OtherActionCategory::InlineSuppression => { + Cow::Borrowed("quickfix.suppressRule.inline.biome") + } + OtherActionCategory::ToplevelSuppression => { + Cow::Borrowed("quickfix.suppressRule.topLevel.biome") + } + OtherActionCategory::Generic(tag) => Cow::Owned(format!("{tag}.biome")), + }, } } } @@ -119,8 +163,10 @@ impl ActionCategory { #[derive(Clone, Debug, PartialEq, Eq)] #[cfg_attr( feature = "serde", - derive(serde::Serialize, serde::Deserialize, schemars::JsonSchema) + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "camelCase") )] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub enum RefactorKind { /// This action describes a refactor with no particular sub-category None, @@ -159,8 +205,10 @@ #[derive(Clone, Debug, PartialEq, Eq)] #[cfg_attr( feature = "serde", - derive(serde::Serialize, serde::Deserialize, schemars::JsonSchema) + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "camelCase") )] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub enum SourceActionKind { /// This action describes a source action with no particular sub-category None, @@ -183,7 +231,7 @@ pub enum SourceActionKind { pub(crate) enum Categories { Syntax = 1 << RuleCategory::Syntax as u8, Lint = 1 << RuleCategory::Lint as u8, - Action = 1 << RuleCategory::Action as u8, + Assist = 1 << RuleCategory::Action as u8, Transformation = 1 << RuleCategory::Transformation as u8, } @@ -195,6 +243,26 @@ pub(crate) enum Categories { /// 
Use [RuleCategoriesBuilder] to generate the categories you want to query. pub struct RuleCategories(BitFlags); +impl Display for RuleCategories { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + if self.0.is_empty() { + write!(f, "No categories") + } else { + let mut list = f.debug_list(); + if self.0.contains(Categories::Syntax) { + list.entry(&RuleCategory::Syntax); + } + if self.0.contains(Categories::Lint) { + list.entry(&RuleCategory::Lint); + } + if self.0.contains(Categories::Assist) { + list.entry(&RuleCategory::Action); + } + list.finish() + } + } +} + impl RuleCategories { pub fn empty() -> Self { let empty: BitFlags = BitFlags::empty(); @@ -229,7 +297,7 @@ impl From for RuleCategories { match input { RuleCategory::Syntax => RuleCategories(BitFlags::from_flag(Categories::Syntax)), RuleCategory::Lint => RuleCategories(BitFlags::from_flag(Categories::Lint)), - RuleCategory::Action => RuleCategories(BitFlags::from_flag(Categories::Action)), + RuleCategory::Action => RuleCategories(BitFlags::from_flag(Categories::Assist)), RuleCategory::Transformation => { RuleCategories(BitFlags::from_flag(Categories::Transformation)) } @@ -253,7 +321,7 @@ impl serde::Serialize for RuleCategories { flags.push(RuleCategory::Lint); } - if self.0.contains(Categories::Action) { + if self.0.contains(Categories::Assist) { flags.push(RuleCategory::Action); } @@ -301,7 +369,7 @@ impl<'de> serde::Deserialize<'de> for RuleCategories { } } -#[cfg(feature = "serde")] +#[cfg(feature = "schema")] impl schemars::JsonSchema for RuleCategories { fn schema_name() -> String { String::from("RuleCategories") @@ -339,8 +407,8 @@ impl RuleCategoriesBuilder { self } - pub fn with_action(mut self) -> Self { - self.flags.insert(Categories::Action); + pub fn with_assist(mut self) -> Self { + self.flags.insert(Categories::Assist); self } diff --git a/crates/biome_analyze/src/context.rs b/crates/biome_analyze/src/context.rs index dc5027c562e4..650f3d52e5dd 100644 --- a/crates/biome_analyze/src/context.rs +++ b/crates/biome_analyze/src/context.rs @@ -2,8 +2,8 @@ use crate::options::{JsxRuntime, PreferredQuote}; use crate::{registry::RuleRoot, FromServices, Queryable, Rule, RuleKey, ServiceBag}; use crate::{GroupCategory, RuleCategory, RuleGroup, RuleMetadata}; use biome_diagnostics::{Error, Result}; +use camino::Utf8Path; use std::ops::Deref; -use std::path::Path; type RuleQueryResult = <::Query as Queryable>::Output; type RuleServiceBag = <::Query as Queryable>::Services; @@ -14,7 +14,7 @@ pub struct RuleContext<'a, R: Rule> { bag: &'a ServiceBag, services: RuleServiceBag, globals: &'a [&'a str], - file_path: &'a Path, + file_path: &'a Utf8Path, options: &'a R::Options, preferred_quote: &'a PreferredQuote, preferred_jsx_quote: &'a PreferredQuote, @@ -31,7 +31,7 @@ where root: &'a RuleRoot, services: &'a ServiceBag, globals: &'a [&'a str], - file_path: &'a Path, + file_path: &'a Utf8Path, options: &'a R::Options, preferred_quote: &'a PreferredQuote, preferred_jsx_quote: &'a PreferredQuote, @@ -162,7 +162,7 @@ where } /// The file path of the current file - pub fn file_path(&self) -> &Path { + pub fn file_path(&self) -> &Utf8Path { self.file_path } @@ -186,7 +186,7 @@ where } } -impl<'a, R> Deref for RuleContext<'a, R> +impl Deref for RuleContext<'_, R> where R: Rule, { diff --git a/crates/biome_analyze/src/diagnostics.rs b/crates/biome_analyze/src/diagnostics.rs index 44cf2631c876..436653d4c29e 100644 --- a/crates/biome_analyze/src/diagnostics.rs +++ b/crates/biome_analyze/src/diagnostics.rs @@ -1,11 +1,11 @@ -use 
biome_console::MarkupBuf; +use biome_console::{markup, MarkupBuf}; use biome_diagnostics::{ advice::CodeSuggestionAdvice, category, Advices, Category, Diagnostic, DiagnosticExt, - DiagnosticTags, Error, Location, Severity, Visit, + DiagnosticTags, Error, Location, LogCategory, MessageAndDescription, Severity, Visit, }; use biome_rowan::TextRange; use std::borrow::Cow; -use std::fmt::{Debug, Display, Formatter}; +use std::fmt::{Debug, Formatter}; use crate::rule::RuleDiagnostic; @@ -65,7 +65,7 @@ impl Diagnostic for AnalyzerDiagnostic { fn severity(&self) -> Severity { match &self.kind { - DiagnosticKind::Rule { .. } => Severity::Error, + DiagnosticKind::Rule(diagnostic) => diagnostic.severity(), DiagnosticKind::Raw(error) => error.severity(), } } @@ -141,38 +141,64 @@ impl AnalyzerDiagnostic { #[derive(Debug, Diagnostic, Clone)] #[diagnostic(severity = Warning)] -pub struct SuppressionDiagnostic { +pub struct AnalyzerSuppressionDiagnostic { #[category] category: &'static Category, #[location(span)] range: TextRange, #[message] #[description] - message: String, + message: MessageAndDescription, #[tags] tags: DiagnosticTags, + + #[advice] + advice: SuppressionAdvice, } -impl SuppressionDiagnostic { +impl AnalyzerSuppressionDiagnostic { pub(crate) fn new( category: &'static Category, range: TextRange, - message: impl Display, + message: impl biome_console::fmt::Display, ) -> Self { Self { category, range, - message: message.to_string(), + message: MessageAndDescription::from(markup! { {message} }.to_owned()), tags: DiagnosticTags::empty(), + advice: SuppressionAdvice::default(), } } - pub(crate) fn with_tags(mut self, tags: DiagnosticTags) -> Self { - self.tags |= tags; + pub(crate) fn note(mut self, message: MarkupBuf, range: impl Into) -> Self { + self.advice.messages.push((message, Some(range.into()))); + self + } + + pub(crate) fn hint(mut self, message: MarkupBuf) -> Self { + self.advice.messages.push((message, None)); self } } +#[derive(Debug, Default, Clone)] +struct SuppressionAdvice { + messages: Vec<(MarkupBuf, Option)>, +} + +impl Advices for SuppressionAdvice { + fn record(&self, visitor: &mut dyn Visit) -> std::io::Result<()> { + for (message, range) in &self.messages { + visitor.record_log(LogCategory::Info, &markup! {{message}})?; + let location = Location::builder().span(range); + + visitor.record_frame(location.build())? 
+ } + Ok(()) + } +} + /// Series of errors encountered when running rules on a file #[derive(Debug, PartialEq, Eq, Clone)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] diff --git a/crates/biome_analyze/src/lib.rs b/crates/biome_analyze/src/lib.rs index 561adeb90e4f..21d7831aa0cf 100644 --- a/crates/biome_analyze/src/lib.rs +++ b/crates/biome_analyze/src/lib.rs @@ -1,12 +1,12 @@ #![deny(rustdoc::broken_intra_doc_links)] -use std::borrow::Cow; -use std::cmp::Ordering; +use biome_console::markup; +use biome_parser::AnyParse; use std::collections::{BTreeMap, BinaryHeap}; use std::fmt::{Debug, Display, Formatter}; use std::ops; -use tracing::trace; +mod analyzer_plugin; mod categories; pub mod context; mod diagnostics; @@ -18,17 +18,20 @@ mod rule; mod services; mod signals; mod suppression_action; +mod suppressions; mod syntax; mod visitor; // Re-exported for use in the `declare_group` macro pub use biome_diagnostics::category_concat; +pub use crate::analyzer_plugin::AnalyzerPlugin; pub use crate::categories::{ - ActionCategory, RefactorKind, RuleCategories, RuleCategoriesBuilder, RuleCategory, - SourceActionKind, SUPPRESSION_ACTION_CATEGORY, + ActionCategory, OtherActionCategory, RefactorKind, RuleCategories, RuleCategoriesBuilder, + RuleCategory, SourceActionKind, SUPPRESSION_INLINE_ACTION_CATEGORY, + SUPPRESSION_TOP_LEVEL_ACTION_CATEGORY, }; -pub use crate::diagnostics::{AnalyzerDiagnostic, RuleError, SuppressionDiagnostic}; +pub use crate::diagnostics::{AnalyzerDiagnostic, AnalyzerSuppressionDiagnostic, RuleError}; pub use crate::matcher::{InspectMatcher, MatchQueryParams, QueryMatcher, RuleKey, SignalEntry}; pub use crate::options::{AnalyzerConfiguration, AnalyzerOptions, AnalyzerRules}; pub use crate::query::{AddVisitor, QueryKey, QueryMatch, Queryable}; @@ -38,34 +41,38 @@ pub use crate::registry::{ }; pub use crate::rule::{ CategoryLanguage, FixKind, GroupCategory, GroupLanguage, Rule, RuleAction, RuleDiagnostic, - RuleGroup, RuleMeta, RuleMetadata, RuleSource, RuleSourceKind, SuppressAction, + RuleDomain, RuleGroup, RuleMeta, RuleMetadata, RuleSource, RuleSourceKind, SuppressAction, }; pub use crate::services::{FromServices, MissingServicesDiagnostic, ServiceBag}; pub use crate::signals::{ AnalyzerAction, AnalyzerSignal, AnalyzerTransformation, DiagnosticSignal, }; +use crate::suppressions::Suppressions; pub use crate::syntax::{Ast, SyntaxVisitor}; pub use crate::visitor::{NodeVisitor, Visitor, VisitorContext, VisitorFinishContext}; -pub use suppression_action::{ApplySuppression, SuppressionAction}; - -use biome_console::markup; -use biome_diagnostics::{ - category, Applicability, Diagnostic, DiagnosticExt, DiagnosticTags, Severity, -}; +use biome_diagnostics::{category, Diagnostic, DiagnosticExt}; use biome_rowan::{ - AstNode, BatchMutation, Direction, Language, SyntaxElement, SyntaxToken, TextLen, TextRange, - TextSize, TokenAtOffset, TriviaPiece, TriviaPieceKind, WalkEvent, + AstNode, BatchMutation, Direction, Language, SyntaxElement, SyntaxToken, TextRange, TextSize, + TokenAtOffset, TriviaPieceKind, WalkEvent, }; +use biome_suppression::{Suppression, SuppressionKind}; +pub use suppression_action::{ApplySuppression, SuppressionAction}; /// The analyzer is the main entry point into the `biome_analyze` infrastructure. 
/// Its role is to run a collection of [Visitor]s over a syntax tree, with each /// visitor implementing various analysis over this syntax tree to generate /// auxiliary data structures as well as emit "query match" events to be /// processed by lint rules and in turn emit "analyzer signals" in the form of -/// diagnostics, code actions or both +/// diagnostics, code actions or both. +/// The analyzer also has support for plugins, although they do not (as of yet) +/// support the same visitor pattern. This makes them slower to execute, but +/// otherwise they act the same for consumers of the analyzer. They respect the +/// same suppression comments, and report signals in the same format. pub struct Analyzer<'analyzer, L: Language, Matcher, Break, Diag> { /// List of visitors being run by this instance of the analyzer for each phase phases: BTreeMap + 'analyzer>>>, + /// Plugins to be run after the phases for built-in rules. + plugins: Vec>, /// Holds the metadata for all the rules statically known to the analyzer metadata: &'analyzer MetadataRegistry, /// Executor for the query matches emitted by the visitors @@ -87,7 +94,7 @@ pub struct AnalyzerContext<'a, L: Language> { impl<'analyzer, L, Matcher, Break, Diag> Analyzer<'analyzer, L, Matcher, Break, Diag> where - L: Language, + L: Language + 'static, Matcher: QueryMatcher, Diag: Diagnostic + Clone + Send + Sync + 'static, { @@ -102,6 +109,7 @@ ) -> Self { Self { phases: BTreeMap::new(), + plugins: Vec::new(), metadata, query_matcher, parse_suppression_comment, @@ -119,35 +127,40 @@ self.phases.entry(phase).or_default().push(visitor); } + /// Registers an [AnalyzerPlugin] to be executed after the regular phases. + pub fn add_plugin(&mut self, plugin: Box) { + self.plugins.push(plugin); + } + pub fn run(self, mut ctx: AnalyzerContext) -> Option { let Self { phases, - metadata, + plugins, mut query_matcher, parse_suppression_comment, mut emit_signal, suppression_action, + metadata: _, } = self; let mut line_index = 0; - let mut line_suppressions = Vec::new(); + let mut suppressions = Suppressions::new(self.metadata); for (index, (phase, mut visitors)) in phases.into_iter().enumerate() { let runner = PhaseRunner { phase, visitors: &mut visitors, - metadata, query_matcher: &mut query_matcher, signal_queue: BinaryHeap::new(), parse_suppression_comment, line_index: &mut line_index, - line_suppressions: &mut line_suppressions, emit_signal: &mut emit_signal, root: &ctx.root, services: &ctx.services, range: ctx.range, suppression_action: suppression_action.as_ref(), options: ctx.options, + suppressions: &mut suppressions, }; // The first phase being run will inspect the tokens and parse the @@ -174,17 +187,60 @@ where } } - for suppression in line_suppressions { + for plugin in plugins { + let root: AnyParse = ctx.root.syntax().as_send().expect("not a root node").into(); + for diagnostic in plugin.evaluate(root, ctx.options.file_path.clone()) { + let signal = DiagnosticSignal::new(|| diagnostic.clone()); + + if let ControlFlow::Break(br) = (emit_signal)(&signal) { + return Some(br); + } + } + } + + for range_suppression in suppressions.range_suppressions.suppressions { + if range_suppression.did_suppress_signal { + continue; + } + if let Some(range) = range_suppression.already_suppressed { + let signal = DiagnosticSignal::new(|| { + AnalyzerSuppressionDiagnostic::new( + category!("suppressions/unused"), + range_suppression.start_comment_range, + "Suppression comment has no effect because another suppression comment suppresses the same
rule.", + ).note( + markup!{"This is the suppression comment that was used."}.to_owned(), + range + ) + }); + if let ControlFlow::Break(br) = (emit_signal)(&signal) { + return Some(br); + } + } + } + + for suppression in suppressions.line_suppressions { if suppression.did_suppress_signal { continue; } let signal = DiagnosticSignal::new(|| { - SuppressionDiagnostic::new( + if let Some(range) = suppression.already_suppressed { + AnalyzerSuppressionDiagnostic::new( + category!("suppressions/unused"), + suppression.comment_span, + "Suppression comment has no effect because another suppression comment suppresses the same rule.", + ).note( + markup!{"This is the suppression comment that was used."}.to_owned(), + range + ) + } else { + AnalyzerSuppressionDiagnostic::new( category!("suppressions/unused"), suppression.comment_span, "Suppression comment has no effect. Remove the suppression or make sure you are suppressing the correct rule.", - ) + ) + } }); if let ControlFlow::Break(br) = (emit_signal)(&signal) { @@ -202,8 +258,6 @@ struct PhaseRunner<'analyzer, 'phase, L: Language, Matcher, Break, Diag> { phase: Phases, /// List of visitors being run by this instance of the analyzer for each phase visitors: &'phase mut [Box + 'analyzer>], - /// Holds the metadata for all the rules statically known to the analyzer - metadata: &'analyzer MetadataRegistry, /// Executor for the query matches emitted by the visitors query_matcher: &'phase mut Matcher, /// Queue for pending analyzer signals @@ -214,8 +268,6 @@ struct PhaseRunner<'analyzer, 'phase, L: Language, Matcher, Break, Diag> { suppression_action: &'phase dyn SuppressionAction, /// Line index at the current position of the traversal line_index: &'phase mut usize, - /// Track active suppression comments per-line, ordered by line index - line_suppressions: &'phase mut Vec, /// Handles analyzer signals emitted by individual rules emit_signal: &'phase mut SignalHandler<'analyzer, L, Break>, /// Root node of the file being analyzed @@ -226,31 +278,11 @@ struct PhaseRunner<'analyzer, 'phase, L: Language, Matcher, Break, Diag> { range: Option, /// Analyzer options options: &'phase AnalyzerOptions, + /// Tracks all suppressions during the analyzer phase + suppressions: &'phase mut Suppressions<'analyzer>, } -/// Single entry for a suppression comment in the `line_suppressions` buffer -#[derive(Debug)] -struct LineSuppression { - /// Line index this comment is suppressing lint rules for - line_index: usize, - /// Range of source text covered by the suppression comment - comment_span: TextRange, - /// Range of source text this comment is suppressing lint rules for - text_range: TextRange, - /// Set to true if this comment has set the `suppress_all` flag to true - /// (must be restored to false on expiration) - suppress_all: bool, - /// List of all the rules this comment has started suppressing (must be - /// removed from the suppressed set on expiration) - suppressed_rules: Vec>, - /// List of all the rule instances this comment has started suppressing. 
- suppressed_instances: Vec<(RuleFilter<'static>, String)>, - /// Set to `true` when a signal matching this suppression was emitted and - /// suppressed - did_suppress_signal: bool, -} - -impl<'a, 'phase, L, Matcher, Break, Diag> PhaseRunner<'a, 'phase, L, Matcher, Break, Diag> +impl PhaseRunner<'_, '_, L, Matcher, Break, Diag> where L: Language, Matcher: QueryMatcher, @@ -259,7 +291,6 @@ where /// Runs phase 0 over nodes and tokens to process line breaks and /// suppression comments fn run_first_phase(mut self) -> ControlFlow { - trace!("Running first analyzer phase"); let iter = self.root.syntax().preorder_with_tokens(Direction::Next); for event in iter { let node_event = match event { @@ -326,7 +357,7 @@ where /// Process the text for a single token, parsing suppression comments and /// handling line breaks, then flush all pending query signals in the queue - /// whose position is less then the end of the token within the file + /// whose position is less than the end of the token within the file fn handle_token(&mut self, token: SyntaxToken) -> ControlFlow { // Process the content of the token for comments and newline for (index, piece) in token.leading_trivia().pieces().enumerate() { @@ -377,30 +408,54 @@ where } } - // Search for an active suppression comment covering the range of + if self + .suppressions + .top_level_suppression + .suppressed_rule(&entry.rule) + || self.suppressions.top_level_suppression.suppress_all + { + self.signal_queue.pop(); + break; + } + + if self + .suppressions + .range_suppressions + .suppressed_rule(&entry.rule, &entry.text_range) + { + self.signal_queue.pop(); + break; + } + + // Search for an active line suppression comment covering the range of // this signal: first try to load the last line suppression and see // if it matches the current line index, otherwise perform a binary // search over all the previously seen suppressions to find one // with a matching range - let suppression = self.line_suppressions.last_mut().filter(|suppression| { - suppression.line_index == *self.line_index - && suppression.text_range.start() <= start - }); + let suppression = + self.suppressions + .line_suppressions + .last_mut() + .filter(|suppression| { + suppression.line_index == *self.line_index + && suppression.text_range.start() <= start + }); let suppression = match suppression { Some(suppression) => Some(suppression), None => { - let index = self.line_suppressions.binary_search_by(|suppression| { - if suppression.text_range.end() < entry.text_range.start() { - Ordering::Less - } else if entry.text_range.end() < suppression.text_range.start() { - Ordering::Greater - } else { - Ordering::Equal - } - }); - - index.ok().map(|index| &mut self.line_suppressions[index]) + let index = + self.suppressions + .line_suppressions + .partition_point(|suppression| { + suppression.text_range.end() < entry.text_range.start() + }); + + if index >= self.suppressions.line_suppressions.len() { + None + } else { + Some(&mut self.suppressions.line_suppressions[index]) + } } }; @@ -408,28 +463,22 @@ where if suppression.suppress_all { return true; } - - if suppression - .suppressed_rules - .iter() - .any(|filter| *filter == entry.rule) - { - return true; - } - - if entry.instances.is_empty() { - return false; - } - - entry.instances.iter().all(|value| { + if suppression.suppressed_instances.is_empty() { suppression - .suppressed_instances + .suppressed_rules .iter() - .any(|(filter, v)| *filter == entry.rule && v == value.as_ref()) - }) + .any(|filter| *filter == entry.rule) + } else { + 
entry.instances.iter().all(|value| { + suppression + .suppressed_instances + .iter() + .any(|(v, filter)| *filter == entry.rule && v == value.as_ref()) + }) + } }); - // If the signal is being suppressed mark the line suppression as + // If the signal is being suppressed, mark the line suppression as // hit, otherwise emit the signal if let Some(suppression) = suppression { suppression.did_suppress_signal = true; @@ -449,18 +498,15 @@ where fn handle_comment( &mut self, token: &SyntaxToken, - is_leading: bool, - index: usize, + _is_leading: bool, + _index: usize, text: &str, range: TextRange, ) -> ControlFlow { - let mut suppress_all = false; - let mut suppressed_rules = Vec::new(); - let mut suppressed_instances = Vec::new(); - let mut has_legacy = false; + let mut has_suppressions = false; - for result in (self.parse_suppression_comment)(text) { - let kind = match result { + for result in (self.parse_suppression_comment)(text, range) { + let suppression = match result { Ok(kind) => kind, Err(diag) => { // Emit the suppression parser diagnostic @@ -475,136 +521,26 @@ where } }; - if matches!(kind, SuppressionKind::Deprecated) { - let signal = DiagnosticSignal::new(move || { - SuppressionDiagnostic::new( - category!("suppressions/deprecatedSuppressionComment"), - range, - "// rome-ignore is deprecated, use // biome-ignore instead", - ) - .with_tags(DiagnosticTags::DEPRECATED_CODE) - .with_severity(Severity::Information) - }) - .with_action(move || create_suppression_comment_action(token)); - + if let Err(diagnostic) = + self.suppressions + .push_suppression(&suppression, range, token.text_range()) + { + let signal = DiagnosticSignal::new(|| diagnostic.clone()); (self.emit_signal)(&signal)?; + continue; } - let (rule, instance) = match kind { - SuppressionKind::Everything => (None, None), - SuppressionKind::Rule(rule) => (Some(rule), None), - SuppressionKind::RuleInstance(rule, instance) => (Some(rule), Some(instance)), - SuppressionKind::MaybeLegacy(rule) => (Some(rule), None), - SuppressionKind::Deprecated => (None, None), - }; - - if let Some(rule) = rule { - let group_rule = rule.split_once('/'); - - let key = match group_rule { - None => self.metadata.find_group(rule).map(RuleFilter::from), - Some((group, rule)) => { - self.metadata.find_rule(group, rule).map(RuleFilter::from) - } - }; - - match (key, instance) { - (Some(key), Some(value)) => suppressed_instances.push((key, value.to_owned())), - (Some(key), None) => { - suppressed_rules.push(key); - has_legacy |= matches!(kind, SuppressionKind::MaybeLegacy(_)); - } - _ if range_match(self.range, range) => { - // Emit a warning for the unknown rule - let signal = DiagnosticSignal::new(move || match group_rule { - Some((group, rule)) => SuppressionDiagnostic::new( - category!("suppressions/unknownRule"), - range, - format_args!( - "Unknown lint rule {group}/{rule} in suppression comment" - ), - ), - - None => SuppressionDiagnostic::new( - category!("suppressions/unknownGroup"), - range, - format_args!( - "Unknown lint rule group {rule} in suppression comment" - ), - ), - }); - - (self.emit_signal)(&signal)?; - } - _ => {} - } - } else { - suppressed_rules.clear(); - suppress_all = true; - // If this if a "suppress all lints" comment, no need to - // parse anything else - break; - } - } - - // Emit a warning for legacy suppression syntax - if has_legacy && range_match(self.range, range) { - let signal = DiagnosticSignal::new(move || { - SuppressionDiagnostic::new( - category!("suppressions/deprecatedSuppressionComment"), - range, - 
"Suppression is using a deprecated syntax", - ) - .with_tags(DiagnosticTags::DEPRECATED_CODE) - }); - - let signal = signal - .with_action(|| update_suppression(self.root, token, is_leading, index, text)); - - (self.emit_signal)(&signal)?; - } - - if !suppress_all && suppressed_rules.is_empty() && suppressed_instances.is_empty() { - return ControlFlow::Continue(()); + has_suppressions = true; } // Suppression comments apply to the next line - let line_index = *self.line_index + 1; + if has_suppressions { + let line_index = *self.line_index + 1; - // If the last suppression was on the same or previous line, extend its - // range and set of suppressed rules with the content for the new suppression - if let Some(last_suppression) = self.line_suppressions.last_mut() { - if last_suppression.line_index == line_index - || last_suppression.line_index + 1 == line_index - { - last_suppression.line_index = line_index; - last_suppression.text_range = last_suppression.text_range.cover(range); - last_suppression.suppress_all |= suppress_all; - if !last_suppression.suppress_all { - last_suppression.suppressed_rules.extend(suppressed_rules); - last_suppression - .suppressed_instances - .extend(suppressed_instances); - } else { - last_suppression.suppressed_rules.clear(); - last_suppression.suppressed_instances.clear(); - } - return ControlFlow::Continue(()); - } + self.suppressions + .overlap_last_suppression(line_index, range); } - let entry = LineSuppression { - line_index, - comment_span: range, - text_range: range, - suppress_all, - suppressed_rules, - suppressed_instances, - did_suppress_signal: false, - }; - - self.line_suppressions.push(entry); - ControlFlow::Continue(()) } @@ -613,168 +549,173 @@ where /// current suppression as required fn bump_line_index(&mut self, text: &str, range: TextRange) { let mut did_match = false; - for (index, _) in text.match_indices('\n') { - if let Some(last_suppression) = self.line_suppressions.last_mut() { - if last_suppression.line_index == *self.line_index { - let index = TextSize::try_from(index).expect( - "integer overflow while converting a suppression line to `TextSize`", - ); - let range = TextRange::at(range.start(), index); - last_suppression.text_range = last_suppression.text_range.cover(range); - did_match = true; - } - } + for (index, _) in text.match_indices(['\n']) { + let index = TextSize::try_from(index) + .expect("integer overflow while converting a suppression line to `TextSize`"); + let range = TextRange::at(range.start(), index); + did_match = self.suppressions.expand_range(range, *self.line_index); *self.line_index += 1; + self.suppressions.bump_line_index(*self.line_index); } if !did_match { - if let Some(last_suppression) = self.line_suppressions.last_mut() { - if last_suppression.line_index == *self.line_index { - last_suppression.text_range = last_suppression.text_range.cover(range); - } - } + self.suppressions.expand_range(range, *self.line_index); } } } -fn create_suppression_comment_action( - token: &SyntaxToken, -) -> Option> { - let first_node = token.parent()?; - let mut new_leading_trivia = vec![]; - let mut token_text = String::new(); - let mut new_trailing_trivia = vec![]; - let mut mutation = BatchMutation::new(first_node); - - for piece in token.leading_trivia().pieces() { - if !piece.is_comments() { - new_leading_trivia.push(TriviaPiece::new(piece.kind(), piece.text_len())); - token_text.push_str(piece.text()); - } - - if piece.text().contains("rome-ignore") { - let new_text = piece.text().replace("rome-ignore", 
"biome-ignore"); - new_leading_trivia.push(TriviaPiece::new(piece.kind(), new_text.text_len())); - token_text.push_str(&new_text); - } - } - - token_text.push_str(token.text_trimmed()); - - for piece in token.trailing_trivia().pieces() { - new_trailing_trivia.push(TriviaPiece::new(piece.kind(), piece.text_len())); - token_text.push_str(piece.text()); - } - - let new_token = SyntaxToken::new_detached( - token.kind(), - &token_text, - new_leading_trivia, - new_trailing_trivia, - ); - - mutation.replace_token_discard_trivia(token.clone(), new_token); - Some(AnalyzerAction { - mutation, - applicability: Applicability::MaybeIncorrect, - category: ActionCategory::QuickFix(Cow::Borrowed("")), - message: markup! { - "Use // biome-ignore instead" - } - .to_owned(), - rule_name: None, - }) -} - fn range_match(filter: Option, range: TextRange) -> bool { filter.map_or(true, |filter| filter.intersect(range).is_some()) } /// Signature for a suppression comment parser function /// -/// This function receives the text content of a comment and returns a list of -/// lint suppressions as an optional lint rule (if the lint rule is `None` the +/// This function receives two parameters: +/// 1. The text content of a comment. +/// 2. The range of the token the comment belongs too. The range is calculated from [SyntaxToken::text_range], so the range +/// includes all trivia. +/// +/// It returns the lint suppressions as an optional lint rule (if the lint rule is `None` the /// comment is interpreted as suppressing all lints) /// /// # Examples /// /// - `// biome-ignore format` -> `vec![]` /// - `// biome-ignore lint` -> `vec![Everything]` -/// - `// biome-ignore lint/style/useWhile` -> `vec![Rule("style/useWhile")]` -/// - `// biome-ignore lint/style/useWhile(foo)` -> `vec![RuleWithValue("style/useWhile", "foo")]` -/// - `// biome-ignore lint/style/useWhile lint/nursery/noUnreachable` -> `vec![Rule("style/useWhile"), Rule("nursery/noUnreachable")]` -/// - `// biome-ignore lint(style/useWhile)` -> `vec![MaybeLegacy("style/useWhile")]` -/// - `// biome-ignore lint(style/useWhile) lint(nursery/noUnreachable)` -> `vec![MaybeLegacy("style/useWhile"), MaybeLegacy("nursery/noUnreachable")]` -type SuppressionParser = fn(&str) -> Vec>; - +/// - `// biome-ignore lint/complexity/useWhile` -> `vec![Rule("complexity/useWhile")]` +/// - `// biome-ignore lint/complexity/useWhile(foo)` -> `vec![RuleWithValue("complexity/useWhile", "foo")]` +/// - `// biome-ignore lint/complexity/useWhile lint/nursery/noUnreachable` -> `vec![Rule("complexity/useWhile"), Rule("nursery/noUnreachable")]` +/// - `/** biome-ignore lint/complexity/useWhile */` if the comment is top-level -> `vec![TopLevel("complexity/useWhile")]` +type SuppressionParser = + for<'a> fn(&'a str, TextRange) -> Vec, D>>; + +#[derive(Debug, Clone)] /// This enum is used to categorize what is disabled by a suppression comment and with what syntax -pub enum SuppressionKind<'a> { +pub struct AnalyzerSuppression<'a> { + /// The kind of suppression + pub(crate) kind: AnalyzerSuppressionKind<'a>, + + /// The range where the `biome-ignore` comment is placed inside the whole text + pub(crate) ignore_range: Option, + + /// The kind of `biome-ignore` comment used for this suppression + pub(crate) variant: AnalyzerSuppressionVariant, +} + +#[derive(Debug, Clone)] +pub enum AnalyzerSuppressionVariant { + /// biome-ignore + Line, + /// biome-ignore-all + TopLevel, + /// biome-ignore-start + RangeStart, + /// biome-ignore-end + RangeEnd, +} + +impl From<&SuppressionKind> for 
AnalyzerSuppressionVariant { + fn from(value: &SuppressionKind) -> Self { + match value { + SuppressionKind::Classic => AnalyzerSuppressionVariant::Line, + SuppressionKind::All => AnalyzerSuppressionVariant::TopLevel, + SuppressionKind::RangeStart => AnalyzerSuppressionVariant::RangeStart, + SuppressionKind::RangeEnd => AnalyzerSuppressionVariant::RangeEnd, + } + } +} + +impl<'a> AnalyzerSuppression<'a> { + pub fn everything() -> Self { + Self { + kind: AnalyzerSuppressionKind::Everything, + ignore_range: None, + variant: AnalyzerSuppressionVariant::Line, + } + } + + pub fn rule_instance(rule: &'a str, instance: &'a str) -> Self { + Self { + kind: AnalyzerSuppressionKind::RuleInstance(rule, instance), + ignore_range: None, + variant: AnalyzerSuppressionVariant::Line, + } + } + pub fn rule(rule: &'a str) -> Self { + Self { + kind: AnalyzerSuppressionKind::Rule(rule), + ignore_range: None, + variant: AnalyzerSuppressionVariant::Line, + } + } + + #[must_use] + pub fn with_ignore_range(mut self, ignore_range: TextRange) -> Self { + self.ignore_range = Some(ignore_range); + self + } + + #[must_use] + pub fn with_variant(mut self, variant: impl Into) -> Self { + self.variant = variant.into(); + self + } +} +#[derive(Debug, Clone, Eq, PartialEq)] +pub enum AnalyzerSuppressionKind<'a> { /// A suppression disabling all lints eg. `// biome-ignore lint` Everything, - /// A suppression disabling a specific rule eg. `// biome-ignore lint/style/useWhile` + /// A suppression disabling a specific rule eg. `// biome-ignore lint/complexity/useWhile` Rule(&'a str), /// A suppression to be evaluated by a specific rule eg. `// biome-ignore lint/correctness/useExhaustiveDependencies(foo)` RuleInstance(&'a str, &'a str), - /// A suppression using the legacy syntax to disable a specific rule eg. 
`// biome-ignore lint(style/useWhile)` - MaybeLegacy(&'a str), - /// `rome-ignore` is legacy - Deprecated, } -fn update_suppression( - root: &L::Root, - token: &SyntaxToken, - is_leading: bool, - index: usize, - text: &str, -) -> Option> { - let old_token = token.clone(); - let new_token = token.clone().detach(); - - let old_trivia = if is_leading { - old_token.leading_trivia() - } else { - old_token.trailing_trivia() - }; - - let old_trivia: Vec<_> = old_trivia.pieces().collect(); - - let mut text = text.to_string(); - - while let Some(range_start) = text.find("lint(") { - let range_end = range_start + text[range_start..].find(')')?; - text.replace_range(range_end..range_end + 1, ""); - text.replace_range(range_start + 4..range_start + 5, "/"); - } - - let new_trivia = old_trivia.iter().enumerate().map(|(piece_index, piece)| { - if piece_index == index { - (piece.kind(), text.as_str()) +/// Takes a [Suppression] and returns a [AnalyzerSuppression] +pub fn to_analyzer_suppressions( + suppression: Suppression, + piece_range: TextRange, +) -> Vec { + let mut result = Vec::with_capacity(suppression.categories.len()); + let ignore_range = TextRange::new( + piece_range.add_start(suppression.range().start()).start(), + piece_range.add_start(suppression.range().end()).start(), + ); + for (key, value) in suppression.categories { + if key == category!("lint") { + result.push(AnalyzerSuppression::everything().with_variant(&suppression.kind)); } else { - (piece.kind(), piece.text()) - } - }); - - let new_token = if is_leading { - new_token.with_leading_trivia(new_trivia) - } else { - new_token.with_trailing_trivia(new_trivia) - }; - - let mut mutation = BatchMutation::new(root.syntax().clone()); - mutation.replace_token_discard_trivia(old_token, new_token); - - Some(AnalyzerAction { - rule_name: None, - category: ActionCategory::QuickFix(Cow::Borrowed("")), - applicability: Applicability::Always, - message: markup! 
{ - "Rewrite suppression to use the newer syntax" + let category = key.name(); + if let Some(rule) = category.strip_prefix("lint/") { + let suppression = if let Some(instance) = value { + AnalyzerSuppression::rule_instance(rule, instance) + .with_ignore_range(ignore_range) + } else { + AnalyzerSuppression::rule(rule).with_ignore_range(ignore_range) + } + .with_variant(&suppression.kind); + result.push(suppression); + } } - .to_owned(), - mutation, - }) + } + + result +} + +impl AnalyzerSuppression<'_> { + pub const fn is_top_level(&self) -> bool { + matches!(self.variant, AnalyzerSuppressionVariant::TopLevel) + } + pub const fn is_range_start(&self) -> bool { + matches!(self.variant, AnalyzerSuppressionVariant::RangeStart) + } + pub const fn is_range_end(&self) -> bool { + matches!(self.variant, AnalyzerSuppressionVariant::RangeEnd) + } + pub const fn is_line(&self) -> bool { + matches!(self.variant, AnalyzerSuppressionVariant::Line) + } } /// Payload received by the function responsible to mark a suppression comment @@ -783,7 +724,7 @@ pub struct SuppressionCommentEmitterPayload<'a, L: Language> { pub token_offset: TokenAtOffset>, /// A [BatchMutation] where the consumer can apply the suppression comment pub mutation: &'a mut BatchMutation, - /// A string equals to "rome-ignore: lint(/)" + /// A string equals to "biome-ignore: lint(/)" pub suppression_text: &'a str, /// The original range of the diagnostic where the rule was triggered pub diagnostic_text_range: &'a TextRange, @@ -830,13 +771,13 @@ impl<'a> RuleFilter<'a> { } } -impl<'a> Debug for RuleFilter<'a> { +impl Debug for RuleFilter<'_> { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { Display::fmt(self, f) } } -impl<'a> Display for RuleFilter<'a> { +impl Display for RuleFilter<'_> { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { RuleFilter::Group(group) => { @@ -849,7 +790,7 @@ impl<'a> Display for RuleFilter<'a> { } } -impl<'a> biome_console::fmt::Display for RuleFilter<'a> { +impl biome_console::fmt::Display for RuleFilter<'_> { fn fmt(&self, fmt: &mut biome_console::fmt::Formatter) -> std::io::Result<()> { match self { RuleFilter::Group(group) => { diff --git a/crates/biome_analyze/src/matcher.rs b/crates/biome_analyze/src/matcher.rs index ac0f3e5694f7..f70acc483f78 100644 --- a/crates/biome_analyze/src/matcher.rs +++ b/crates/biome_analyze/src/matcher.rs @@ -144,21 +144,21 @@ pub struct SignalEntry<'phase, L: Language> { } // SignalEntry is ordered based on the starting point of its `text_range` -impl<'phase, L: Language> Ord for SignalEntry<'phase, L> { +impl Ord for SignalEntry<'_, L> { fn cmp(&self, other: &Self) -> Ordering { other.text_range.start().cmp(&self.text_range.start()) } } -impl<'phase, L: Language> PartialOrd for SignalEntry<'phase, L> { +impl PartialOrd for SignalEntry<'_, L> { fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) } } -impl<'phase, L: Language> Eq for SignalEntry<'phase, L> {} +impl Eq for SignalEntry<'_, L> {} -impl<'phase, L: Language> PartialEq for SignalEntry<'phase, L> { +impl PartialEq for SignalEntry<'_, L> { fn eq(&self, other: &Self) -> bool { self.text_range.start() == other.text_range.start() } @@ -204,7 +204,7 @@ mod tests { ControlFlow, MetadataRegistry, Never, Phases, QueryMatcher, RuleKey, ServiceBag, SignalEntry, SuppressionAction, SyntaxVisitor, }; - use crate::{AnalyzerOptions, SuppressionKind}; + use crate::{AnalyzerOptions, AnalyzerSuppression}; use biome_diagnostics::{category, DiagnosticExt}; use 
biome_diagnostics::{Diagnostic, Severity}; use biome_rowan::{ @@ -350,12 +350,13 @@ mod tests { }; fn parse_suppression_comment( - comment: &'_ str, - ) -> Vec, Infallible>> { + comment: &str, + _piece_range: TextRange, + ) -> Vec> { comment .trim_start_matches("//") .split(' ') - .map(SuppressionKind::Rule) + .map(AnalyzerSuppression::rule) .map(Ok) .collect() } @@ -368,14 +369,14 @@ mod tests { impl SuppressionAction for TestAction { type Language = RawLanguage; - fn find_token_to_apply_suppression( + fn find_token_for_inline_suppression( &self, _: SyntaxToken, ) -> Option> { None } - fn apply_suppression( + fn apply_inline_suppression( &self, _: &mut BatchMutation, _: ApplySuppression, @@ -384,6 +385,15 @@ mod tests { ) { unreachable!("") } + + fn apply_top_level_suppression( + &self, + _: &mut BatchMutation, + _: SyntaxToken, + _: &str, + ) { + unreachable!("") + } } let mut analyzer = Analyzer::new( diff --git a/crates/biome_analyze/src/options.rs b/crates/biome_analyze/src/options.rs index 3394012ff1b6..357f33c1534f 100644 --- a/crates/biome_analyze/src/options.rs +++ b/crates/biome_analyze/src/options.rs @@ -1,9 +1,9 @@ +use camino::Utf8PathBuf; use rustc_hash::FxHashMap; use crate::{FixKind, Rule, RuleKey}; use std::any::{Any, TypeId}; use std::fmt::Debug; -use std::path::PathBuf; /// A convenient new type data structure to store the options that belong to a rule #[derive(Debug)] @@ -55,42 +55,88 @@ impl AnalyzerRules { #[derive(Debug, Default)] pub struct AnalyzerConfiguration { /// A list of rules and their options - pub rules: AnalyzerRules, + pub(crate) rules: AnalyzerRules, /// A collections of bindings that the analyzers should consider as "external". /// /// For example, lint rules should ignore them. - pub globals: Vec, + globals: Vec>, /// Allows to choose a different quote when applying fixes inside the lint rules - pub preferred_quote: PreferredQuote, + preferred_quote: PreferredQuote, /// Allows to choose a different JSX quote when applying fixes inside the lint rules pub preferred_jsx_quote: PreferredQuote, /// Indicates the type of runtime or transformation used for interpreting JSX. 
- pub jsx_runtime: Option, + jsx_runtime: Option, +} + +impl AnalyzerConfiguration { + pub fn with_rules(mut self, rules: AnalyzerRules) -> Self { + self.rules = rules; + self + } + + pub fn with_globals(mut self, globals: Vec>) -> Self { + self.globals = globals; + self + } + + pub fn with_jsx_runtime(mut self, jsx_runtime: JsxRuntime) -> Self { + self.jsx_runtime = Some(jsx_runtime); + self + } + + pub fn with_preferred_quote(mut self, preferred_quote: PreferredQuote) -> Self { + self.preferred_quote = preferred_quote; + self + } + + pub fn with_preferred_jsx_quote(mut self, preferred_jsx_quote: PreferredQuote) -> Self { + self.preferred_jsx_quote = preferred_jsx_quote; + self + } } /// A set of information useful to the analyzer infrastructure #[derive(Debug, Default)] pub struct AnalyzerOptions { /// A data structured derived from the [`biome.json`] file - pub configuration: AnalyzerConfiguration, + pub(crate) configuration: AnalyzerConfiguration, /// The file that is being analyzed - pub file_path: PathBuf, + pub file_path: Utf8PathBuf, /// Suppression reason used when applying a suppression code action - pub suppression_reason: Option, + pub(crate) suppression_reason: Option, } impl AnalyzerOptions { + pub fn with_file_path(mut self, file_path: impl Into) -> Self { + self.file_path = file_path.into(); + self + } + + pub fn with_configuration(mut self, analyzer_configuration: AnalyzerConfiguration) -> Self { + self.configuration = analyzer_configuration; + self + } + + pub fn with_suppression_reason(mut self, reason: Option<&str>) -> Self { + self.suppression_reason = reason.map(String::from); + self + } + + pub fn push_globals(&mut self, globals: Vec>) { + self.configuration.globals.extend(globals); + } + pub fn globals(&self) -> Vec<&str> { self.configuration .globals .iter() - .map(|global| global.as_str()) + .map(AsRef::as_ref) .collect() } diff --git a/crates/biome_analyze/src/registry.rs b/crates/biome_analyze/src/registry.rs index 0111d3c24b0f..f6ebffd7c398 100644 --- a/crates/biome_analyze/src/registry.rs +++ b/crates/biome_analyze/src/registry.rs @@ -402,20 +402,17 @@ impl RegistryRule { let preferred_jsx_quote = params.options.preferred_jsx_quote(); let jsx_runtime = params.options.jsx_runtime(); let options = params.options.rule_options::().unwrap_or_default(); - let ctx = match RuleContext::new( + let ctx = RuleContext::new( &query_result, params.root, params.services, &globals, - ¶ms.options.file_path, + params.options.file_path.as_path(), &options, preferred_quote, preferred_jsx_quote, jsx_runtime, - ) { - Ok(ctx) => ctx, - Err(error) => return Err(error), - }; + )?; for result in R::run(&ctx) { let text_range = diff --git a/crates/biome_analyze/src/rule.rs b/crates/biome_analyze/src/rule.rs index b72c5e871ee5..acc1a8f0f282 100644 --- a/crates/biome_analyze/src/rule.rs +++ b/crates/biome_analyze/src/rule.rs @@ -4,15 +4,15 @@ use crate::registry::{RegistryVisitor, RuleLanguage, RuleSuppressions}; use crate::{ Phase, Phases, Queryable, SourceActionKind, SuppressionAction, SuppressionCommentEmitterPayload, }; -use biome_console::fmt::Display; -use biome_console::{markup, MarkupBuf}; +use biome_console::fmt::{Display, Formatter}; +use biome_console::{markup, MarkupBuf, Padding}; use biome_diagnostics::advice::CodeSuggestionAdvice; use biome_diagnostics::location::AsSpan; -use biome_diagnostics::Applicability; use biome_diagnostics::{ Advices, Category, Diagnostic, DiagnosticTags, Location, LogCategory, MessageAndDescription, Visit, }; +use biome_diagnostics::{Applicability, 
Severity}; use biome_rowan::{AstNode, BatchMutation, BatchMutationExt, Language, TextRange}; use std::borrow::Cow; use std::cmp::Ordering; @@ -41,6 +41,159 @@ pub struct RuleMetadata { pub sources: &'static [RuleSource], /// The source kind of the rule pub source_kind: Option, + /// The default severity of the rule + pub severity: Severity, + /// Domains applied by this rule + pub domains: &'static [RuleDomain], +} + +impl biome_console::fmt::Display for RuleMetadata { + fn fmt(&self, fmt: &mut Formatter) -> std::io::Result<()> { + fmt.write_markup(markup! { + "Summary" + })?; + fmt.write_str("\n")?; + fmt.write_str("\n")?; + + fmt.write_markup(markup! { + "- Name: "{self.name} + })?; + fmt.write_str("\n")?; + match self.fix_kind { + FixKind::None => { + fmt.write_markup(markup! { + "- No fix available." + })?; + } + kind => { + fmt.write_markup(markup! { + "- Fix: "{kind} + })?; + } + } + fmt.write_str("\n")?; + + fmt.write_markup(markup! { + "- Default severity: "{self.severity} + })?; + fmt.write_str("\n")?; + + fmt.write_markup(markup! { + "- Available from version: "{self.version} + })?; + fmt.write_str("\n")?; + + if self.domains.is_empty() && self.recommended { + fmt.write_markup(markup! { + "- This rule is not recommended" + })?; + } + + let domains = DisplayDomains(self.domains, self.recommended); + + fmt.write_str("\n")?; + + fmt.write_markup(markup!({ domains }))?; + + fmt.write_str("\n")?; + + fmt.write_markup(markup! { + "Description" + })?; + fmt.write_str("\n")?; + fmt.write_str("\n")?; + + for line in self.docs.lines() { + if let Some((_, remainder)) = line.split_once("## ") { + fmt.write_markup(markup! { + {remainder.trim_start()} + })?; + } else if let Some((_, remainder)) = line.split_once("### ") { + fmt.write_markup(markup! { + {remainder.trim_start()} + })?; + } else { + fmt.write_str(line)?; + } + + fmt.write_str("\n")?; + } + + Ok(()) + } +} + +struct DisplayDomains(&'static [RuleDomain], bool); + +impl Display for DisplayDomains { + fn fmt(&self, fmt: &mut Formatter) -> std::io::Result<()> { + let domains = self.0; + let recommended = self.1; + + if domains.is_empty() { + return Ok(()); + } + + fmt.write_markup(markup!( + "Domains" + ))?; + fmt.write_str("\n")?; + fmt.write_str("\n")?; + + for domain in domains { + let dependencies = domain.manifest_dependencies(); + + fmt.write_markup(markup! { + "- Name: "{domain} + })?; + fmt.write_str("\n")?; + + if recommended { + fmt.write_markup(markup! { + "- The rule is recommended for this domain" + })?; + fmt.write_str("\n")?; + } + + if !dependencies.is_empty() { + fmt.write_markup(markup! { + "- The rule is enabled when one of these dependencies are detected:" + })?; + fmt.write_str("\n")?; + let padding = Padding::new(2); + for (index, (dep, range)) in dependencies.iter().enumerate() { + fmt.write_markup( + markup! { {padding}"- "{dep}"@"{range} }, + )?; + if index + 1 < dependencies.len() { + fmt.write_str("\n")?; + } + } + fmt.write_str("\n")?; + } + + let globals = domain.globals(); + + if !globals.is_empty() { + fmt.write_markup(markup! { + "- The rule adds the following globals: " + })?; + fmt.write_str("\n")?; + + let padding = Padding::new(2); + for (index, global) in globals.iter().enumerate() { + fmt.write_markup(markup! 
{ {padding}"- "{global} })?; + if index + 1 < globals.len() { + fmt.write_str("\n")?; + } + } + fmt.write_str("\n")?; + } + fmt.write_str("\n")?; + } + + Ok(()) + } } #[derive(Clone, Copy, Debug, Default, Eq, PartialEq)] @@ -48,12 +201,12 @@ pub struct RuleMetadata { feature = "serde", derive( biome_deserialize_macros::Deserializable, - schemars::JsonSchema, serde::Deserialize, serde::Serialize ) )] #[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] /// Used to identify the kind of code action emitted by a rule pub enum FixKind { /// The rule doesn't emit code actions. @@ -69,9 +222,9 @@ pub enum FixKind { impl Display for FixKind { fn fmt(&self, fmt: &mut biome_console::fmt::Formatter) -> std::io::Result<()> { match self { - FixKind::None => fmt.write_str("None"), - FixKind::Safe => fmt.write_str("Safe"), - FixKind::Unsafe => fmt.write_str("Unsafe"), + FixKind::None => fmt.write_markup(markup!("none")), + FixKind::Safe => fmt.write_markup(markup!("safe")), + FixKind::Unsafe => fmt.write_markup(markup!("unsafe")), } } } @@ -88,7 +241,8 @@ impl TryFrom for Applicability { } #[derive(Debug, Clone, Eq)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, schemars::JsonSchema))] +#[cfg_attr(feature = "serde", derive(serde::Serialize))] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] #[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] pub enum RuleSource { /// Rules from [Rust Clippy](https://rust-lang.github.io/rust-clippy/master/index.html) @@ -307,8 +461,9 @@ impl RuleSource { } #[derive(Debug, Default, Clone, Copy)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, schemars::JsonSchema))] +#[cfg_attr(feature = "serde", derive(serde::Serialize))] #[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub enum RuleSourceKind { /// The rule implements the same logic of the source #[default] @@ -323,6 +478,81 @@ impl RuleSourceKind { } } +/// Rule domains +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +#[cfg_attr( + feature = "serde", + derive( + serde::Deserialize, + serde::Serialize, + biome_deserialize_macros::Deserializable + ) +)] +#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] +pub enum RuleDomain { + /// React library rules + React, + /// Testing rules + Test, + /// Solid.js framework rules + Solid, + /// Next.js framework rules + Next, +} + +impl Display for RuleDomain { + fn fmt(&self, fmt: &mut Formatter) -> std::io::Result<()> { + // use lower case naming, it needs to match the name of the configuration + match self { + RuleDomain::React => fmt.write_str("react"), + RuleDomain::Test => fmt.write_str("test"), + RuleDomain::Solid => fmt.write_str("solid"), + RuleDomain::Next => fmt.write_str("next"), + } + } +} + +impl RuleDomain { + /// If the project has one of these dependencies, the domain will be automatically enabled, unless it's explicitly disabled by the configuration. + /// + /// If the array is empty, it means that the rules that belong to a certain domain won't enable themselves automatically. 
+ pub const fn manifest_dependencies(self) -> &'static [&'static (&'static str, &'static str)] { + match self { + RuleDomain::React => &[&("react", ">=16.0.0")], + RuleDomain::Test => &[ + &("jest", ">=26.0.0"), + &("mocha", ">=8.0.0"), + &("ava", ">=2.0.0"), + &("vitest", ">=1.0.0"), + ], + RuleDomain::Solid => &[&("solid", ">=1.0.0")], + RuleDomain::Next => &[&("next", ">=14.0.0")], + } + } + + /// Global identifiers that should be added to the `globals` of the [crate::AnalyzerConfiguration] type + pub const fn globals(self) -> &'static [&'static str] { + match self { + RuleDomain::React => &[], + RuleDomain::Test => &[ + "after", + "afterAll", + "afterEach", + "before", + "beforeEach", + "beforeAll", + "describe", + "it", + "expect", + "test", + ], + RuleDomain::Solid => &[], + RuleDomain::Next => &[], + } + } +} + impl RuleMetadata { pub const fn new( version: &'static str, @@ -340,6 +570,8 @@ impl RuleMetadata { fix_kind: FixKind::None, sources: &[], source_kind: None, + severity: Severity::Information, + domains: &[], } } @@ -376,6 +608,16 @@ impl RuleMetadata { self } + pub const fn severity(mut self, severity: Severity) -> Self { + self.severity = severity; + self + } + + pub const fn domains(mut self, domains: &'static [RuleDomain]) -> Self { + self.domains = domains; + self + } + pub fn applicability(&self) -> Applicability { self.fix_kind .try_into() @@ -488,6 +730,7 @@ macro_rules! declare_syntax_rule { version: $version, name: $name, language: $language, + severity: biome_diagnostics::Severity::Error, $( $key: $value, )* } ); @@ -566,7 +809,7 @@ macro_rules! declare_source_rule { /// This macro returns the corresponding [ActionCategory] to use inside the [RuleAction] #[expect(unused_macros)] macro_rules! rule_action_category { - () => { ActionCategory::Source(SourceActionKind::Other(Cow::Borrowed(concat!($language, ".", $name) ))) }; + () => { biome_analyze::ActionCategory::Source(biome_analyze::SourceActionKind::Other(Cow::Borrowed($name))) }; } }; } @@ -623,7 +866,7 @@ macro_rules! declare_lint_group { /// This macro is used by the codegen script to declare an analyzer rule group, /// and implement the [RuleGroup] trait for it #[macro_export] -macro_rules! declare_assists_group { +macro_rules! declare_assist_group { ( $vis:vis $id:ident { name: $name:tt, rules: [ $( $( $rule:ident )::* , )* ] } ) => { $vis enum $id {} @@ -648,7 +891,7 @@ macro_rules! declare_assists_group { // "lint" prefix, the name of this group, and the rule name argument #[expect(unused_macros)] macro_rules! group_category { - ( $rule_name:tt ) => { $crate::category_concat!( "assists", $name, $rule_name ) }; + ( $rule_name:tt ) => { $crate::category_concat!( "assist", $name, $rule_name ) }; } // Re-export the macro for child modules, so `declare_rule!` can access @@ -902,9 +1145,42 @@ pub trait Rule: RuleMeta + Sized { None } + fn top_level_suppression( + ctx: &RuleContext, + suppression_action: &dyn SuppressionAction>, + ) -> Option>> + where + Self: 'static, + { + if ::Category::CATEGORY == RuleCategory::Lint { + let rule_category = format!( + "lint/{}/{}", + ::NAME, + Self::METADATA.name + ); + let suppression_text = format!("biome-ignore-all {rule_category}"); + let root = ctx.root(); + + if let Some(first_token) = root.syntax().first_token() { + let mut mutation = root.begin(); + suppression_action.apply_top_level_suppression( + &mut mutation, + first_token, + suppression_text.as_str(), + ); + return Some(SuppressAction { + mutation, + message: markup! 
{ "Suppress rule " {rule_category} " for the whole file."} + .to_owned(), + }); + } + } + None + } + /// Create a code action that allows to suppress the rule. The function /// returns the node to which the suppression comment is applied. - fn suppress( + fn inline_suppression( ctx: &RuleContext, text_range: &TextRange, suppression_action: &dyn SuppressionAction>, @@ -924,7 +1200,7 @@ pub trait Rule: RuleMeta + Sized { let root = ctx.root(); let token = root.syntax().token_at_offset(text_range.start()); let mut mutation = root.begin(); - suppression_action.apply_suppression_comment(SuppressionCommentEmitterPayload { + suppression_action.inline_suppression(SuppressionCommentEmitterPayload { suppression_text: suppression_text.as_str(), mutation: &mut mutation, token_offset: token, @@ -951,7 +1227,7 @@ pub trait Rule: RuleMeta + Sized { } /// Diagnostic object returned by a single analysis rule -#[derive(Debug, Diagnostic)] +#[derive(Clone, Debug, Diagnostic)] pub struct RuleDiagnostic { #[category] pub(crate) category: &'static Category, @@ -964,9 +1240,11 @@ pub struct RuleDiagnostic { pub(crate) tags: DiagnosticTags, #[advice] pub(crate) rule_advice: RuleAdvice, + #[severity] + pub(crate) severity: Severity, } -#[derive(Debug, Default)] +#[derive(Clone, Debug, Default)] /// It contains possible advices to show when printing a diagnostic that belong to the rule pub struct RuleAdvice { pub(crate) details: Vec, @@ -975,7 +1253,7 @@ pub struct RuleAdvice { pub(crate) code_suggestion_list: Vec>, } -#[derive(Debug, Default)] +#[derive(Clone, Debug, Default)] pub struct SuggestionList { pub(crate) message: MarkupBuf, pub(crate) list: Vec, @@ -1017,7 +1295,7 @@ impl Advices for RuleAdvice { } } -#[derive(Debug)] +#[derive(Clone, Debug)] pub struct Detail { pub log_category: LogCategory, pub message: MarkupBuf, @@ -1035,6 +1313,7 @@ impl RuleDiagnostic { message: MessageAndDescription::from(message), tags: DiagnosticTags::empty(), rule_advice: RuleAdvice::default(), + severity: Severity::default(), } } @@ -1062,6 +1341,14 @@ impl RuleDiagnostic { self } + /// Marks this diagnostic as verbose. + /// + /// The diagnostic will only be shown when using the `--verbose` argument. + pub fn verbose(mut self) -> Self { + self.tags |= DiagnosticTags::VERBOSE; + self + } + /// Attaches a label to this [`RuleDiagnostic`]. /// /// The given span has to be in the file that was provided while creating this [`RuleDiagnostic`]. 
diff --git a/crates/biome_analyze/src/signals.rs b/crates/biome_analyze/src/signals.rs index d95c60f6bcd8..543ec957f167 100644 --- a/crates/biome_analyze/src/signals.rs +++ b/crates/biome_analyze/src/signals.rs @@ -1,15 +1,17 @@ -use crate::categories::SUPPRESSION_ACTION_CATEGORY; +use crate::categories::{ + SUPPRESSION_INLINE_ACTION_CATEGORY, SUPPRESSION_TOP_LEVEL_ACTION_CATEGORY, +}; use crate::{ categories::ActionCategory, context::RuleContext, registry::{RuleLanguage, RuleRoot}, rule::Rule, - AnalyzerDiagnostic, AnalyzerOptions, Queryable, RuleGroup, ServiceBag, SuppressionAction, + AnalyzerDiagnostic, AnalyzerOptions, OtherActionCategory, Queryable, RuleGroup, ServiceBag, + SuppressionAction, }; use biome_console::MarkupBuf; use biome_diagnostics::{advice::CodeSuggestionAdvice, Applicability, CodeSuggestion, Error}; use biome_rowan::{BatchMutation, Language}; -use std::borrow::Cow; use std::iter::FusedIterator; use std::marker::PhantomData; use std::vec::IntoIter; @@ -115,7 +117,15 @@ pub struct AnalyzerAction { impl AnalyzerAction { pub fn is_suppression(&self) -> bool { - self.category.matches(SUPPRESSION_ACTION_CATEGORY) + self.is_inline_suppression() || self.is_top_level_suppression() + } + + pub fn is_inline_suppression(&self) -> bool { + self.category.matches(SUPPRESSION_INLINE_ACTION_CATEGORY) + } + + pub fn is_top_level_suppression(&self) -> bool { + self.category.matches(SUPPRESSION_TOP_LEVEL_ACTION_CATEGORY) } } @@ -339,7 +349,7 @@ where } } -impl<'bag, R> AnalyzerSignal> for RuleSignal<'bag, R> +impl AnalyzerSignal> for RuleSignal<'_, R> where R: Rule + 'static, { @@ -353,7 +363,7 @@ where self.root, self.services, &globals, - &self.options.file_path, + self.options.file_path.as_path(), &options, preferred_quote, preferred_jsx_quote, @@ -361,7 +371,10 @@ where ) .ok()?; - R::diagnostic(&ctx, &self.state).map(AnalyzerDiagnostic::from) + R::diagnostic(&ctx, &self.state).map(|mut diagnostic| { + diagnostic.severity = ctx.metadata().severity; + AnalyzerDiagnostic::from(diagnostic) + }) } fn actions(&self) -> AnalyzerActionIter> { @@ -385,15 +398,15 @@ where self.root, self.services, &globals, - &self.options.file_path, + self.options.file_path.as_path(), &options, self.options.preferred_quote(), self.options.preferred_jsx_quote(), self.options.jsx_runtime(), ) .ok(); + let mut actions = Vec::new(); if let Some(ctx) = ctx { - let mut actions = Vec::new(); if let Some(action) = R::action(&ctx, &self.state) { actions.push(AnalyzerAction { rule_name: Some((::NAME, R::METADATA.name)), @@ -404,7 +417,7 @@ where }); }; if let Some(text_range) = R::text_range(&ctx, &self.state) { - if let Some(suppression_action) = R::suppress( + if let Some(suppression_action) = R::inline_suppression( &ctx, &text_range, self.suppression_action, @@ -412,7 +425,7 @@ where ) { let action = AnalyzerAction { rule_name: Some((::NAME, R::METADATA.name)), - category: ActionCategory::Other(Cow::Borrowed(SUPPRESSION_ACTION_CATEGORY)), + category: ActionCategory::Other(OtherActionCategory::InlineSuppression), applicability: Applicability::Always, mutation: suppression_action.mutation, message: suppression_action.message, @@ -421,6 +434,19 @@ where } } + if let Some(suppression_action) = + R::top_level_suppression(&ctx, self.suppression_action) + { + let action = AnalyzerAction { + rule_name: Some((::NAME, R::METADATA.name)), + category: ActionCategory::Other(OtherActionCategory::ToplevelSuppression), + applicability: Applicability::Always, + mutation: suppression_action.mutation, + message: 
suppression_action.message, + }; + actions.push(action); + } + AnalyzerActionIter::new(actions) } else { AnalyzerActionIter::new(vec![]) @@ -435,7 +461,7 @@ where self.root, self.services, &globals, - &self.options.file_path, + self.options.file_path.as_path(), &options, self.options.preferred_quote(), self.options.preferred_jsx_quote(), diff --git a/crates/biome_analyze/src/suppression_action.rs b/crates/biome_analyze/src/suppression_action.rs index 77359a95f085..446eac999658 100644 --- a/crates/biome_analyze/src/suppression_action.rs +++ b/crates/biome_analyze/src/suppression_action.rs @@ -4,7 +4,7 @@ use biome_rowan::{BatchMutation, Language, SyntaxToken, TextRange, TokenAtOffset pub trait SuppressionAction { type Language: Language; - fn apply_suppression_comment(&self, payload: SuppressionCommentEmitterPayload) { + fn inline_suppression(&self, payload: SuppressionCommentEmitterPayload) { let SuppressionCommentEmitterPayload { token_offset, mutation, @@ -19,11 +19,11 @@ pub trait SuppressionAction { // considering that our suppression system works via lines, we need to look for the first newline, // so we can place the comment there let apply_suppression = original_token.as_ref().and_then(|original_token| { - self.find_token_to_apply_suppression(original_token.clone()) + self.find_token_for_inline_suppression(original_token.clone()) }); if let Some(apply_suppression) = apply_suppression { - self.apply_suppression( + self.apply_inline_suppression( mutation, apply_suppression, suppression_text, @@ -68,23 +68,30 @@ pub trait SuppressionAction { } } - fn find_token_to_apply_suppression( + fn find_token_for_inline_suppression( &self, original_token: SyntaxToken, ) -> Option>; - fn apply_suppression( + fn apply_inline_suppression( &self, mutation: &mut BatchMutation, apply_suppression: ApplySuppression, suppression_text: &str, suppression_reason: &str, ); + + fn apply_top_level_suppression( + &self, + mutation: &mut BatchMutation, + token: SyntaxToken, + suppression_text: &str, + ); } /// Convenient type to store useful information pub struct ApplySuppression { - /// If the token is following by trailing comments + /// If the token is followed by trailing comments pub token_has_trailing_comments: bool, /// The token to attach the suppression pub token_to_apply_suppression: SyntaxToken, diff --git a/crates/biome_analyze/src/suppressions.rs b/crates/biome_analyze/src/suppressions.rs new file mode 100644 index 000000000000..c8f14fb08f06 --- /dev/null +++ b/crates/biome_analyze/src/suppressions.rs @@ -0,0 +1,456 @@ +use crate::{ + AnalyzerSuppression, AnalyzerSuppressionDiagnostic, AnalyzerSuppressionKind, + AnalyzerSuppressionVariant, MetadataRegistry, RuleFilter, RuleKey, +}; +use biome_console::markup; +use biome_diagnostics::category; +use biome_rowan::{TextRange, TextSize}; +use rustc_hash::{FxHashMap, FxHashSet}; + +#[derive(Debug, Default)] +pub struct TopLevelSuppression { + /// Whether this suppression suppresses all filters + pub(crate) suppress_all: bool, + /// Filters for the current suppression + pub(crate) filters: FxHashSet>, + /// The range of the comment + pub(crate) comment_range: TextRange, + + /// The range covered by the current suppression. 
+ /// Eventually, it should hit the entire document + pub(crate) range: TextRange, +} + +impl TopLevelSuppression { + fn push_suppression( + &mut self, + suppression: &AnalyzerSuppression, + filter: Option>, + token_range: TextRange, + comment_range: TextRange, + ) -> Result<(), AnalyzerSuppressionDiagnostic> { + if suppression.is_top_level() && token_range.start() > TextSize::from(0) { + let mut diagnostic = AnalyzerSuppressionDiagnostic::new( + category!("suppressions/incorrect"), + comment_range, + "Top level suppressions can only be used at the beginning of the file.", + ); + if let Some(ignore_range) = suppression.ignore_range { + diagnostic = diagnostic.note( + markup! {"Rename this to ""biome-ignore"" or move it to the top of the file"} + .to_owned(), + ignore_range, + ); + } + + return Err(diagnostic); + } + // The absence of a filter means that it's a suppression all + match filter { + None => self.suppress_all = true, + Some(filter) => self.insert(filter), + } + self.comment_range = comment_range; + + Ok(()) + } + + pub(crate) fn insert(&mut self, filter: RuleFilter<'static>) { + self.filters.insert(filter); + } + + pub(crate) fn suppressed_rule(&self, filter: &RuleKey) -> bool { + self.filters.iter().any(|f| f == filter) + } + + pub(crate) fn expand_range(&mut self, range: TextRange) { + self.range.cover(range); + } + + pub(crate) fn has_filter(&self, filter: &RuleFilter) -> bool { + self.filters.contains(filter) + } +} + +/// Single entry for a suppression comment in the `line_suppressions` buffer +#[derive(Default, Debug)] +pub(crate) struct LineSuppression { + /// Line index this comment is suppressing lint rules for + pub(crate) line_index: usize, + /// Range of source text covered by the suppression comment + pub(crate) comment_span: TextRange, + /// Range of source text this comment is suppressing lint rules for + pub(crate) text_range: TextRange, + /// Set to true if this comment has set the `suppress_all` flag to true + /// (must be restored to false on expiration) + pub(crate) suppress_all: bool, + /// List of all the rules this comment has started suppressing (must be + /// removed from the suppressed set on expiration) + pub(crate) suppressed_rules: FxHashSet>, + /// List of all the rule instances this comment has started suppressing. + pub(crate) suppressed_instances: FxHashMap>, + /// Set to `true` when a signal matching this suppression was emitted and + /// suppressed + pub(crate) did_suppress_signal: bool, + /// Set to `true` when this line suppresses a signal that was already suppressed by another entity e.g. top-level suppression + pub(crate) already_suppressed: Option, +} + +#[derive(Debug, Default)] +pub(crate) struct RangeSuppressions { + pub(crate) suppressions: Vec, +} + +#[derive(Debug, Default)] +pub(crate) struct RangeSuppression { + /// Whether the current suppression should suppress all signals + pub(crate) suppress_all: bool, + + /// The range of the `biome-ignore-start` suppressions + pub(crate) start_comment_range: TextRange, + + /// A range that indicates how long this suppression has effect + pub(crate) suppression_range: TextRange, + + /// Set to `true` when this line suppresses a signal that was already suppressed by another entity e.g. 
top-level suppression + pub(crate) already_suppressed: Option, + + /// Whether this suppression has suppressed a signal + pub(crate) did_suppress_signal: bool, + + /// The rules to suppress + pub(crate) filters: FxHashSet>, +} + +impl RangeSuppressions { + /// Expands the range of all range suppressions + pub(crate) fn expand_range(&mut self, text_range: TextRange) { + for range_suppression in self.suppressions.iter_mut() { + if !range_suppression.filters.is_empty() { + range_suppression.suppression_range = + range_suppression.suppression_range.cover(text_range); + } + } + } + pub(crate) fn push_suppression( + &mut self, + suppression: &AnalyzerSuppression, + filter: Option>, + text_range: TextRange, + already_suppressed: Option, + ) -> Result<(), AnalyzerSuppressionDiagnostic> { + if suppression.is_range_start() { + if let Some(range_suppression) = self.suppressions.last_mut() { + match filter { + None => { + range_suppression.suppress_all = true; + range_suppression.already_suppressed = already_suppressed; + } + Some(filter) => { + range_suppression.filters.insert(filter); + range_suppression.already_suppressed = already_suppressed; + } + } + } else { + let mut range_suppression = RangeSuppression::default(); + match filter { + None => range_suppression.suppress_all = true, + Some(filter) => { + range_suppression.filters.insert(filter); + } + } + range_suppression.suppression_range = text_range; + range_suppression.already_suppressed = already_suppressed; + range_suppression.start_comment_range = text_range; + self.suppressions.push(range_suppression); + } + } else if suppression.is_range_end() { + if self.suppressions.is_empty() { + // This an error. We found a range end suppression without having a range start + return Err(AnalyzerSuppressionDiagnostic::new( + category!("suppressions/incorrect"), + text_range, + markup!{"Found a ""biome-range-end"" suppression without a ""biome-range-start"" suppression. This is invalid"} + ).hint(markup!{ + "Remove this suppression." + }.to_owned())); + } + + match filter { + None => { + self.suppressions.pop(); + } + Some(filter) => { + // SAFETY: we checked if the vector isn't empty at the beginning + let range_suppression = self.suppressions.last_mut().unwrap(); + let present = range_suppression.filters.remove(&filter); + // the user tried to remove a filter that wasn't added, let's fire a diagnostic + if !present { + // This an error. We found a range end suppression without having a range start + return Err(AnalyzerSuppressionDiagnostic::new( + category!("suppressions/incorrect"), + text_range, + markup!{"Found a ""biome-range-end"" suppression without a ""biome-range-start"" suppression. This is invalid"} + ).hint(markup!{ + "Remove this suppression." + }.to_owned())); + } + } + } + } + Ok(()) + } + + /// Checks if there's suppression that suppresses the current rule in the range provided + pub(crate) fn suppressed_rule(&mut self, filter: &RuleKey, position: &TextRange) -> bool { + let range_suppression = self + .suppressions + .iter_mut() + .rev() + .find(|range_suppression| { + range_suppression + .suppression_range + .contains_range(*position) + }); + let range_suppression = range_suppression + .filter(|range_suppression| range_suppression.filters.iter().any(|f| f == filter)); + if let Some(range_suppression) = range_suppression { + range_suppression.did_suppress_signal = true; + true + } else { + false + } + } + + /// Whether if the provided `filter` matches ones, given a range. 
+ pub(crate) fn matches_filter_in_range( + &self, + filter: &RuleFilter, + position: &TextRange, + ) -> Option { + for range_suppression in self.suppressions.iter().rev() { + if range_suppression + .suppression_range + .contains_range(*position) + && range_suppression.filters.contains(filter) + { + return Some(range_suppression.suppression_range); + } + } + + None + } +} + +#[derive(Debug)] +pub struct Suppressions<'analyzer> { + /// Current line index + pub(crate) line_index: usize, + /// Registry metadata, used to find match the rules + metadata: &'analyzer MetadataRegistry, + /// Used to track the last suppression pushed. + last_suppression: Option, + pub(crate) line_suppressions: Vec, + pub(crate) top_level_suppression: TopLevelSuppression, + pub(crate) range_suppressions: RangeSuppressions, +} + +impl<'analyzer> Suppressions<'analyzer> { + pub(crate) fn new(metadata: &'analyzer MetadataRegistry) -> Self { + Self { + line_index: 0, + metadata, + line_suppressions: vec![], + top_level_suppression: TopLevelSuppression::default(), + range_suppressions: RangeSuppressions::default(), + last_suppression: None, + } + } + + fn push_line_suppression( + &mut self, + filter: Option>, + instance: Option, + current_range: TextRange, + already_suppressed: Option, + ) -> Result<(), AnalyzerSuppressionDiagnostic> { + if let Some(suppression) = self.line_suppressions.last_mut() { + if (suppression.line_index) == (self.line_index) { + suppression.already_suppressed = already_suppressed; + + match filter { + None => { + suppression.suppress_all = true; + suppression.suppressed_rules.clear(); + suppression.suppressed_instances.clear(); + } + Some(filter) => { + suppression.suppressed_rules.insert(filter); + if let Some(instance) = instance { + suppression.suppressed_instances.insert(instance, filter); + } + suppression.suppress_all = false; + } + } + return Ok(()); + } + } + + let mut suppression = LineSuppression { + comment_span: current_range, + text_range: current_range, + line_index: self.line_index, + already_suppressed, + ..Default::default() + }; + match filter { + None => { + suppression.suppress_all = true; + } + Some(filter) => { + suppression.suppressed_rules.insert(filter); + if let Some(instance) = instance { + suppression.suppressed_instances.insert(instance, filter); + } + } + } + self.line_suppressions.push(suppression); + + Ok(()) + } + + /// Maps a [suppression](AnalyzerSuppressionKind) to a [RuleFilter] + fn map_to_rule_filter( + &self, + suppression_kind: &AnalyzerSuppressionKind, + text_range: TextRange, + ) -> Result>, AnalyzerSuppressionDiagnostic> { + let rule = match suppression_kind { + AnalyzerSuppressionKind::Everything => return Ok(None), + AnalyzerSuppressionKind::Rule(rule) => rule, + AnalyzerSuppressionKind::RuleInstance(rule, _) => rule, + }; + + let group_rule = rule.split_once('/'); + + let filter = match group_rule { + None => self.metadata.find_group(rule).map(RuleFilter::from), + Some((group, rule)) => self.metadata.find_rule(group, rule).map(RuleFilter::from), + }; + match filter { + None => Err(match group_rule { + Some((group, rule)) => AnalyzerSuppressionDiagnostic::new( + category!("suppressions/unknownRule"), + text_range, + format_args!("Unknown lint rule {group}/{rule} in suppression comment"), + ), + + None => AnalyzerSuppressionDiagnostic::new( + category!("suppressions/unknownGroup"), + text_range, + format_args!("Unknown lint rule group {rule} in suppression comment"), + ), + }), + Some(filter) => Ok(Some(filter)), + } + } + + fn 
map_to_rule_instances(&self, suppression_kind: &AnalyzerSuppressionKind) -> Option { + match suppression_kind { + AnalyzerSuppressionKind::Everything | AnalyzerSuppressionKind::Rule(_) => None, + AnalyzerSuppressionKind::RuleInstance(_, instances) => Some((*instances).to_string()), + } + } + + pub(crate) fn push_suppression( + &mut self, + suppression: &AnalyzerSuppression, + comment_range: TextRange, + token_range_not_trimmed: TextRange, + ) -> Result<(), AnalyzerSuppressionDiagnostic> { + let filter = self.map_to_rule_filter(&suppression.kind, comment_range)?; + let instances = self.map_to_rule_instances(&suppression.kind); + self.last_suppression = Some(suppression.variant.clone()); + let already_suppressed = self.already_suppressed(filter.as_ref(), &comment_range); + match suppression.variant { + AnalyzerSuppressionVariant::Line => { + self.push_line_suppression(filter, instances, comment_range, already_suppressed) + } + AnalyzerSuppressionVariant::TopLevel => self.top_level_suppression.push_suppression( + suppression, + filter, + token_range_not_trimmed, + comment_range, + ), + AnalyzerSuppressionVariant::RangeStart | AnalyzerSuppressionVariant::RangeEnd => self + .range_suppressions + .push_suppression(suppression, filter, comment_range, already_suppressed), + } + } + + pub(crate) fn expand_range(&mut self, text_range: TextRange, line_index: usize) -> bool { + self.top_level_suppression.expand_range(text_range); + self.range_suppressions.expand_range(text_range); + if let Some(last_suppression) = self.line_suppressions.last_mut() { + if last_suppression.line_index == line_index { + last_suppression.text_range = last_suppression.text_range.cover(text_range); + self.line_index = line_index; + return true; + } + } + false + } + + pub(crate) fn bump_line_index(&mut self, line_index: usize) { + self.line_index = line_index; + } + + /// If the last suppression was on the same or previous line, extend its range. + pub(crate) fn overlap_last_suppression( + &mut self, + next_line_index: usize, + text_range: TextRange, + ) { + if let Some(variant) = &self.last_suppression { + match variant { + AnalyzerSuppressionVariant::Line => { + if let Some(last_suppression) = self.line_suppressions.last_mut() { + if last_suppression.line_index == next_line_index + || last_suppression.line_index + 1 == next_line_index + { + last_suppression.line_index = next_line_index; + last_suppression.text_range = + last_suppression.text_range.cover(text_range); + } + } + } + AnalyzerSuppressionVariant::TopLevel => { + self.top_level_suppression.expand_range(text_range); + } + AnalyzerSuppressionVariant::RangeStart => { + self.range_suppressions.expand_range(text_range) + } + AnalyzerSuppressionVariant::RangeEnd => { + self.range_suppressions.expand_range(text_range) + } + } + } + } + + /// Checks if there's top-level suppression or a range suppression that suppresses the given filter. + /// If so, it returns the text range of that suppression. 
+ fn already_suppressed( + &self, + filter: Option<&RuleFilter>, + range: &TextRange, + ) -> Option { + filter.and_then(|filter| { + self.top_level_suppression + .has_filter(filter) + .then_some(self.top_level_suppression.comment_range) + .or(self + .range_suppressions + .matches_filter_in_range(filter, range)) + }) + } +} diff --git a/crates/biome_analyze/src/syntax.rs b/crates/biome_analyze/src/syntax.rs index 9fbd2c8d736a..f4e532019044 100644 --- a/crates/biome_analyze/src/syntax.rs +++ b/crates/biome_analyze/src/syntax.rs @@ -80,7 +80,7 @@ impl Visitor for SyntaxVisitor { } if let Some(range) = ctx.range { - if node.text_range().ordering(range).is_ne() { + if node.text_range_with_trivia().ordering(range).is_ne() { self.skip_subtree = Some(node.clone()); return; } @@ -109,7 +109,7 @@ mod tests { nodes: Vec, } - impl<'a> QueryMatcher for &'a mut BufferMatcher { + impl QueryMatcher for &mut BufferMatcher { fn match_query(&mut self, params: MatchQueryParams) { self.nodes.push( params @@ -154,14 +154,14 @@ mod tests { impl SuppressionAction for TestAction { type Language = RawLanguage; - fn find_token_to_apply_suppression( + fn find_token_for_inline_suppression( &self, _: SyntaxToken, ) -> Option> { None } - fn apply_suppression( + fn apply_inline_suppression( &self, _: &mut BatchMutation, _: ApplySuppression, @@ -170,12 +170,21 @@ mod tests { ) { unreachable!("") } + + fn apply_top_level_suppression( + &self, + _: &mut BatchMutation, + _: SyntaxToken, + _: &str, + ) { + unreachable!("") + } } let mut analyzer = Analyzer::new( &metadata, &mut matcher, - |_| -> Vec> { unreachable!() }, + |_, _| -> Vec> { unreachable!() }, Box::new(TestAction), &mut emit_signal, ); diff --git a/crates/biome_analyze/src/visitor.rs b/crates/biome_analyze/src/visitor.rs index 9abfc0821f47..54082f1de436 100644 --- a/crates/biome_analyze/src/visitor.rs +++ b/crates/biome_analyze/src/visitor.rs @@ -19,7 +19,7 @@ pub struct VisitorContext<'phase, 'query, L: Language> { pub options: &'phase AnalyzerOptions, } -impl<'phase, 'query, L: Language> VisitorContext<'phase, 'query, L> { +impl VisitorContext<'_, '_, L> { pub fn match_query(&mut self, query: T) { self.query_matcher.match_query(MatchQueryParams { phase: self.phase, diff --git a/crates/biome_aria/src/roles.rs b/crates/biome_aria/src/roles.rs index e2c2bb99a201..ceaaa98b9723 100644 --- a/crates/biome_aria/src/roles.rs +++ b/crates/biome_aria/src/roles.rs @@ -229,10 +229,10 @@ impl AriaRoles { .find_attribute_by_name(|n| n == "type") .as_ref() .and_then(|attr| attr.value()) - .map_or(false, |value| value.as_ref() == "hidden"), + .is_some_and(|value| value.as_ref() == "hidden"), _ => self .get_implicit_role(element) - .map_or(false, |implicit_role| implicit_role.is_non_interactive()), + .is_some_and(|implicit_role| implicit_role.is_non_interactive()), } } diff --git a/crates/biome_cli/Cargo.toml b/crates/biome_cli/Cargo.toml index 1e1a89f9ad4b..27c224cd7585 100644 --- a/crates/biome_cli/Cargo.toml +++ b/crates/biome_cli/Cargo.toml @@ -24,10 +24,12 @@ biome_configuration = { workspace = true } biome_console = { workspace = true } biome_deserialize = { workspace = true } biome_deserialize_macros = { workspace = true } -biome_diagnostics = { workspace = true } +biome_diagnostics = { workspace = true, features = ["std", "bpaf"] } biome_flags = { workspace = true } biome_formatter = { workspace = true } biome_fs = { workspace = true } +biome_glob = { workspace = true } +biome_grit_patterns = { workspace = true } biome_js_analyze = { workspace = true } biome_js_formatter 
= { workspace = true } biome_json_formatter = { workspace = true } @@ -39,10 +41,9 @@ biome_rowan = { workspace = true } biome_service = { workspace = true } biome_text_edit = { workspace = true } bpaf = { workspace = true, features = ["bright-color"] } +camino = { workspace = true } crossbeam = { workspace = true } dashmap = { workspace = true } -hdrhistogram = { version = "7.5.4", default-features = false } -indexmap = { workspace = true } path-absolutize = { version = "3.1.1", optional = false, features = ["use_unix_paths_on_wasm"] } quick-junit = "0.5.1" rayon = { workspace = true } @@ -51,12 +52,14 @@ rustc-hash = { workspace = true } serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true } smallvec = { workspace = true } +terminal_size = { workspace = true } tokio = { workspace = true, features = ["io-std", "io-util", "net", "time", "rt", "sync", "rt-multi-thread", "macros"] } tracing = { workspace = true } tracing-appender = "0.2.3" tracing-subscriber = { workspace = true, features = ["env-filter", "json"] } tracing-tree = "0.4.0" + [target.'cfg(unix)'.dependencies] libc = "0.2.169" tokio = { workspace = true, features = ["process"] } diff --git a/crates/biome_cli/examples/text_reporter.rs b/crates/biome_cli/examples/text_reporter.rs index 892dd63e192e..59ebc630aa18 100644 --- a/crates/biome_cli/examples/text_reporter.rs +++ b/crates/biome_cli/examples/text_reporter.rs @@ -1,21 +1,26 @@ use biome_cli::{ DiagnosticsPayload, Execution, Reporter, ReporterVisitor, TraversalSummary, VcsTargeted, }; +use biome_service::projects::ProjectKey; /// This will be the visitor, which where we **write** the data struct BufferVisitor(String); /// This is the reporter, which will be a type that will hold the information needed to the reporter struct TextReport { + project_key: ProjectKey, summary: TraversalSummary, } impl Reporter for TextReport { fn write(self, visitor: &mut dyn ReporterVisitor) -> std::io::Result<()> { - let execution = Execution::new_format(VcsTargeted { - staged: false, - changed: false, - }); + let execution = Execution::new_format( + self.project_key, + VcsTargeted { + staged: false, + changed: false, + }, + ); visitor.report_summary(&execution, self.summary)?; Ok(()) } @@ -42,13 +47,20 @@ impl ReporterVisitor for BufferVisitor { } pub fn main() { + // In a real scenario, the project key is obtained from the + // `Workspace::open_project()` call. 
+ let project_key = ProjectKey::new(); + let summary = TraversalSummary { changed: 32, unchanged: 28, ..TraversalSummary::default() }; let mut visitor = BufferVisitor(String::new()); - let reporter = TextReport { summary }; + let reporter = TextReport { + project_key, + summary, + }; reporter.write(&mut visitor).unwrap(); assert_eq!(visitor.0.as_str(), "Total is 64") diff --git a/crates/biome_cli/src/changed.rs b/crates/biome_cli/src/changed.rs index 7b101ae841f1..492f92f3a479 100644 --- a/crates/biome_cli/src/changed.rs +++ b/crates/biome_cli/src/changed.rs @@ -1,12 +1,11 @@ use crate::CliDiagnostic; -use biome_configuration::PartialConfiguration; +use biome_configuration::Configuration; use biome_fs::FileSystem; -use biome_service::DynRef; use std::ffi::OsString; pub(crate) fn get_changed_files( - fs: &DynRef<'_, dyn FileSystem>, - configuration: &PartialConfiguration, + fs: &dyn FileSystem, + configuration: &Configuration, since: Option<&str>, ) -> Result, CliDiagnostic> { let default_branch = configuration @@ -28,9 +27,7 @@ pub(crate) fn get_changed_files( Ok(filtered_changed_files) } -pub(crate) fn get_staged_files( - fs: &DynRef<'_, dyn FileSystem>, -) -> Result, CliDiagnostic> { +pub(crate) fn get_staged_files(fs: &dyn FileSystem) -> Result, CliDiagnostic> { let staged_files = fs.get_staged_files()?; let filtered_staged_files = staged_files.iter().map(OsString::from).collect::>(); diff --git a/crates/biome_cli/src/cli_options.rs b/crates/biome_cli/src/cli_options.rs index 9be29855b256..3520bbc6b047 100644 --- a/crates/biome_cli/src/cli_options.rs +++ b/crates/biome_cli/src/cli_options.rs @@ -3,8 +3,8 @@ use crate::LoggingLevel; use biome_configuration::ConfigurationPathHint; use biome_diagnostics::Severity; use bpaf::Bpaf; +use camino::Utf8PathBuf; use std::fmt::{Display, Formatter}; -use std::path::PathBuf; use std::str::FromStr; /// Global options applied to all commands @@ -91,7 +91,7 @@ impl CliOptions { pub(crate) fn as_configuration_path_hint(&self) -> ConfigurationPathHint { match self.config_path.as_ref() { None => ConfigurationPathHint::default(), - Some(path) => ConfigurationPathHint::FromUser(PathBuf::from(path)), + Some(path) => ConfigurationPathHint::FromUser(Utf8PathBuf::from(path)), } } } diff --git a/crates/biome_cli/src/commands/check.rs b/crates/biome_cli/src/commands/check.rs index 7dce27be78d2..f544b36a004e 100644 --- a/crates/biome_cli/src/commands/check.rs +++ b/crates/biome_cli/src/commands/check.rs @@ -2,41 +2,38 @@ use super::{determine_fix_file_mode, FixFileModeOptions, LoadEditorConfig}; use crate::cli_options::CliOptions; use crate::commands::{get_files_to_process_with_cli_options, CommandRunner}; use crate::{CliDiagnostic, Execution, TraversalMode}; -use biome_configuration::analyzer::assists::PartialAssistsConfiguration; -use biome_configuration::{ - organize_imports::PartialOrganizeImports, PartialConfiguration, PartialFormatterConfiguration, - PartialLinterConfiguration, -}; +use biome_configuration::analyzer::assist::{AssistConfiguration, AssistEnabled}; +use biome_configuration::analyzer::LinterEnabled; +use biome_configuration::formatter::FormatterEnabled; +use biome_configuration::{Configuration, FormatterConfiguration, LinterConfiguration}; use biome_console::Console; use biome_deserialize::Merge; use biome_fs::FileSystem; -use biome_service::{configuration::LoadedConfiguration, DynRef, Workspace, WorkspaceError}; +use biome_service::projects::ProjectKey; +use biome_service::{configuration::LoadedConfiguration, Workspace, WorkspaceError}; use 
std::ffi::OsString; pub(crate) struct CheckCommandPayload { - pub(crate) apply: bool, - pub(crate) apply_unsafe: bool, pub(crate) write: bool, pub(crate) fix: bool, pub(crate) unsafe_: bool, - pub(crate) configuration: Option, + pub(crate) configuration: Option, pub(crate) paths: Vec, pub(crate) stdin_file_path: Option, - pub(crate) formatter_enabled: Option, - pub(crate) linter_enabled: Option, - pub(crate) organize_imports_enabled: Option, - pub(crate) assists_enabled: Option, + pub(crate) formatter_enabled: Option, + pub(crate) linter_enabled: Option, + pub(crate) assist_enabled: Option, pub(crate) staged: bool, pub(crate) changed: bool, pub(crate) since: Option, } impl LoadEditorConfig for CheckCommandPayload { - fn should_load_editor_config(&self, fs_configuration: &PartialConfiguration) -> bool { + fn should_load_editor_config(&self, fs_configuration: &Configuration) -> bool { self.configuration .as_ref() - .and_then(|c| c.use_editorconfig()) - .unwrap_or(fs_configuration.use_editorconfig().unwrap_or_default()) + .is_some_and(|c| c.use_editorconfig()) + || fs_configuration.use_editorconfig() } } @@ -46,9 +43,9 @@ impl CommandRunner for CheckCommandPayload { fn merge_configuration( &mut self, loaded_configuration: LoadedConfiguration, - fs: &DynRef<'_, dyn FileSystem>, + fs: &dyn FileSystem, console: &mut dyn Console, - ) -> Result { + ) -> Result { let editorconfig_search_path = loaded_configuration.directory_path.clone(); let LoadedConfiguration { configuration: biome_configuration, @@ -61,7 +58,7 @@ impl CommandRunner for CheckCommandPayload { let formatter = fs_configuration .formatter - .get_or_insert_with(PartialFormatterConfiguration::default); + .get_or_insert_with(FormatterConfiguration::default); if self.formatter_enabled.is_some() { formatter.enabled = self.formatter_enabled; @@ -69,26 +66,18 @@ impl CommandRunner for CheckCommandPayload { let linter = fs_configuration .linter - .get_or_insert_with(PartialLinterConfiguration::default); + .get_or_insert_with(LinterConfiguration::default); if self.linter_enabled.is_some() { linter.enabled = self.linter_enabled; } - let organize_imports = fs_configuration - .organize_imports - .get_or_insert_with(PartialOrganizeImports::default); + let assist = fs_configuration + .assist + .get_or_insert_with(AssistConfiguration::default); - if self.organize_imports_enabled.is_some() { - organize_imports.enabled = self.organize_imports_enabled; - } - - let assists = fs_configuration - .assists - .get_or_insert_with(PartialAssistsConfiguration::default); - - if self.assists_enabled.is_some() { - assists.enabled = self.assists_enabled; + if self.assist_enabled.is_some() { + assist.enabled = self.assist_enabled; } if let Some(mut configuration) = self.configuration.clone() { @@ -107,8 +96,8 @@ impl CommandRunner for CheckCommandPayload { fn get_files_to_process( &self, - fs: &DynRef<'_, dyn FileSystem>, - configuration: &PartialConfiguration, + fs: &dyn FileSystem, + configuration: &Configuration, ) -> Result, CliDiagnostic> { let paths = get_files_to_process_with_cli_options( self.since.as_deref(), @@ -135,21 +124,18 @@ impl CommandRunner for CheckCommandPayload { cli_options: &CliOptions, console: &mut dyn Console, _workspace: &dyn Workspace, + project_key: ProjectKey, ) -> Result { - let fix_file_mode = determine_fix_file_mode( - FixFileModeOptions { - apply: self.apply, - apply_unsafe: self.apply_unsafe, - write: self.write, - suppress: false, - suppression_reason: None, - fix: self.fix, - unsafe_: self.unsafe_, - }, - console, - )?; + let 
fix_file_mode = determine_fix_file_mode(FixFileModeOptions { + write: self.write, + suppress: false, + suppression_reason: None, + fix: self.fix, + unsafe_: self.unsafe_, + })?; Ok(Execution::new(TraversalMode::Check { + project_key, fix_file_mode, stdin: self.get_stdin(console)?, vcs_targeted: (self.staged, self.changed).into(), diff --git a/crates/biome_cli/src/commands/ci.rs b/crates/biome_cli/src/commands/ci.rs index be4689fcf04c..b16eb0251845 100644 --- a/crates/biome_cli/src/commands/ci.rs +++ b/crates/biome_cli/src/commands/ci.rs @@ -2,33 +2,34 @@ use crate::changed::get_changed_files; use crate::cli_options::CliOptions; use crate::commands::{CommandRunner, LoadEditorConfig}; use crate::{CliDiagnostic, Execution}; -use biome_configuration::analyzer::assists::PartialAssistsConfiguration; -use biome_configuration::{organize_imports::PartialOrganizeImports, PartialConfiguration}; -use biome_configuration::{PartialFormatterConfiguration, PartialLinterConfiguration}; +use biome_configuration::analyzer::assist::{AssistConfiguration, AssistEnabled}; +use biome_configuration::analyzer::LinterEnabled; +use biome_configuration::formatter::FormatterEnabled; +use biome_configuration::{Configuration, FormatterConfiguration, LinterConfiguration}; use biome_console::Console; use biome_deserialize::Merge; use biome_fs::FileSystem; use biome_service::configuration::LoadedConfiguration; -use biome_service::{DynRef, Workspace, WorkspaceError}; +use biome_service::projects::ProjectKey; +use biome_service::{Workspace, WorkspaceError}; use std::ffi::OsString; pub(crate) struct CiCommandPayload { - pub(crate) formatter_enabled: Option, - pub(crate) linter_enabled: Option, - pub(crate) organize_imports_enabled: Option, - pub(crate) assists_enabled: Option, + pub(crate) formatter_enabled: Option, + pub(crate) linter_enabled: Option, + pub(crate) assist_enabled: Option, pub(crate) paths: Vec, - pub(crate) configuration: Option, + pub(crate) configuration: Option, pub(crate) changed: bool, pub(crate) since: Option, } impl LoadEditorConfig for CiCommandPayload { - fn should_load_editor_config(&self, fs_configuration: &PartialConfiguration) -> bool { + fn should_load_editor_config(&self, fs_configuration: &Configuration) -> bool { self.configuration .as_ref() - .and_then(|c| c.use_editorconfig()) - .unwrap_or(fs_configuration.use_editorconfig().unwrap_or_default()) + .is_some_and(|c| c.use_editorconfig()) + || fs_configuration.use_editorconfig() } } @@ -38,9 +39,9 @@ impl CommandRunner for CiCommandPayload { fn merge_configuration( &mut self, loaded_configuration: LoadedConfiguration, - fs: &DynRef<'_, dyn FileSystem>, + fs: &dyn FileSystem, console: &mut dyn Console, - ) -> Result { + ) -> Result { let LoadedConfiguration { configuration: biome_configuration, directory_path: configuration_path, @@ -54,7 +55,7 @@ impl CommandRunner for CiCommandPayload { let formatter = fs_configuration .formatter - .get_or_insert_with(PartialFormatterConfiguration::default); + .get_or_insert_with(FormatterConfiguration::default); if self.formatter_enabled.is_some() { formatter.enabled = self.formatter_enabled; @@ -62,26 +63,18 @@ impl CommandRunner for CiCommandPayload { let linter = fs_configuration .linter - .get_or_insert_with(PartialLinterConfiguration::default); + .get_or_insert_with(LinterConfiguration::default); if self.linter_enabled.is_some() { linter.enabled = self.linter_enabled; } - let organize_imports = fs_configuration - .organize_imports - .get_or_insert_with(PartialOrganizeImports::default); + let assist = 
fs_configuration + .assist + .get_or_insert_with(AssistConfiguration::default); - if self.organize_imports_enabled.is_some() { - organize_imports.enabled = self.organize_imports_enabled; - } - - let assists = fs_configuration - .assists - .get_or_insert_with(PartialAssistsConfiguration::default); - - if self.assists_enabled.is_some() { - assists.enabled = self.assists_enabled; + if self.assist_enabled.is_some() { + assist.enabled = self.assist_enabled; } if let Some(mut configuration) = self.configuration.clone() { @@ -100,8 +93,8 @@ impl CommandRunner for CiCommandPayload { fn get_files_to_process( &self, - fs: &DynRef<'_, dyn FileSystem>, - configuration: &PartialConfiguration, + fs: &dyn FileSystem, + configuration: &Configuration, ) -> Result, CliDiagnostic> { if self.changed { get_changed_files(fs, configuration, self.since.as_deref()) @@ -123,16 +116,17 @@ impl CommandRunner for CiCommandPayload { cli_options: &CliOptions, _console: &mut dyn Console, _workspace: &dyn Workspace, + project_key: ProjectKey, ) -> Result { - Ok(Execution::new_ci((false, self.changed).into()).set_report(cli_options)) + Ok(Execution::new_ci(project_key, (false, self.changed).into()).set_report(cli_options)) } fn check_incompatible_arguments(&self) -> Result<(), CliDiagnostic> { - if matches!(self.formatter_enabled, Some(false)) - && matches!(self.linter_enabled, Some(false)) - && matches!(self.organize_imports_enabled, Some(false)) + if self.formatter_enabled.is_some_and(|v| !v.value()) + && self.linter_enabled.is_some_and(|v| !v.value()) + && self.assist_enabled.is_some_and(|v| !v.value()) { - return Err(CliDiagnostic::incompatible_end_configuration("Formatter, linter and organize imports are disabled, can't perform the command. At least one feature needs to be enabled. This is probably and error.")); + return Err(CliDiagnostic::incompatible_end_configuration("Formatter, linter and assist are disabled, can't perform the command. At least one feature needs to be enabled. 
This is probably an error.")); } if self.since.is_some() && !self.changed { return Err(CliDiagnostic::incompatible_arguments("since", "changed")); diff --git a/crates/biome_cli/src/commands/clean.rs index a7b3f52b9cde..270c4412ced2 100644 --- a/crates/biome_cli/src/commands/clean.rs +++ b/crates/biome_cli/src/commands/clean.rs @@ -1,15 +1,15 @@ use crate::commands::daemon::default_biome_log_path; use crate::{CliDiagnostic, CliSession}; use biome_flags::biome_env; +use camino::Utf8PathBuf; use std::fs::{create_dir, remove_dir_all}; -use std::path::PathBuf; /// Runs the clean command pub fn clean(_cli_session: CliSession) -> Result<(), CliDiagnostic> { let logs_path = biome_env() .biome_log_path .value() - .map_or(default_biome_log_path(), PathBuf::from); + .map_or(default_biome_log_path(), Utf8PathBuf::from); remove_dir_all(logs_path.clone()).and_then(|_| create_dir(logs_path))?; Ok(()) } diff --git a/crates/biome_cli/src/commands/daemon.rs index 318a6a54282a..13bc9f9aa1b3 100644 --- a/crates/biome_cli/src/commands/daemon.rs +++ b/crates/biome_cli/src/commands/daemon.rs @@ -4,9 +4,11 @@ use crate::{ CliDiagnostic, CliSession, }; use biome_console::{markup, ConsoleExt}; +use biome_fs::OsFileSystem; use biome_lsp::ServerFactory; use biome_service::{workspace::WorkspaceClient, TransportError, WorkspaceError}; -use std::{env, fs, path::PathBuf}; +use camino::Utf8PathBuf; +use std::{env, fs}; use tokio::io; use tokio::runtime::Runtime; use tracing::subscriber::Interest; @@ -21,8 +23,8 @@ use tracing_tree::HierarchicalLayer; pub(crate) fn start( session: CliSession, - config_path: Option<PathBuf>, - log_path: Option<PathBuf>, + config_path: Option<Utf8PathBuf>, + log_path: Option<Utf8PathBuf>, log_file_name_prefix: Option<String>, ) -> Result<(), CliDiagnostic> { let rt = Runtime::new()?; @@ -50,7 +52,7 @@ pub(crate) fn stop(session: CliSession) -> Result<(), CliDiagnostic> { let rt = Runtime::new()?; if let Some(transport) = open_transport(rt)? 
{ - let client = WorkspaceClient::new(transport)?; + let client = WorkspaceClient::new(transport, Box::new(OsFileSystem::default()))?; match client.shutdown() { // The `ChannelClosed` error is expected since the server can // shutdown before sending a response @@ -72,8 +74,8 @@ pub(crate) fn stop(session: CliSession) -> Result<(), CliDiagnostic> { pub(crate) fn run_server( stop_on_disconnect: bool, - config_path: Option, - log_path: Option, + config_path: Option, + log_path: Option, log_file_name_prefix: Option, ) -> Result<(), CliDiagnostic> { setup_tracing_subscriber(log_path, log_file_name_prefix); @@ -106,8 +108,8 @@ pub(crate) fn print_socket() -> Result<(), CliDiagnostic> { } pub(crate) fn lsp_proxy( - config_path: Option, - log_path: Option, + config_path: Option, + log_path: Option, log_file_name_prefix: Option, ) -> Result<(), CliDiagnostic> { let rt = Runtime::new()?; @@ -126,8 +128,8 @@ pub(crate) fn lsp_proxy( /// Copy to the process on `stdout` when the LSP responds to a message async fn start_lsp_proxy( rt: &Runtime, - config_path: Option, - log_path: Option, + config_path: Option, + log_path: Option, log_file_name_prefix: Option, ) -> Result<(), CliDiagnostic> { ensure_daemon(true, config_path, log_path, log_file_name_prefix).await?; @@ -173,14 +175,14 @@ async fn start_lsp_proxy( } pub(crate) fn read_most_recent_log_file( - log_path: Option, + log_path: Option, log_file_name_prefix: String, ) -> io::Result> { let biome_log_path = log_path.unwrap_or(default_biome_log_path()); let most_recent = fs::read_dir(biome_log_path)? .flatten() - .filter(|file| file.file_type().map_or(false, |ty| ty.is_file())) + .filter(|file| file.file_type().is_ok_and(|ty| ty.is_file())) .filter_map(|file| { match file .file_name() @@ -205,8 +207,9 @@ pub(crate) fn read_most_recent_log_file( /// is written to log files rotated on a hourly basis (in /// `biome-logs/server.log.yyyy-MM-dd-HH` files inside the system temporary /// directory) -fn setup_tracing_subscriber(log_path: Option, log_file_name_prefix: Option) { - let biome_log_path = log_path.unwrap_or(biome_fs::ensure_cache_dir().join("biome-logs")); +fn setup_tracing_subscriber(log_path: Option, log_file_name_prefix: Option) { + let biome_log_path = + log_path.unwrap_or_else(|| biome_fs::ensure_cache_dir().join("biome-logs")); let appender_builder = tracing_appender::rolling::RollingFileAppender::builder(); let file_appender = appender_builder .filename_prefix(log_file_name_prefix.unwrap_or(String::from("server.log"))) @@ -229,14 +232,10 @@ fn setup_tracing_subscriber(log_path: Option, log_file_name_prefix: Opt .init(); } -pub fn default_biome_log_path() -> PathBuf { +pub fn default_biome_log_path() -> Utf8PathBuf { match env::var_os("BIOME_LOG_PATH") { - Some(directory) => PathBuf::from(directory), - // TODO: Remove in Biome v2, and use the None part as fallback. 
- None => match env::var_os("BIOME_LOG_DIR") { - Some(directory) => PathBuf::from(directory), - None => biome_fs::ensure_cache_dir().join("biome-logs"), - }, + Some(directory) => Utf8PathBuf::from(directory.as_os_str().to_str().unwrap()), + None => biome_fs::ensure_cache_dir().join("biome-logs"), } } diff --git a/crates/biome_cli/src/commands/explain.rs b/crates/biome_cli/src/commands/explain.rs index 81a69e07650a..0fe1603f638b 100644 --- a/crates/biome_cli/src/commands/explain.rs +++ b/crates/biome_cli/src/commands/explain.rs @@ -1,4 +1,4 @@ -use biome_analyze::{FixKind, RuleMetadata}; +use biome_analyze::RuleMetadata; use biome_console::{markup, ConsoleExt}; use biome_flags::biome_env; use biome_service::documentation::Doc; @@ -8,34 +8,7 @@ use crate::{CliDiagnostic, CliSession}; fn print_rule(session: CliSession, metadata: &RuleMetadata) { session.app.console.log(markup! { - "# "{metadata.name}"\n" - }); - - match metadata.fix_kind { - FixKind::None => { - session.app.console.log(markup! { - "No fix available.\n" - }); - } - kind => { - session.app.console.log(markup! { - "Fix is "{kind}".\n" - }); - } - } - - let docs = metadata - .docs - .lines() - .map(|line| line.trim_start()) - .collect::>() - .join("\n"); - - session.app.console.log(markup! { - "This rule is "{if metadata.recommended {"recommended."} else {"not recommended."}} - "\n\n" - "# Description\n" - {docs} + {metadata} }); } @@ -49,7 +22,7 @@ pub(crate) fn explain(session: CliSession, doc: Doc) -> Result<(), CliDiagnostic let cache_dir = biome_env() .biome_log_path .value() - .unwrap_or(default_biome_log_path().display().to_string()); + .unwrap_or(default_biome_log_path().to_string()); session.app.console.error(markup! { "The daemon logs are available in the directory: \n" }); diff --git a/crates/biome_cli/src/commands/format.rs b/crates/biome_cli/src/commands/format.rs index 6b876ff5691b..ce8e7399211d 100644 --- a/crates/biome_cli/src/commands/format.rs +++ b/crates/biome_cli/src/commands/format.rs @@ -1,29 +1,28 @@ use crate::cli_options::CliOptions; use crate::commands::{get_files_to_process_with_cli_options, CommandRunner, LoadEditorConfig}; -use crate::diagnostics::DeprecatedArgument; use crate::{CliDiagnostic, Execution, TraversalMode}; -use biome_configuration::vcs::PartialVcsConfiguration; -use biome_configuration::{ - PartialConfiguration, PartialCssFormatter, PartialFilesConfiguration, - PartialFormatterConfiguration, PartialGraphqlFormatter, PartialJavascriptFormatter, - PartialJsonFormatter, -}; -use biome_console::{markup, Console, ConsoleExt}; +use biome_configuration::css::CssFormatterConfiguration; +use biome_configuration::graphql::GraphqlFormatterConfiguration; +use biome_configuration::javascript::JsFormatterConfiguration; +use biome_configuration::json::JsonFormatterConfiguration; +use biome_configuration::vcs::VcsConfiguration; +use biome_configuration::{Configuration, FilesConfiguration, FormatterConfiguration}; +use biome_console::Console; use biome_deserialize::Merge; -use biome_diagnostics::PrintDiagnostic; use biome_fs::FileSystem; use biome_service::configuration::LoadedConfiguration; -use biome_service::{DynRef, Workspace, WorkspaceError}; +use biome_service::projects::ProjectKey; +use biome_service::{Workspace, WorkspaceError}; use std::ffi::OsString; pub(crate) struct FormatCommandPayload { - pub(crate) javascript_formatter: Option, - pub(crate) json_formatter: Option, - pub(crate) css_formatter: Option, - pub(crate) graphql_formatter: Option, - pub(crate) formatter_configuration: Option, - 
pub(crate) vcs_configuration: Option, - pub(crate) files_configuration: Option, + pub(crate) javascript_formatter: Option, + pub(crate) json_formatter: Option, + pub(crate) css_formatter: Option, + pub(crate) graphql_formatter: Option, + pub(crate) formatter_configuration: Option, + pub(crate) vcs_configuration: Option, + pub(crate) files_configuration: Option, pub(crate) stdin_file_path: Option, pub(crate) write: bool, pub(crate) fix: bool, @@ -34,11 +33,11 @@ pub(crate) struct FormatCommandPayload { } impl LoadEditorConfig for FormatCommandPayload { - fn should_load_editor_config(&self, fs_configuration: &PartialConfiguration) -> bool { + fn should_load_editor_config(&self, fs_configuration: &Configuration) -> bool { self.formatter_configuration .as_ref() - .and_then(|c| c.use_editorconfig) - .unwrap_or(fs_configuration.use_editorconfig().unwrap_or_default()) + .is_some_and(|c| c.use_editorconfig_resolved()) + || fs_configuration.use_editorconfig() } } @@ -48,9 +47,9 @@ impl CommandRunner for FormatCommandPayload { fn merge_configuration( &mut self, loaded_configuration: LoadedConfiguration, - fs: &DynRef<'_, dyn FileSystem>, + fs: &dyn FileSystem, console: &mut dyn Console, - ) -> Result { + ) -> Result { let LoadedConfiguration { configuration: biome_configuration, directory_path: configuration_path, @@ -63,77 +62,18 @@ impl CommandRunner for FormatCommandPayload { fs_configuration.merge_with(biome_configuration); let mut configuration = fs_configuration; - // TODO: remove in biome 2.0 - if let Some(config) = self.formatter_configuration.as_mut() { - if let Some(indent_size) = config.indent_size { - let diagnostic = DeprecatedArgument::new(markup! { - "The argument ""--indent-size"" is deprecated, it will be removed in the next major release. Use ""--indent-width"" instead." - }); - console.error(markup! { - {PrintDiagnostic::simple(&diagnostic)} - }); - - if config.indent_width.is_none() { - config.indent_width = Some(indent_size); - } - } - } - // TODO: remove in biome 2.0 - if let Some(js_formatter) = self.javascript_formatter.as_mut() { - if let Some(indent_size) = js_formatter.indent_size { - let diagnostic = DeprecatedArgument::new(markup! { - "The argument ""--javascript-formatter-indent-size"" is deprecated, it will be removed in the next major release. Use ""--javascript-formatter-indent-width"" instead." - }); - console.error(markup! { - {PrintDiagnostic::simple(&diagnostic)} - }); - - if js_formatter.indent_width.is_none() { - js_formatter.indent_width = Some(indent_size); - } - } - - if let Some(trailing_comma) = js_formatter.trailing_comma { - let diagnostic = DeprecatedArgument::new(markup! { - "The argument ""--trailing-comma"" is deprecated, it will be removed in the next major release. Use ""--trailing-commas"" instead." - }); - console.error(markup! { - {PrintDiagnostic::simple(&diagnostic)} - }); - - if js_formatter.trailing_commas.is_none() { - js_formatter.trailing_commas = Some(trailing_comma); - } - } - } - // TODO: remove in biome 2.0 - if let Some(json_formatter) = self.json_formatter.as_mut() { - if let Some(indent_size) = json_formatter.indent_size { - let diagnostic = DeprecatedArgument::new(markup! { - "The argument ""--json-formatter-indent-size"" is deprecated, it will be removed in the next major release. Use ""--json-formatter-indent-width"" instead." - }); - console.error(markup! 
{ - {PrintDiagnostic::simple(&diagnostic)} - }); - - if json_formatter.indent_width.is_none() { - json_formatter.indent_width = Some(indent_size); - } - } - } - // merge formatter options - if !configuration + if configuration .formatter .as_ref() - .is_some_and(PartialFormatterConfiguration::is_disabled) + .is_none_or(|f| f.is_enabled()) { let formatter = configuration.formatter.get_or_insert_with(Default::default); if let Some(formatter_configuration) = self.formatter_configuration.clone() { formatter.merge_with(formatter_configuration); } - formatter.enabled = Some(true); + formatter.enabled = Some(true.into()); } if self.css_formatter.is_some() { let css = configuration.css.get_or_insert_with(Default::default); @@ -167,8 +107,8 @@ impl CommandRunner for FormatCommandPayload { fn get_files_to_process( &self, - fs: &DynRef<'_, dyn FileSystem>, - configuration: &PartialConfiguration, + fs: &dyn FileSystem, + configuration: &Configuration, ) -> Result, CliDiagnostic> { let paths = get_files_to_process_with_cli_options( self.since.as_deref(), @@ -195,8 +135,10 @@ impl CommandRunner for FormatCommandPayload { cli_options: &CliOptions, console: &mut dyn Console, _workspace: &dyn Workspace, + project_key: ProjectKey, ) -> Result { Ok(Execution::new(TraversalMode::Format { + project_key, ignore_errors: cli_options.skip_errors, write: self.should_write(), stdin: self.get_stdin(console)?, diff --git a/crates/biome_cli/src/commands/init.rs b/crates/biome_cli/src/commands/init.rs index 8ef1817fbdaf..619bda7d8419 100644 --- a/crates/biome_cli/src/commands/init.rs +++ b/crates/biome_cli/src/commands/init.rs @@ -1,12 +1,12 @@ use crate::{CliDiagnostic, CliSession}; -use biome_configuration::PartialConfiguration; +use biome_configuration::Configuration; use biome_console::{markup, ConsoleExt}; use biome_fs::ConfigName; use biome_service::configuration::create_config; -pub(crate) fn init(mut session: CliSession, emit_jsonc: bool) -> Result<(), CliDiagnostic> { - let fs = &mut session.app.fs; - create_config(fs, PartialConfiguration::init(), emit_jsonc)?; +pub(crate) fn init(session: CliSession, emit_jsonc: bool) -> Result<(), CliDiagnostic> { + let fs = session.app.workspace.fs(); + create_config(fs, Configuration::init(), emit_jsonc)?; let file_created = if emit_jsonc { ConfigName::biome_jsonc() } else { diff --git a/crates/biome_cli/src/commands/lint.rs b/crates/biome_cli/src/commands/lint.rs index bacf55f92747..115275f4f554 100644 --- a/crates/biome_cli/src/commands/lint.rs +++ b/crates/biome_cli/src/commands/lint.rs @@ -3,32 +3,29 @@ use crate::cli_options::CliOptions; use crate::commands::{get_files_to_process_with_cli_options, CommandRunner}; use crate::{CliDiagnostic, Execution, TraversalMode}; use biome_configuration::analyzer::RuleSelector; -use biome_configuration::css::PartialCssLinter; -use biome_configuration::javascript::PartialJavascriptLinter; -use biome_configuration::json::PartialJsonLinter; -use biome_configuration::vcs::PartialVcsConfiguration; -use biome_configuration::{ - PartialConfiguration, PartialFilesConfiguration, PartialGraphqlLinter, - PartialLinterConfiguration, -}; +use biome_configuration::css::CssLinterConfiguration; +use biome_configuration::graphql::GraphqlLinterConfiguration; +use biome_configuration::javascript::JsLinterConfiguration; +use biome_configuration::json::JsonLinterConfiguration; +use biome_configuration::vcs::VcsConfiguration; +use biome_configuration::{Configuration, FilesConfiguration, LinterConfiguration}; use biome_console::Console; use 
biome_deserialize::Merge; use biome_fs::FileSystem; use biome_service::configuration::LoadedConfiguration; -use biome_service::{DynRef, Workspace, WorkspaceError}; +use biome_service::projects::ProjectKey; +use biome_service::{Workspace, WorkspaceError}; use std::ffi::OsString; pub(crate) struct LintCommandPayload { - pub(crate) apply: bool, - pub(crate) apply_unsafe: bool, pub(crate) write: bool, pub(crate) fix: bool, pub(crate) unsafe_: bool, pub(crate) suppress: bool, pub(crate) suppression_reason: Option, - pub(crate) linter_configuration: Option, - pub(crate) vcs_configuration: Option, - pub(crate) files_configuration: Option, + pub(crate) linter_configuration: Option, + pub(crate) vcs_configuration: Option, + pub(crate) files_configuration: Option, pub(crate) paths: Vec, pub(crate) only: Vec, pub(crate) skip: Vec, @@ -36,10 +33,10 @@ pub(crate) struct LintCommandPayload { pub(crate) staged: bool, pub(crate) changed: bool, pub(crate) since: Option, - pub(crate) javascript_linter: Option, - pub(crate) json_linter: Option, - pub(crate) css_linter: Option, - pub(crate) graphql_linter: Option, + pub(crate) javascript_linter: Option, + pub(crate) json_linter: Option, + pub(crate) css_linter: Option, + pub(crate) graphql_linter: Option, } impl CommandRunner for LintCommandPayload { @@ -48,19 +45,19 @@ impl CommandRunner for LintCommandPayload { fn merge_configuration( &mut self, loaded_configuration: LoadedConfiguration, - _fs: &DynRef<'_, dyn FileSystem>, + _fs: &dyn FileSystem, _console: &mut dyn Console, - ) -> Result { + ) -> Result { let LoadedConfiguration { configuration: mut fs_configuration, .. } = loaded_configuration; - fs_configuration.merge_with(PartialConfiguration { + fs_configuration.merge_with(Configuration { linter: if fs_configuration .linter .as_ref() - .is_some_and(PartialLinterConfiguration::is_disabled) + .is_some_and(LinterConfiguration::is_enabled) { None } else { @@ -102,8 +99,8 @@ impl CommandRunner for LintCommandPayload { fn get_files_to_process( &self, - fs: &DynRef<'_, dyn FileSystem>, - configuration: &PartialConfiguration, + fs: &dyn FileSystem, + configuration: &Configuration, ) -> Result, CliDiagnostic> { let paths = get_files_to_process_with_cli_options( self.since.as_deref(), @@ -130,20 +127,17 @@ impl CommandRunner for LintCommandPayload { cli_options: &CliOptions, console: &mut dyn Console, _workspace: &dyn Workspace, + project_key: ProjectKey, ) -> Result { - let fix_file_mode = determine_fix_file_mode( - FixFileModeOptions { - apply: self.apply, - apply_unsafe: self.apply_unsafe, - write: self.write, - fix: self.fix, - unsafe_: self.unsafe_, - suppress: self.suppress, - suppression_reason: self.suppression_reason.clone(), - }, - console, - )?; + let fix_file_mode = determine_fix_file_mode(FixFileModeOptions { + write: self.write, + fix: self.fix, + unsafe_: self.unsafe_, + suppress: self.suppress, + suppression_reason: self.suppression_reason.clone(), + })?; Ok(Execution::new(TraversalMode::Lint { + project_key, fix_file_mode, stdin: self.get_stdin(console)?, only: self.only.clone(), diff --git a/crates/biome_cli/src/commands/migrate.rs b/crates/biome_cli/src/commands/migrate.rs index ef60810e1299..bf64b11ae4fd 100644 --- a/crates/biome_cli/src/commands/migrate.rs +++ b/crates/biome_cli/src/commands/migrate.rs @@ -5,20 +5,21 @@ use crate::cli_options::CliOptions; use crate::diagnostics::MigrationDiagnostic; use crate::execute::{Execution, TraversalMode}; use crate::CliDiagnostic; -use biome_configuration::PartialConfiguration; +use 
biome_configuration::Configuration; use biome_console::{markup, Console, ConsoleExt}; use biome_fs::FileSystem; use biome_service::configuration::LoadedConfiguration; -use biome_service::{DynRef, Workspace, WorkspaceError}; +use biome_service::projects::ProjectKey; +use biome_service::{Workspace, WorkspaceError}; +use camino::Utf8PathBuf; use std::ffi::OsString; -use std::path::PathBuf; pub(crate) struct MigrateCommandPayload { pub(crate) write: bool, pub(crate) fix: bool, pub(crate) sub_command: Option, - pub(crate) configuration_file_path: Option, - pub(crate) configuration_directory_path: Option, + pub(crate) configuration_file_path: Option, + pub(crate) configuration_directory_path: Option, } impl CommandRunner for MigrateCommandPayload { @@ -27,9 +28,9 @@ impl CommandRunner for MigrateCommandPayload { fn merge_configuration( &mut self, loaded_configuration: LoadedConfiguration, - _fs: &DynRef<'_, dyn FileSystem>, + _fs: &dyn FileSystem, _console: &mut dyn Console, - ) -> Result { + ) -> Result { self.configuration_file_path = loaded_configuration.file_path; self.configuration_directory_path = loaded_configuration.directory_path; Ok(loaded_configuration.configuration) @@ -37,8 +38,8 @@ impl CommandRunner for MigrateCommandPayload { fn get_files_to_process( &self, - _fs: &DynRef<'_, dyn FileSystem>, - _configuration: &PartialConfiguration, + _fs: &dyn FileSystem, + _configuration: &Configuration, ) -> Result, CliDiagnostic> { Ok(vec![]) } @@ -56,15 +57,13 @@ impl CommandRunner for MigrateCommandPayload { _cli_options: &CliOptions, console: &mut dyn Console, _workspace: &dyn Workspace, + project_key: ProjectKey, ) -> Result { - if let (Some(path), Some(directory_path)) = ( - self.configuration_file_path.clone(), - self.configuration_directory_path.clone(), - ) { + if let Some(path) = self.configuration_file_path.clone() { Ok(Execution::new(TraversalMode::Migrate { + project_key, write: self.should_write(), configuration_file_path: path, - configuration_directory_path: directory_path, sub_command: self.sub_command.clone(), })) } else { @@ -79,8 +78,6 @@ impl CommandRunner for MigrateCommandPayload { fn check_incompatible_arguments(&self) -> Result<(), CliDiagnostic> { check_fix_incompatible_arguments(FixFileModeOptions { - apply: false, - apply_unsafe: false, write: self.write, fix: self.fix, unsafe_: false, diff --git a/crates/biome_cli/src/commands/mod.rs b/crates/biome_cli/src/commands/mod.rs index 1c79535d5600..a0abb5ee7a6f 100644 --- a/crates/biome_cli/src/commands/mod.rs +++ b/crates/biome_cli/src/commands/mod.rs @@ -1,38 +1,44 @@ use crate::changed::{get_changed_files, get_staged_files}; use crate::cli_options::{cli_options, CliOptions, CliReporter, ColorsArg}; -use crate::diagnostics::{DeprecatedArgument, DeprecatedConfigurationFile}; -use crate::execute::Stdin; +use crate::execute::{ReportMode, Stdin}; use crate::logging::LoggingKind; use crate::{ execute_mode, setup_cli_subscriber, CliDiagnostic, CliSession, Execution, LoggingLevel, VERSION, }; -use biome_configuration::analyzer::RuleSelector; -use biome_configuration::css::PartialCssLinter; -use biome_configuration::javascript::PartialJavascriptLinter; -use biome_configuration::json::PartialJsonLinter; +use biome_configuration::analyzer::assist::AssistEnabled; +use biome_configuration::analyzer::{LinterEnabled, RuleSelector}; +use biome_configuration::css::{CssFormatterConfiguration, CssLinterConfiguration}; +use biome_configuration::formatter::FormatterEnabled; +use biome_configuration::graphql::{GraphqlFormatterConfiguration, 
GraphqlLinterConfiguration}; +use biome_configuration::javascript::{JsFormatterConfiguration, JsLinterConfiguration}; +use biome_configuration::json::{JsonFormatterConfiguration, JsonLinterConfiguration}; +use biome_configuration::vcs::VcsConfiguration; use biome_configuration::{ - css::partial_css_formatter, css::partial_css_linter, graphql::partial_graphql_formatter, - graphql::partial_graphql_linter, javascript::partial_javascript_formatter, - javascript::partial_javascript_linter, json::partial_json_formatter, json::partial_json_linter, - partial_configuration, partial_files_configuration, partial_formatter_configuration, - partial_linter_configuration, vcs::partial_vcs_configuration, vcs::PartialVcsConfiguration, - PartialCssFormatter, PartialFilesConfiguration, PartialFormatterConfiguration, - PartialGraphqlFormatter, PartialGraphqlLinter, PartialJavascriptFormatter, - PartialJsonFormatter, PartialLinterConfiguration, + configuration, css::css_formatter_configuration, css::css_linter_configuration, + files_configuration, formatter_configuration, graphql::graphql_formatter_configuration, + graphql::graphql_linter_configuration, javascript::js_formatter_configuration, + javascript::js_linter_configuration, json::json_formatter_configuration, + json::json_linter_configuration, linter_configuration, vcs::vcs_configuration, + FilesConfiguration, FormatterConfiguration, LinterConfiguration, }; -use biome_configuration::{BiomeDiagnostic, PartialConfiguration}; +use biome_configuration::{BiomeDiagnostic, Configuration}; use biome_console::{markup, Console, ConsoleExt}; -use biome_diagnostics::{Diagnostic, PrintDiagnostic}; +use biome_diagnostics::{Diagnostic, PrintDiagnostic, Severity}; use biome_fs::{BiomePath, FileSystem}; +use biome_grit_patterns::GritTargetLanguage; use biome_service::configuration::{ - load_configuration, load_editorconfig, LoadedConfiguration, PartialConfigurationExt, + load_configuration, load_editorconfig, ConfigurationExt, LoadedConfiguration, }; use biome_service::documentation::Doc; -use biome_service::workspace::{FixFileMode, RegisterProjectFolderParams, UpdateSettingsParams}; -use biome_service::{DynRef, Workspace, WorkspaceError}; +use biome_service::projects::ProjectKey; +use biome_service::workspace::{ + FixFileMode, OpenProjectParams, ScanProjectFolderParams, UpdateSettingsParams, +}; +use biome_service::{Workspace, WorkspaceError}; use bpaf::Bpaf; +use camino::Utf8PathBuf; use std::ffi::OsString; -use std::path::PathBuf; +use tracing::info; pub(crate) mod check; pub(crate) mod ci; @@ -91,11 +97,11 @@ pub enum BiomeCommand { hide_usage, fallback(biome_fs::ensure_cache_dir().join("biome-logs")), )] - log_path: PathBuf, + log_path: Utf8PathBuf, /// Allows to set a custom file path to the configuration file, /// or a custom directory path to find `biome.json` or `biome.jsonc` #[bpaf(env("BIOME_CONFIG_PATH"), long("config-path"), argument("PATH"))] - config_path: Option, + config_path: Option, }, /// Stops the Biome daemon server process. @@ -117,14 +123,6 @@ pub enum BiomeCommand { #[bpaf(long("fix"), switch, hide_usage)] fix: bool, - /// Alias for `--write`, writes safe fixes, formatting and import sorting (deprecated, use `--write`) - #[bpaf(long("apply"), switch, hide_usage)] - apply: bool, - - /// Alias for `--write --unsafe`, writes safe and unsafe fixes, formatting and import sorting (deprecated, use `--write --unsafe`) - #[bpaf(long("apply-unsafe"), switch, hide_usage)] - apply_unsafe: bool, - /// Allow to enable or disable the formatter check. 
#[bpaf( long("formatter-enabled"), @@ -132,25 +130,17 @@ pub enum BiomeCommand { optional, hide_usage )] - formatter_enabled: Option, + formatter_enabled: Option, /// Allow to enable or disable the linter check. #[bpaf(long("linter-enabled"), argument("true|false"), optional, hide_usage)] - linter_enabled: Option, - /// Allow to enable or disable the organize imports. - #[bpaf( - long("organize-imports-enabled"), - argument("true|false"), - optional, - hide_usage - )] - organize_imports_enabled: Option, + linter_enabled: Option, - /// Allow to enable or disable the assists. - #[bpaf(long("assists-enabled"), argument("true|false"), optional)] - assists_enabled: Option, + /// Allow to enable or disable the assist. + #[bpaf(long("assist-enabled"), argument("true|false"), optional)] + assist_enabled: Option, - #[bpaf(external(partial_configuration), hide_usage, optional)] - configuration: Option, + #[bpaf(external(configuration), hide_usage, optional)] + configuration: Option, #[bpaf(external, hide_usage)] cli_options: CliOptions, /// Use this option when you want to format code piped from `stdin`, and print the output to `stdout`. @@ -195,14 +185,6 @@ pub enum BiomeCommand { #[bpaf(long("fix"), switch, hide_usage)] fix: bool, - /// Alias for `--write`, writes safe fixes (deprecated, use `--write`) - #[bpaf(long("apply"), switch, hide_usage)] - apply: bool, - - /// Alias for `--write --unsafe`, writes safe and unsafe fixes (deprecated, use `--write --unsafe`) - #[bpaf(long("apply-unsafe"), switch, hide_usage)] - apply_unsafe: bool, - /// Fixes lint rule violations with a comment a suppression instead of using a rule code action (fix) #[bpaf(long("suppress"))] suppress: bool, @@ -211,26 +193,26 @@ pub enum BiomeCommand { #[bpaf(long("reason"), argument("STRING"))] suppression_reason: Option, - #[bpaf(external(partial_linter_configuration), hide_usage, optional)] - linter_configuration: Option, + #[bpaf(external(linter_configuration), hide_usage, optional)] + linter_configuration: Option, - #[bpaf(external(partial_vcs_configuration), optional, hide_usage)] - vcs_configuration: Option, + #[bpaf(external(vcs_configuration), optional, hide_usage)] + vcs_configuration: Option, - #[bpaf(external(partial_files_configuration), optional, hide_usage)] - files_configuration: Option, + #[bpaf(external(files_configuration), optional, hide_usage)] + files_configuration: Option, - #[bpaf(external(partial_javascript_linter), optional, hide_usage)] - javascript_linter: Option, + #[bpaf(external(js_linter_configuration), optional, hide_usage)] + javascript_linter: Option, - #[bpaf(external(partial_json_linter), optional, hide_usage)] - json_linter: Option, + #[bpaf(external(json_linter_configuration), optional, hide_usage)] + json_linter: Option, - #[bpaf(external(partial_css_linter), optional, hide_usage, hide)] - css_linter: Option, + #[bpaf(external(css_linter_configuration), optional, hide_usage, hide)] + css_linter: Option, - #[bpaf(external(partial_graphql_linter), optional, hide_usage, hide)] - graphql_linter: Option, + #[bpaf(external(graphql_linter_configuration), optional, hide_usage, hide)] + graphql_linter: Option, #[bpaf(external, hide_usage)] cli_options: CliOptions, @@ -276,26 +258,26 @@ pub enum BiomeCommand { /// Run the formatter on a set of files. 
#[bpaf(command)] Format { - #[bpaf(external(partial_formatter_configuration), optional, hide_usage)] - formatter_configuration: Option, + #[bpaf(external(formatter_configuration), optional, hide_usage)] + formatter_configuration: Option, - #[bpaf(external(partial_javascript_formatter), optional, hide_usage)] - javascript_formatter: Option, + #[bpaf(external(js_formatter_configuration), optional, hide_usage)] + javascript_formatter: Option, - #[bpaf(external(partial_json_formatter), optional, hide_usage)] - json_formatter: Option, + #[bpaf(external(json_formatter_configuration), optional, hide_usage)] + json_formatter: Option, - #[bpaf(external(partial_css_formatter), optional, hide_usage, hide)] - css_formatter: Option, + #[bpaf(external(css_formatter_configuration), optional, hide_usage, hide)] + css_formatter: Option, - #[bpaf(external(partial_graphql_formatter), optional, hide_usage, hide)] - graphql_formatter: Option, + #[bpaf(external(graphql_formatter_configuration), optional, hide_usage, hide)] + graphql_formatter: Option, - #[bpaf(external(partial_vcs_configuration), optional, hide_usage)] - vcs_configuration: Option, + #[bpaf(external(vcs_configuration), optional, hide_usage)] + vcs_configuration: Option, - #[bpaf(external(partial_files_configuration), optional, hide_usage)] - files_configuration: Option, + #[bpaf(external(files_configuration), optional, hide_usage)] + files_configuration: Option, /// Use this option when you want to format code piped from `stdin`, and print the output to `stdout`. /// /// The file doesn't need to exist on disk, what matters is the extension of the file. Based on the extension, Biome knows how to format the code. @@ -341,20 +323,17 @@ pub enum BiomeCommand { Ci { /// Allow to enable or disable the formatter check. #[bpaf(long("formatter-enabled"), argument("true|false"), optional)] - formatter_enabled: Option, + formatter_enabled: Option, /// Allow to enable or disable the linter check. #[bpaf(long("linter-enabled"), argument("true|false"), optional)] - linter_enabled: Option, - /// Allow to enable or disable the organize imports. - #[bpaf(long("organize-imports-enabled"), argument("true|false"), optional)] - organize_imports_enabled: Option, + linter_enabled: Option, - /// Allow to enable or disable the assists. - #[bpaf(long("assists-enabled"), argument("true|false"), optional)] - assists_enabled: Option, + /// Allow to enable or disable the assist. 
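The `--formatter-enabled`, `--linter-enabled` and `--assist-enabled` flags are now parsed into dedicated types (`FormatterEnabled`, `LinterEnabled`, `AssistEnabled`) instead of plain booleans, and the CI command's incompatibility check reads them with `is_some_and(|v| !v.value())`. A minimal stand-alone sketch of that check, using a stand-in `Enabled` newtype rather than Biome's actual types:

```rust
// Stand-in for the typed `--*-enabled` flags; only an explicit `false` for every
// feature should make the command refuse to run, while an unset flag (`None`)
// leaves it runnable.
#[derive(Clone, Copy)]
struct Enabled(bool);

impl Enabled {
    fn value(self) -> bool {
        self.0
    }
}

fn all_features_disabled(
    formatter: Option<Enabled>,
    linter: Option<Enabled>,
    assist: Option<Enabled>,
) -> bool {
    formatter.is_some_and(|v| !v.value())
        && linter.is_some_and(|v| !v.value())
        && assist.is_some_and(|v| !v.value())
}

fn main() {
    // `--formatter-enabled=false --linter-enabled=false --assist-enabled=false`
    assert!(all_features_disabled(
        Some(Enabled(false)),
        Some(Enabled(false)),
        Some(Enabled(false)),
    ));
    // Leaving the formatter flag unset keeps the command runnable.
    assert!(!all_features_disabled(None, Some(Enabled(false)), Some(Enabled(false))));
}
```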
+ #[bpaf(long("assist-enabled"), argument("true|false"), optional)] + assist_enabled: Option, - #[bpaf(external(partial_configuration), hide_usage, optional)] - configuration: Option, + #[bpaf(external(configuration), hide_usage, optional)] + configuration: Option, #[bpaf(external, hide_usage)] cli_options: CliOptions, @@ -401,11 +380,11 @@ pub enum BiomeCommand { hide_usage, fallback(biome_fs::ensure_cache_dir().join("biome-logs")), )] - log_path: PathBuf, + log_path: Utf8PathBuf, /// Allows to set a custom file path to the configuration file, /// or a custom directory path to find `biome.json` or `biome.jsonc` #[bpaf(env("BIOME_CONFIG_PATH"), long("config-path"), argument("PATH"))] - config_path: Option, + config_path: Option, /// Bogus argument to make the command work with vscode-languageclient #[bpaf(long("stdio"), hide, hide_usage, switch)] stdio: bool, @@ -444,11 +423,11 @@ pub enum BiomeCommand { #[bpaf(external, hide_usage)] cli_options: CliOptions, - #[bpaf(external(partial_files_configuration), optional, hide_usage)] - files_configuration: Option, + #[bpaf(external(files_configuration), optional, hide_usage)] + files_configuration: Option, - #[bpaf(external(partial_vcs_configuration), optional, hide_usage)] - vcs_configuration: Option, + #[bpaf(external(vcs_configuration), optional, hide_usage)] + vcs_configuration: Option, /// Use this option when you want to search through code piped from /// `stdin`, and print the output to `stdout`. @@ -461,6 +440,16 @@ pub enum BiomeCommand { #[bpaf(long("stdin-file-path"), argument("PATH"), hide_usage)] stdin_file_path: Option, + /// The language to which the pattern applies. + /// + /// Grit queries are specific to the grammar of the language they + /// target, so we currently do not support writing queries that apply + /// to multiple languages at once. + /// + /// If none given, the default language is JavaScript. + #[bpaf(long("language"), short('l'))] + language: Option, + /// The GritQL pattern to search for. /// /// Note that the search command (currently) does not support rewrites. @@ -512,16 +501,18 @@ pub enum BiomeCommand { long("log-path"), argument("PATH"), hide_usage, - fallback(biome_fs::ensure_cache_dir().join("biome-logs")), + fallback( + biome_fs::ensure_cache_dir().join("biome-logs") + ), )] - log_path: PathBuf, + log_path: Utf8PathBuf, #[bpaf(long("stop-on-disconnect"), hide_usage)] stop_on_disconnect: bool, /// Allows to set a custom file path to the configuration file, /// or a custom directory path to find `biome.json` or `biome.jsonc` #[bpaf(env("BIOME_CONFIG_PATH"), long("config-path"), argument("PATH"))] - config_path: Option, + config_path: Option, }, #[bpaf(command("__print_socket"), hide)] PrintSocket, @@ -598,13 +589,9 @@ impl BiomeCommand { } } - pub const fn has_metrics(&self) -> bool { - false - } - pub fn is_verbose(&self) -> bool { self.cli_options() - .map_or(false, |cli_options| cli_options.verbose) + .is_some_and(|cli_options| cli_options.verbose) } pub fn log_level(&self) -> LoggingLevel { @@ -618,7 +605,7 @@ impl BiomeCommand { } } -/// It accepts a [LoadedPartialConfiguration] and it prints the diagnostics emitted during parsing and deserialization. +/// It accepts a [LoadedConfiguration] and it prints the diagnostics emitted during parsing and deserialization. /// /// If it contains [errors](Severity::Error) or higher, it returns an error. 
pub(crate) fn validate_configuration_diagnostics( @@ -626,21 +613,6 @@ pub(crate) fn validate_configuration_diagnostics( console: &mut dyn Console, verbose: bool, ) -> Result<(), CliDiagnostic> { - if let Some(file_path) = loaded_configuration - .file_path - .as_ref() - .and_then(|f| f.file_name()) - .and_then(|f| f.to_str()) - { - if file_path == "rome.json" { - let diagnostic = DeprecatedConfigurationFile::new(file_path); - if diagnostic.tags().is_verbose() && verbose { - console.error(markup! {{PrintDiagnostic::verbose(&diagnostic)}}) - } else { - console.error(markup! {{PrintDiagnostic::simple(&diagnostic)}}) - } - } - } let diagnostics = loaded_configuration.as_diagnostics_iter(); for diagnostic in diagnostics { if diagnostic.tags().is_verbose() && verbose { @@ -662,28 +634,12 @@ pub(crate) fn validate_configuration_diagnostics( Ok(()) } -fn resolve_manifest( - fs: &DynRef<'_, dyn FileSystem>, -) -> Result, WorkspaceError> { - let result = fs.auto_search( - &fs.working_directory().unwrap_or_default(), - &["package.json"], - false, - )?; - - if let Some(result) = result { - return Ok(Some((BiomePath::new(result.file_path), result.content))); - } - - Ok(None) -} - fn get_files_to_process_with_cli_options( since: Option<&str>, changed: bool, staged: bool, - fs: &DynRef<'_, dyn FileSystem>, - configuration: &PartialConfiguration, + fs: &dyn FileSystem, + configuration: &Configuration, ) -> Result>, CliDiagnostic> { if since.is_some() { if !changed { @@ -708,8 +664,6 @@ fn get_files_to_process_with_cli_options( /// Holds the options to determine the fix file mode. pub(crate) struct FixFileModeOptions { - apply: bool, - apply_unsafe: bool, write: bool, suppress: bool, suppression_reason: Option, @@ -722,11 +676,8 @@ pub(crate) struct FixFileModeOptions { /// - [FixFileMode]: if safe or unsafe fixes are requested pub(crate) fn determine_fix_file_mode( options: FixFileModeOptions, - console: &mut dyn Console, ) -> Result, CliDiagnostic> { let FixFileModeOptions { - apply, - apply_unsafe, write, fix, suppress, @@ -734,22 +685,10 @@ pub(crate) fn determine_fix_file_mode( unsafe_, } = options; - if apply || apply_unsafe { - let (deprecated, alternative) = if apply { - ("--apply", "--write") - } else { - ("--apply-unsafe", "--write --unsafe") - }; - let diagnostic = DeprecatedArgument::new(markup! { - "The argument "{deprecated}" is deprecated, it will be removed in the next major release. Use "{alternative}" instead." - }); - console.error(markup! {{PrintDiagnostic::simple(&diagnostic)}}); - } - check_fix_incompatible_arguments(options)?; - let safe_fixes = apply || write || fix; - let unsafe_fixes = apply_unsafe || ((write || safe_fixes) && unsafe_); + let safe_fixes = write || fix; + let unsafe_fixes = (write || safe_fixes) && unsafe_; if unsafe_fixes { Ok(Some(FixFileMode::SafeAndUnsafeFixes)) @@ -765,35 +704,13 @@ pub(crate) fn determine_fix_file_mode( /// Checks if the fix file options are incompatible. fn check_fix_incompatible_arguments(options: FixFileModeOptions) -> Result<(), CliDiagnostic> { let FixFileModeOptions { - apply, - apply_unsafe, write, suppress, suppression_reason, fix, - unsafe_, + .. 
} = options; - if apply && apply_unsafe { - return Err(CliDiagnostic::incompatible_arguments( - "--apply", - "--apply-unsafe", - )); - } else if apply_unsafe && unsafe_ { - return Err(CliDiagnostic::incompatible_arguments( - "--apply-unsafe", - "--unsafe", - )); - } else if apply && (fix || write) { - return Err(CliDiagnostic::incompatible_arguments( - "--apply", - if fix { "--fix" } else { "--write" }, - )); - } else if apply_unsafe && (fix || write) { - return Err(CliDiagnostic::incompatible_arguments( - "--apply-unsafe", - if fix { "--fix" } else { "--write" }, - )); - } else if write && fix { + if write && fix { return Err(CliDiagnostic::incompatible_arguments("--write", "--fix")); } else if suppress && write { return Err(CliDiagnostic::incompatible_arguments( @@ -829,9 +746,9 @@ pub(crate) trait CommandRunner: Sized { /// The main command to use. fn run(&mut self, session: CliSession, cli_options: &CliOptions) -> Result<(), CliDiagnostic> { setup_cli_subscriber(cli_options.log_level, cli_options.log_kind); - let fs = &session.app.fs; let console = &mut *session.app.console; let workspace = &*session.app.workspace; + let fs = workspace.fs(); self.check_incompatible_arguments()?; let (execution, paths) = self.configure_workspace(fs, console, workspace, cli_options)?; execute_mode(execution, session, cli_options, paths) @@ -842,11 +759,10 @@ pub(crate) trait CommandRunner: Sized { /// - Configure the VCS integration /// - Computes the paths to traverse/handle. This changes based on the VCS arguments that were passed. /// - Register a project folder using the working directory. - /// - Resolves the closets manifest AKA `package.json` and registers it. /// - Updates the settings that belong to the project registered fn configure_workspace( &mut self, - fs: &DynRef<'_, dyn FileSystem>, + fs: &dyn FileSystem, console: &mut dyn Console, workspace: &dyn Workspace, cli_options: &CliOptions, @@ -860,30 +776,59 @@ pub(crate) trait CommandRunner: Sized { cli_options.verbose, )?; } + if loaded_configuration.double_configuration_found { + console.log(markup! { + "Both biome.json and biome.jsonc files were found in the same folder. Biome will use the biome.json file." 
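With `--apply` and `--apply-unsafe` removed, the fix-mode resolution in `determine_fix_file_mode` above collapses to a small decision table over `--write`, `--fix` and `--unsafe`. A self-contained sketch of that mapping, using a stand-in enum that mirrors `FixFileMode` but is not the real type:

```rust
// `--write` or `--fix` select safe fixes; adding `--unsafe` upgrades the run to
// safe + unsafe fixes; `--unsafe` on its own selects nothing.
#[derive(Debug, PartialEq)]
enum FixMode {
    SafeFixes,
    SafeAndUnsafeFixes,
}

fn fix_mode(write: bool, fix: bool, unsafe_: bool) -> Option<FixMode> {
    let safe_fixes = write || fix;
    if safe_fixes && unsafe_ {
        Some(FixMode::SafeAndUnsafeFixes)
    } else if safe_fixes {
        Some(FixMode::SafeFixes)
    } else {
        None
    }
}

fn main() {
    assert_eq!(fix_mode(true, false, false), Some(FixMode::SafeFixes)); // --write
    assert_eq!(fix_mode(false, true, true), Some(FixMode::SafeAndUnsafeFixes)); // --fix --unsafe
    assert_eq!(fix_mode(false, false, true), None); // --unsafe alone selects nothing
}
```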
+ }) + } + info!( + "Configuration file loaded: {:?}, diagnostics detected {}", + loaded_configuration.file_path, + loaded_configuration.diagnostics.len(), + ); let configuration_path = loaded_configuration.directory_path.clone(); let configuration = self.merge_configuration(loaded_configuration, fs, console)?; - let vcs_base_path = configuration_path.or(fs.working_directory()); + let vcs_base_path = configuration_path.clone().or(fs.working_directory()); let (vcs_base_path, gitignore_matches) = configuration.retrieve_gitignore_matches(fs, vcs_base_path.as_deref())?; let paths = self.get_files_to_process(fs, &configuration)?; - workspace.register_project_folder(RegisterProjectFolderParams { - path: fs.working_directory(), - set_as_current_workspace: true, + let project_path = fs + .working_directory() + .map(BiomePath::from) + .unwrap_or_default(); + let project_key = workspace.open_project(OpenProjectParams { + path: project_path.clone(), + open_uninitialized: true, })?; - let manifest_data = resolve_manifest(fs)?; - - if let Some(manifest_data) = manifest_data { - workspace.set_manifest_for_project(manifest_data.into())?; - } workspace.update_settings(UpdateSettingsParams { - workspace_directory: fs.working_directory(), + project_key, + workspace_directory: configuration_path.map(BiomePath::from), configuration, - vcs_base_path, + vcs_base_path: vcs_base_path.map(BiomePath::from), gitignore_matches, })?; - let execution = self.get_execution(cli_options, console, workspace)?; + let execution = self.get_execution(cli_options, console, workspace, project_key)?; + + if execution.traversal_mode().should_scan_project() { + let result = workspace.scan_project_folder(ScanProjectFolderParams { + project_key, + path: Some(project_path), + })?; + for diagnostic in result.diagnostics { + if diagnostic.severity() == Severity::Fatal { + console.log(markup! {{PrintDiagnostic::simple(&diagnostic)}}); + } + } + if cli_options.verbose && matches!(execution.report_mode(), ReportMode::Terminal { .. }) + { + console.log(markup! { + "Scanned project folder in "{result.duration}"." + }); + } + } + Ok((execution, paths)) } @@ -895,7 +840,7 @@ pub(crate) trait CommandRunner: Sized { let stdin = if let Some(stdin_file_path) = self.get_stdin_file_path() { let input_code = console.read(); if let Some(input_code) = input_code { - let path = PathBuf::from(stdin_file_path); + let path = Utf8PathBuf::from(stdin_file_path); Some((path, input_code).into()) } else { // we provided the argument without a piped stdin, we bail @@ -916,15 +861,15 @@ pub(crate) trait CommandRunner: Sized { fn merge_configuration( &mut self, loaded_configuration: LoadedConfiguration, - fs: &DynRef<'_, dyn FileSystem>, + fs: &dyn FileSystem, console: &mut dyn Console, - ) -> Result; + ) -> Result; /// It returns the paths that need to be handled/traversed. fn get_files_to_process( &self, - fs: &DynRef<'_, dyn FileSystem>, - configuration: &PartialConfiguration, + fs: &dyn FileSystem, + configuration: &Configuration, ) -> Result, CliDiagnostic>; /// It returns the file path to use in `stdin` mode. @@ -939,6 +884,7 @@ pub(crate) trait CommandRunner: Sized { cli_options: &CliOptions, console: &mut dyn Console, workspace: &dyn Workspace, + project_key: ProjectKey, ) -> Result; // Below, methods that consumers can implement @@ -958,16 +904,16 @@ pub(crate) trait CommandRunner: Sized { pub trait LoadEditorConfig: CommandRunner { /// Whether this command should load the `.editorconfig` file. 
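The `should_load_editor_config` implementations above also change semantics: the old code let an explicit CLI-level value win via `and_then(...).unwrap_or(...)`, while the new code ORs the two sources with `is_some_and(...) || ...`. A stand-alone illustration with plain booleans (not the real `Configuration` types):

```rust
// Old resolution: an explicit CLI value overrides the file-based configuration.
fn old_should_load(cli: Option<bool>, file_config: Option<bool>) -> bool {
    cli.unwrap_or(file_config.unwrap_or_default())
}

// New resolution: either source enabling editorconfig support is enough.
fn new_should_load(cli: Option<bool>, file_config: bool) -> bool {
    cli.is_some_and(|v| v) || file_config
}

fn main() {
    // CLI explicitly disables editorconfig support while the configuration file enables it:
    assert!(!old_should_load(Some(false), Some(true))); // old behavior: the CLI value wins
    assert!(new_should_load(Some(false), true)); // new behavior: either source enables it
}
```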
- fn should_load_editor_config(&self, fs_configuration: &PartialConfiguration) -> bool; + fn should_load_editor_config(&self, fs_configuration: &Configuration) -> bool; - /// It loads the `.editorconfig` from the file system, parses it and deserialize it into a [PartialConfiguration] + /// It loads the `.editorconfig` from the file system, parses it and deserialize it into a [Configuration] fn load_editor_config( &self, - configuration_path: Option, - fs_configuration: &PartialConfiguration, - fs: &DynRef<'_, dyn FileSystem>, + configuration_path: Option, + fs_configuration: &Configuration, + fs: &dyn FileSystem, console: &mut dyn Console, - ) -> Result { + ) -> Result { Ok(if self.should_load_editor_config(fs_configuration) { let (editorconfig, editorconfig_diagnostics) = { let search_path = configuration_path @@ -989,56 +935,34 @@ pub trait LoadEditorConfig: CommandRunner { #[cfg(test)] mod tests { - use biome_console::BufferConsole; - use super::*; #[test] fn incompatible_arguments() { - for (apply, apply_unsafe, write, suppress, suppression_reason, fix, unsafe_) in [ - (true, true, false, false, None, false, false), // --apply --apply-unsafe - (true, false, true, false, None, false, false), // --apply --write - (true, false, false, false, None, true, false), // --apply --fix - (false, true, false, false, None, false, true), // --apply-unsafe --unsafe - (false, true, true, false, None, false, false), // --apply-unsafe --write - (false, true, false, false, None, true, false), // --apply-unsafe --fix - (false, false, true, false, None, true, false), // --write --fix - ] { - assert!(check_fix_incompatible_arguments(FixFileModeOptions { - apply, - apply_unsafe, - write, - suppress, - suppression_reason, - fix, - unsafe_ - }) - .is_err()); - } + assert!(check_fix_incompatible_arguments(FixFileModeOptions { + write: true, + fix: true, + unsafe_: false, + suppress: false, + suppression_reason: None + }) + .is_err()); } #[test] fn safe_fixes() { - let mut console = BufferConsole::default(); - - for (apply, apply_unsafe, write, suppress, suppression_reason, fix, unsafe_) in [ - (true, false, false, false, None, false, false), // --apply - (false, false, true, false, None, false, false), // --write - (false, false, false, false, None, true, false), // --fix + for (write, suppress, suppression_reason, fix, unsafe_) in [ + (true, false, None, false, false), // --write + (false, false, None, true, false), // --fix ] { assert_eq!( - determine_fix_file_mode( - FixFileModeOptions { - apply, - apply_unsafe, - write, - suppress, - suppression_reason, - fix, - unsafe_ - }, - &mut console - ) + determine_fix_file_mode(FixFileModeOptions { + write, + suppress, + suppression_reason, + fix, + unsafe_ + },) .unwrap(), Some(FixFileMode::SafeFixes) ); @@ -1047,26 +971,18 @@ mod tests { #[test] fn safe_and_unsafe_fixes() { - let mut console = BufferConsole::default(); - - for (apply, apply_unsafe, write, suppress, suppression_reason, fix, unsafe_) in [ - (false, true, false, false, None, false, false), // --apply-unsafe - (false, false, true, false, None, false, true), // --write --unsafe - (false, false, false, false, None, true, true), // --fix --unsafe + for (write, fix, unsafe_, suppress, suppression_reason) in [ + (true, false, true, false, None), // --write --unsafe + (false, true, true, false, None), // --fix --unsafe ] { assert_eq!( - determine_fix_file_mode( - FixFileModeOptions { - apply, - apply_unsafe, - write, - suppress, - suppression_reason, - fix, - unsafe_ - }, - &mut console - ) + 
determine_fix_file_mode(FixFileModeOptions { + write, + suppress, + suppression_reason, + fix, + unsafe_ + },) .unwrap(), Some(FixFileMode::SafeAndUnsafeFixes) ); @@ -1075,23 +991,16 @@ mod tests { #[test] fn no_fix() { - let mut console = BufferConsole::default(); - - let (apply, apply_unsafe, write, suppress, suppression_reason, fix, unsafe_) = - (false, false, false, false, None, false, false); + let (write, suppress, suppression_reason, fix, unsafe_) = + (false, false, None, false, false); assert_eq!( - determine_fix_file_mode( - FixFileModeOptions { - apply, - apply_unsafe, - write, - suppress, - suppression_reason, - fix, - unsafe_ - }, - &mut console - ) + determine_fix_file_mode(FixFileModeOptions { + write, + suppress, + suppression_reason, + fix, + unsafe_ + },) .unwrap(), None ); diff --git a/crates/biome_cli/src/commands/rage.rs b/crates/biome_cli/src/commands/rage.rs index 4d3f42c9dd5f..33b477a0dc36 100644 --- a/crates/biome_cli/src/commands/rage.rs +++ b/crates/biome_cli/src/commands/rage.rs @@ -1,24 +1,25 @@ +use crate::commands::daemon::read_most_recent_log_file; +use crate::service::enumerate_pipes; +use crate::{service, CliDiagnostic, CliSession, VERSION}; use biome_configuration::{ConfigurationPathHint, Rules}; use biome_console::fmt::{Display, Formatter}; use biome_console::{ - fmt, markup, ConsoleExt, DebugDisplay, DebugDisplayOption, HorizontalLine, KeyValuePair, - Padding, SOFT_LINE, + fmt, markup, ConsoleExt, DebugDisplay, DisplayOption, HorizontalLine, KeyValuePair, Padding, + SOFT_LINE, }; use biome_diagnostics::termcolor::{ColorChoice, WriteColor}; use biome_diagnostics::{termcolor, PrintDescription}; use biome_flags::biome_env; -use biome_fs::FileSystem; +use biome_fs::{FileSystem, OsFileSystem}; use biome_service::configuration::{load_configuration, LoadedConfiguration}; +use biome_service::settings::Settings; use biome_service::workspace::{client, RageEntry, RageParams}; -use biome_service::{DynRef, Workspace}; -use std::path::PathBuf; +use biome_service::Workspace; +use camino::Utf8PathBuf; use std::{env, io, ops::Deref}; +use terminal_size::terminal_size; use tokio::runtime::Runtime; -use crate::commands::daemon::read_most_recent_log_file; -use crate::service::enumerate_pipes; -use crate::{service, CliDiagnostic, CliSession, VERSION}; - /// Handler for the `rage` command pub(crate) fn rage( session: CliSession, @@ -47,7 +48,7 @@ pub(crate) fn rage( {EnvVarOs("JS_RUNTIME_NAME")} {EnvVarOs("NODE_PACKAGE_MANAGER")} - {RageConfiguration { fs: &session.app.fs, formatter, linter }} + {RageConfiguration { fs: session.app.workspace.fs(), formatter, linter }} {WorkspaceRage(session.app.workspace.deref())} )); @@ -63,7 +64,7 @@ pub(crate) fn rage( .app .console .log(markup!("Discovering running Biome servers...")); - session.app.console.log(markup!({ RunningRomeServer })); + session.app.console.log(markup!({ RunningBiomeServer })); } } } @@ -105,9 +106,9 @@ impl Display for WorkspaceRage<'_> { } /// Prints information about other running biome server instances. 
-struct RunningRomeServer; +struct RunningBiomeServer; -impl Display for RunningRomeServer { +impl Display for RunningBiomeServer { fn fmt(&self, f: &mut Formatter) -> io::Result<()> { let versions = match enumerate_pipes() { Ok(iter) => iter, @@ -117,7 +118,7 @@ impl Display for RunningRomeServer { } }; - for version in versions { + for (version, path) in versions { if version == biome_configuration::VERSION { let runtime = Runtime::new()?; match service::open_transport(runtime) { @@ -130,13 +131,23 @@ impl Display for RunningRomeServer { continue; } Ok(Some(transport)) => { - markup!("\n""Running Biome Server:"" "{HorizontalLine::new(78)}" + let header = "Running Biome Server: "; + let width = { + if cfg!(debug_assertions) { + 78 + } else { + terminal_size().map_or(78, |(width, _)| width.0 as usize) + } + }; + let width = width.saturating_sub(header.len()); + + markup!("\n"{header}{HorizontalLine::new(width)}" ""\u{2139} The client isn't connected to any server but rage discovered this running Biome server."" ") .fmt(f)?; - match client(transport) { + match client(transport, Box::new(OsFileSystem::default())) { Ok(client) => WorkspaceRage(client.deref()).fmt(f)?, Err(err) => { markup!("\u{2716} Failed to connect: ").fmt(f)?; @@ -152,7 +163,16 @@ impl Display for RunningRomeServer { BiomeServerLog.fmt(f)?; } else { - markup!("\n""Incompatible Biome Server:"" "{HorizontalLine::new(78)}" + let header = "Incompatible Biome Server: "; + let width = { + if cfg!(debug_assertions) { + 78 + } else { + terminal_size().map_or(78, |(width, _)| width.0 as usize) + } + }; + let width = width.saturating_sub(header.len()); + markup!("\n"{header}{HorizontalLine::new(width)}" ""\u{2139} Rage discovered this running server using an incompatible version of Biome."" ") @@ -161,6 +181,7 @@ impl Display for RunningRomeServer { markup!( {Section("Server")} {KeyValuePair("Version", markup!({version.as_str()}))} + {KeyValuePair("Path", markup!({path.as_str()}))} ) .fmt(f)?; } @@ -170,26 +191,32 @@ impl Display for RunningRomeServer { } } -struct RageConfiguration<'a, 'app> { - fs: &'a DynRef<'app, dyn FileSystem>, +struct RageConfiguration<'a> { + fs: &'a dyn FileSystem, formatter: bool, linter: bool, } -impl Display for RageConfiguration<'_, '_> { +impl Display for RageConfiguration<'_> { fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> { Section("Biome Configuration").fmt(fmt)?; match load_configuration(self.fs, ConfigurationPathHint::default()) { Ok(loaded_configuration) => { if loaded_configuration.directory_path.is_none() { - KeyValuePair("Status", markup!("unset")).fmt(fmt)?; + KeyValuePair("Status", markup!("Not set")).fmt(fmt)?; } else { let LoadedConfiguration { configuration, diagnostics, .. } = loaded_configuration; + let vcs_enabled = configuration.is_vcs_enabled(); + let mut settings = Settings::default(); + settings + .merge_with_configuration(configuration.clone(), None, None, &[]) + .unwrap(); + let status = if !diagnostics.is_empty() { for diagnostic in diagnostics { (markup! { @@ -206,46 +233,65 @@ impl Display for RageConfiguration<'_, '_> { markup! 
( {KeyValuePair("Status", status)} - {KeyValuePair("Formatter disabled", markup!({DebugDisplay(configuration.is_formatter_disabled())}))} - {KeyValuePair("Linter disabled", markup!({DebugDisplay(configuration.is_linter_disabled())}))} - {KeyValuePair("Organize imports disabled", markup!({DebugDisplay(configuration.is_organize_imports_disabled())}))} - {KeyValuePair("VCS disabled", markup!({DebugDisplay(configuration.is_vcs_disabled())}))} + {KeyValuePair("Formatter enabled", markup!({DebugDisplay(settings.is_formatter_enabled())}))} + {KeyValuePair("Linter enabled", markup!({DebugDisplay(settings.is_linter_enabled())}))} + {KeyValuePair("Assist enabled", markup!({DebugDisplay(settings.is_assist_enabled())}))} + {KeyValuePair("VCS enabled", markup!({DebugDisplay(vcs_enabled)}))} ).fmt(fmt)?; // Print formatter configuration if --formatter option is true if self.formatter { let formatter_configuration = configuration.get_formatter_configuration(); + let ignore = formatter_configuration.ignore.map(|list| { + list.iter() + .map(|s| s.to_string()) + .collect::>() + .join(", ") + }); + let include = formatter_configuration.include.map(|list| { + list.iter() + .map(|s| s.to_string()) + .collect::>() + .join(", ") + }); + let includes = formatter_configuration.includes.map(|list| { + list.iter() + .map(|s| s.to_string()) + .collect::>() + .join(", ") + }); markup! ( {Section("Formatter")} - {KeyValuePair("Format with errors", markup!({DebugDisplay(configuration.get_formatter_configuration().format_with_errors)}))} - {KeyValuePair("Indent style", markup!({DebugDisplay(formatter_configuration.indent_style)}))} - {KeyValuePair("Indent width", markup!({DebugDisplay(formatter_configuration.indent_width)}))} - {KeyValuePair("Line ending", markup!({DebugDisplay(formatter_configuration.line_ending)}))} - {KeyValuePair("Line width", markup!({DebugDisplay(formatter_configuration.line_width.value())}))} - {KeyValuePair("Attribute position", markup!({DebugDisplay(formatter_configuration.attribute_position)}))} - {KeyValuePair("Bracket spacing", markup!({DebugDisplay(formatter_configuration.bracket_spacing)}))} - {KeyValuePair("Ignore", markup!({DebugDisplay(formatter_configuration.ignore.iter().collect::>())}))} - {KeyValuePair("Include", markup!({DebugDisplay(formatter_configuration.include.iter().collect::>())}))} + {KeyValuePair("Format with errors", markup!({DisplayOption(configuration.get_formatter_configuration().format_with_errors)}))} + {KeyValuePair("Indent style", markup!({DisplayOption(formatter_configuration.indent_style)}))} + {KeyValuePair("Indent width", markup!({DisplayOption(formatter_configuration.indent_width)}))} + {KeyValuePair("Line ending", markup!({DisplayOption(formatter_configuration.line_ending)}))} + {KeyValuePair("Line width", markup!({DisplayOption(formatter_configuration.line_width)}))} + {KeyValuePair("Attribute position", markup!({DisplayOption(formatter_configuration.attribute_position)}))} + {KeyValuePair("Bracket spacing", markup!({DisplayOption(formatter_configuration.bracket_spacing)}))} + {KeyValuePair("Ignore", markup!({DisplayOption(ignore)}))} + {KeyValuePair("Include", markup!({DisplayOption(include)}))} + {KeyValuePair("Includes", markup!({DisplayOption(includes)}))} ).fmt(fmt)?; let javascript_formatter_configuration = configuration.get_javascript_formatter_configuration(); markup! 
( {Section("JavaScript Formatter")} - {KeyValuePair("Enabled", markup!({DebugDisplay(javascript_formatter_configuration.enabled)}))} - {KeyValuePair("JSX quote style", markup!({DebugDisplay(javascript_formatter_configuration.jsx_quote_style)}))} - {KeyValuePair("Quote properties", markup!({DebugDisplay(javascript_formatter_configuration.quote_properties)}))} - {KeyValuePair("Trailing commas", markup!({DebugDisplay(javascript_formatter_configuration.trailing_commas)}))} - {KeyValuePair("Semicolons", markup!({DebugDisplay(javascript_formatter_configuration.semicolons)}))} - {KeyValuePair("Arrow parentheses", markup!({DebugDisplay(javascript_formatter_configuration.arrow_parentheses)}))} - {KeyValuePair("Bracket spacing", markup!({DebugDisplayOption(javascript_formatter_configuration.bracket_spacing)}))} - {KeyValuePair("Bracket same line", markup!({DebugDisplay(javascript_formatter_configuration.bracket_same_line)}))} - {KeyValuePair("Quote style", markup!({DebugDisplay(javascript_formatter_configuration.quote_style)}))} - {KeyValuePair("Indent style", markup!({DebugDisplayOption(javascript_formatter_configuration.indent_style)}))} - {KeyValuePair("Indent width", markup!({DebugDisplayOption(javascript_formatter_configuration.indent_width)}))} - {KeyValuePair("Line ending", markup!({DebugDisplayOption(javascript_formatter_configuration.line_ending)}))} - {KeyValuePair("Line width", markup!({DebugDisplayOption(javascript_formatter_configuration.line_width.map(|lw| lw.value()))}))} - {KeyValuePair("Attribute position", markup!({DebugDisplayOption(javascript_formatter_configuration.attribute_position)}))} + {KeyValuePair("Enabled", markup!({DisplayOption(javascript_formatter_configuration.enabled)}))} + {KeyValuePair("JSX quote style", markup!({DisplayOption(javascript_formatter_configuration.jsx_quote_style)}))} + {KeyValuePair("Quote properties", markup!({DisplayOption(javascript_formatter_configuration.quote_properties)}))} + {KeyValuePair("Trailing commas", markup!({DisplayOption(javascript_formatter_configuration.trailing_commas)}))} + {KeyValuePair("Semicolons", markup!({DisplayOption(javascript_formatter_configuration.semicolons)}))} + {KeyValuePair("Arrow parentheses", markup!({DisplayOption(javascript_formatter_configuration.arrow_parentheses)}))} + {KeyValuePair("Bracket spacing", markup!({DisplayOption(javascript_formatter_configuration.bracket_spacing)}))} + {KeyValuePair("Bracket same line", markup!({DisplayOption(javascript_formatter_configuration.bracket_same_line)}))} + {KeyValuePair("Quote style", markup!({DisplayOption(javascript_formatter_configuration.quote_style)}))} + {KeyValuePair("Indent style", markup!({DisplayOption(javascript_formatter_configuration.indent_style)}))} + {KeyValuePair("Indent width", markup!({DisplayOption(javascript_formatter_configuration.indent_width)}))} + {KeyValuePair("Line ending", markup!({DisplayOption(javascript_formatter_configuration.line_ending)}))} + {KeyValuePair("Line width", markup!({DisplayOption(javascript_formatter_configuration.line_width.map(|lw| lw.value()))}))} + {KeyValuePair("Attribute position", markup!({DisplayOption(javascript_formatter_configuration.attribute_position)}))} ) .fmt(fmt)?; @@ -253,37 +299,38 @@ impl Display for RageConfiguration<'_, '_> { configuration.get_json_formatter_configuration(); markup! 
( {Section("JSON Formatter")} - {KeyValuePair("Enabled", markup!({DebugDisplay(json_formatter_configuration.enabled)}))} - {KeyValuePair("Indent style", markup!({DebugDisplayOption(json_formatter_configuration.indent_style)}))} - {KeyValuePair("Indent width", markup!({DebugDisplayOption(json_formatter_configuration.indent_width)}))} - {KeyValuePair("Line ending", markup!({DebugDisplayOption(json_formatter_configuration.line_ending)}))} - {KeyValuePair("Line width", markup!({DebugDisplayOption(json_formatter_configuration.line_width.map(|lw| lw.value()))}))} - {KeyValuePair("Trailing Commas", markup!({DebugDisplayOption(json_formatter_configuration.trailing_commas)}))} + {KeyValuePair("Enabled", markup!({DisplayOption(json_formatter_configuration.enabled)}))} + {KeyValuePair("Indent style", markup!({DisplayOption(json_formatter_configuration.indent_style)}))} + {KeyValuePair("Indent width", markup!({DisplayOption(json_formatter_configuration.indent_width)}))} + {KeyValuePair("Line ending", markup!({DisplayOption(json_formatter_configuration.line_ending)}))} + {KeyValuePair("Line width", markup!({DisplayOption(json_formatter_configuration.line_width.map(|lw| lw.value()))}))} + {KeyValuePair("Trailing Commas", markup!({DisplayOption(json_formatter_configuration.trailing_commas)}))} + {KeyValuePair("Expand lists", markup!({DisplayOption(json_formatter_configuration.expand)}))} ).fmt(fmt)?; let css_formatter_configuration = configuration.get_css_formatter_configuration(); markup! ( {Section("CSS Formatter")} - {KeyValuePair("Enabled", markup!({DebugDisplay(css_formatter_configuration.enabled)}))} - {KeyValuePair("Indent style", markup!({DebugDisplayOption(css_formatter_configuration.indent_style)}))} - {KeyValuePair("Indent width", markup!({DebugDisplayOption(css_formatter_configuration.indent_width)}))} - {KeyValuePair("Line ending", markup!({DebugDisplayOption(css_formatter_configuration.line_ending)}))} - {KeyValuePair("Line width", markup!({DebugDisplayOption(css_formatter_configuration.line_width)}))} - {KeyValuePair("Quote style", markup!({DebugDisplay(css_formatter_configuration.quote_style)}))} + {KeyValuePair("Enabled", markup!({DisplayOption(css_formatter_configuration.enabled)}))} + {KeyValuePair("Indent style", markup!({DisplayOption(css_formatter_configuration.indent_style)}))} + {KeyValuePair("Indent width", markup!({DisplayOption(css_formatter_configuration.indent_width)}))} + {KeyValuePair("Line ending", markup!({DisplayOption(css_formatter_configuration.line_ending)}))} + {KeyValuePair("Line width", markup!({DisplayOption(css_formatter_configuration.line_width)}))} + {KeyValuePair("Quote style", markup!({DisplayOption(css_formatter_configuration.quote_style)}))} ).fmt(fmt)?; let graphql_formatter_configuration = configuration.get_graphql_formatter_configuration(); markup! 
( {Section("GraphQL Formatter")} - {KeyValuePair("Enabled", markup!({DebugDisplayOption(graphql_formatter_configuration.enabled)}))} - {KeyValuePair("Indent style", markup!({DebugDisplayOption(graphql_formatter_configuration.indent_style)}))} - {KeyValuePair("Indent width", markup!({DebugDisplayOption(graphql_formatter_configuration.indent_width)}))} - {KeyValuePair("Line ending", markup!({DebugDisplayOption(graphql_formatter_configuration.line_ending)}))} - {KeyValuePair("Line width", markup!({DebugDisplayOption(graphql_formatter_configuration.line_width)}))} - {KeyValuePair("Bracket spacing", markup!({DebugDisplayOption(graphql_formatter_configuration.bracket_spacing)}))} - {KeyValuePair("Quote style", markup!({DebugDisplayOption(graphql_formatter_configuration.quote_style)}))} + {KeyValuePair("Enabled", markup!({DisplayOption(graphql_formatter_configuration.enabled)}))} + {KeyValuePair("Indent style", markup!({DisplayOption(graphql_formatter_configuration.indent_style)}))} + {KeyValuePair("Indent width", markup!({DisplayOption(graphql_formatter_configuration.indent_width)}))} + {KeyValuePair("Line ending", markup!({DisplayOption(graphql_formatter_configuration.line_ending)}))} + {KeyValuePair("Line width", markup!({DisplayOption(graphql_formatter_configuration.line_width)}))} + {KeyValuePair("Bracket spacing", markup!({DisplayOption(graphql_formatter_configuration.bracket_spacing)}))} + {KeyValuePair("Quote style", markup!({DisplayOption(graphql_formatter_configuration.quote_style)}))} ).fmt(fmt)?; } @@ -294,15 +341,14 @@ impl Display for RageConfiguration<'_, '_> { let javascript_linter = configuration.get_javascript_linter_configuration(); let json_linter = configuration.get_json_linter_configuration(); let css_linter = configuration.get_css_linter_configuration(); - let graphq_linter = configuration.get_graphql_linter_configuration(); + let graphql_linter = configuration.get_graphql_linter_configuration(); markup! ( {Section("Linter")} - {KeyValuePair("JavaScript enabled", markup!({DebugDisplay(javascript_linter.enabled)}))} - {KeyValuePair("JSON enabled", markup!({DebugDisplay(json_linter.enabled)}))} - {KeyValuePair("CSS enabled", markup!({DebugDisplay(css_linter.enabled)}))} - {KeyValuePair("GraphQL enabled", markup!({DebugDisplay(graphq_linter.enabled)}))} - {KeyValuePair("Recommended", markup!({DebugDisplay(linter_configuration.recommended.unwrap_or_default())}))} - {KeyValuePair("All", markup!({DebugDisplay(linter_configuration.all.unwrap_or_default())}))} + {KeyValuePair("JavaScript enabled", markup!({DisplayOption(javascript_linter.enabled)}))} + {KeyValuePair("JSON enabled", markup!({DisplayOption(json_linter.enabled)}))} + {KeyValuePair("CSS enabled", markup!({DisplayOption(css_linter.enabled)}))} + {KeyValuePair("GraphQL enabled", markup!({DisplayOption(graphql_linter.enabled)}))} + {KeyValuePair("Recommended", markup!({DisplayOption(linter_configuration.recommended)}))} {RageConfigurationLintRules("Enabled rules", linter_configuration)} ).fmt(fmt)?; } @@ -325,12 +371,13 @@ impl Display for RageConfigurationLintRules<'_> { fn fmt(&self, fmt: &mut Formatter<'_>) -> io::Result<()> { let rules_str = self.0; let padding = Padding::new(2); + let padding_rules = Padding::new(4); fmt.write_markup(markup! {{padding}{rules_str}":"})?; fmt.write_markup(markup! {{SOFT_LINE}})?; let rules = self.1.as_enabled_rules(); let rules = rules.iter().collect::>(); for rule in rules { - fmt.write_markup(markup! {{padding}{rule}})?; + fmt.write_markup(markup! 
{{padding_rules}{rule}})?; fmt.write_markup(markup! {{SOFT_LINE}})?; } @@ -345,7 +392,7 @@ impl fmt::Display for EnvVarOs { let name = self.0; match env::var_os(name) { None => KeyValuePair(name, markup! { "unset" }).fmt(fmt), - Some(value) => KeyValuePair(name, markup! {{DebugDisplay(value)}}).fmt(fmt), + Some(value) => KeyValuePair(name, markup! {{DisplayOption(value.to_str())}}).fmt(fmt), } } } @@ -363,7 +410,7 @@ struct BiomeServerLog; impl Display for BiomeServerLog { fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> { if let Ok(Some(log)) = read_most_recent_log_file( - biome_env().biome_log_path.value().map(PathBuf::from), + biome_env().biome_log_path.value().map(Utf8PathBuf::from), biome_env() .biome_log_prefix .value() diff --git a/crates/biome_cli/src/commands/search.rs b/crates/biome_cli/src/commands/search.rs index 233d526b5da6..50bfc0ef7e10 100644 --- a/crates/biome_cli/src/commands/search.rs +++ b/crates/biome_cli/src/commands/search.rs @@ -1,23 +1,25 @@ use crate::cli_options::CliOptions; use crate::commands::CommandRunner; use crate::{CliDiagnostic, Execution, TraversalMode}; -use biome_configuration::{ - vcs::PartialVcsConfiguration, PartialConfiguration, PartialFilesConfiguration, -}; +use biome_configuration::vcs::VcsConfiguration; +use biome_configuration::{Configuration, FilesConfiguration}; use biome_console::Console; use biome_deserialize::Merge; use biome_fs::FileSystem; +use biome_grit_patterns::GritTargetLanguage; use biome_service::configuration::LoadedConfiguration; +use biome_service::projects::ProjectKey; use biome_service::workspace::ParsePatternParams; -use biome_service::{DynRef, Workspace, WorkspaceError}; +use biome_service::{Workspace, WorkspaceError}; use std::ffi::OsString; pub(crate) struct SearchCommandPayload { - pub(crate) files_configuration: Option, + pub(crate) files_configuration: Option, pub(crate) paths: Vec, pub(crate) pattern: String, + pub(crate) language: Option, pub(crate) stdin_file_path: Option, - pub(crate) vcs_configuration: Option, + pub(crate) vcs_configuration: Option, } impl CommandRunner for SearchCommandPayload { @@ -26,9 +28,9 @@ impl CommandRunner for SearchCommandPayload { fn merge_configuration( &mut self, loaded_configuration: LoadedConfiguration, - _fs: &DynRef<'_, dyn FileSystem>, + _fs: &dyn FileSystem, _console: &mut dyn Console, - ) -> Result { + ) -> Result { let LoadedConfiguration { mut configuration, .. } = loaded_configuration; @@ -42,8 +44,8 @@ impl CommandRunner for SearchCommandPayload { fn get_files_to_process( &self, - _fs: &DynRef<'_, dyn FileSystem>, - _configuration: &PartialConfiguration, + _fs: &dyn FileSystem, + _configuration: &Configuration, ) -> Result, CliDiagnostic> { Ok(self.paths.clone()) } @@ -59,17 +61,21 @@ impl CommandRunner for SearchCommandPayload { fn get_execution( &self, cli_options: &CliOptions, - _console: &mut dyn Console, + console: &mut dyn Console, workspace: &dyn Workspace, + project_key: ProjectKey, ) -> Result { let pattern = workspace .parse_pattern(ParsePatternParams { pattern: self.pattern.clone(), + default_language: self.language.clone().unwrap_or_default(), })? 
.pattern_id; Ok(Execution::new(TraversalMode::Search { + project_key, pattern, - stdin: self.get_stdin(_console)?, + language: self.language.clone(), + stdin: self.get_stdin(console)?, }) .set_report(cli_options)) } diff --git a/crates/biome_cli/src/diagnostics.rs b/crates/biome_cli/src/diagnostics.rs index 48aa995f8261..6cf1354c4f3a 100644 --- a/crates/biome_cli/src/diagnostics.rs +++ b/crates/biome_cli/src/diagnostics.rs @@ -1,9 +1,8 @@ -use biome_console::fmt::Display; use biome_console::markup; -use biome_diagnostics::adapters::{BpafError, IoError, SerdeJsonError}; use biome_diagnostics::{ Advices, Category, Diagnostic, Error, LogCategory, MessageAndDescription, Severity, Visit, }; +use biome_diagnostics::{BpafError, IoError, SerdeJsonError}; use biome_service::WorkspaceError; use std::process::{ExitCode, Termination}; use std::{env::current_exe, fmt::Debug}; @@ -258,14 +257,6 @@ pub struct DeprecatedArgument { pub message: MessageAndDescription, } -impl DeprecatedArgument { - pub fn new(message: impl Display) -> Self { - Self { - message: MessageAndDescription::from(markup! {{message}}.to_owned()), - } - } -} - #[derive(Debug, Diagnostic)] pub enum ReportDiagnostic { /// Emitted when trying to serialise the report @@ -475,26 +466,6 @@ impl Termination for CliDiagnostic { } } -#[derive(Debug, Diagnostic)] -#[diagnostic( -category = "internalError/fs", - severity = Warning, - message( - description = "The configuration file {path} is deprecated. Use biome.json instead.", - message("The configuration file "{self.path}" is deprecated. Use ""biome.json"" instead."), - ) -)] -pub struct DeprecatedConfigurationFile { - #[location(resource)] - pub path: String, -} - -impl DeprecatedConfigurationFile { - pub fn new(path: impl Into) -> Self { - Self { path: path.into() } - } -} - #[derive(Debug, Default, Diagnostic)] #[diagnostic( severity = Error, diff --git a/crates/biome_cli/src/execute/diagnostics.rs b/crates/biome_cli/src/execute/diagnostics.rs index 3f2ff5ffbca5..3738a1e8ac31 100644 --- a/crates/biome_cli/src/execute/diagnostics.rs +++ b/crates/biome_cli/src/execute/diagnostics.rs @@ -1,7 +1,7 @@ -use biome_diagnostics::adapters::{IoError, StdError}; use biome_diagnostics::{ Advices, Category, Diagnostic, DiagnosticExt, DiagnosticTags, Error, Visit, }; +use biome_diagnostics::{IoError, StdError}; use biome_text_edit::TextEdit; use std::io; @@ -20,21 +20,10 @@ pub(crate) struct CIFormatDiffDiagnostic { #[derive(Debug, Diagnostic)] #[diagnostic( - category = "organizeImports", - message = "Import statements differs from the output" + category = "assist", + message = "Applied actions differs from the output" )] -pub(crate) struct CIOrganizeImportsDiffDiagnostic { - #[location(resource)] - pub(crate) file_name: String, - #[advice] - pub(crate) diff: ContentDiffAdvice, -} -#[derive(Debug, Diagnostic)] -#[diagnostic( - category = "assists", - message = "Applied assists differs from the output" -)] -pub(crate) struct CIAssistsDiffDiagnostic { +pub(crate) struct CIAssistDiffDiagnostic { #[location(resource)] pub(crate) file_name: String, #[advice] @@ -56,24 +45,11 @@ pub(crate) struct FormatDiffDiagnostic { #[derive(Debug, Diagnostic)] #[diagnostic( - category = "organizeImports", - severity = Error, - message = "Import statements could be sorted:" -)] -pub(crate) struct OrganizeImportsDiffDiagnostic { - #[location(resource)] - pub(crate) file_name: String, - #[advice] - pub(crate) diff: ContentDiffAdvice, -} - -#[derive(Debug, Diagnostic)] -#[diagnostic( - category = "assists", + category = 
"assist", severity = Error, - message = "Not all assists were applied:" + message = "Not all actions were applied:" )] -pub(crate) struct AssistsDiffDiagnostic { +pub(crate) struct AssistDiffDiagnostic { #[location(resource)] pub(crate) file_name: String, #[advice] diff --git a/crates/biome_cli/src/execute/migrate.rs b/crates/biome_cli/src/execute/migrate.rs index df4662dc9a8c..5323551dfb5a 100644 --- a/crates/biome_cli/src/execute/migrate.rs +++ b/crates/biome_cli/src/execute/migrate.rs @@ -2,22 +2,27 @@ use crate::commands::MigrateSubCommand; use crate::diagnostics::MigrationDiagnostic; use crate::execute::diagnostics::{ContentDiffAdvice, MigrateDiffDiagnostic}; use crate::{CliDiagnostic, CliSession}; -use biome_configuration::PartialConfiguration; +use biome_analyze::AnalysisFilter; +use biome_configuration::Configuration; use biome_console::{markup, ConsoleExt}; use biome_deserialize::json::deserialize_from_json_ast; use biome_deserialize::Merge; use biome_diagnostics::Diagnostic; use biome_diagnostics::{category, PrintDiagnostic}; use biome_formatter::ParseFormatNumberError; -use biome_fs::{BiomePath, ConfigName, FileSystemExt, OpenOptions}; +use biome_fs::{BiomePath, OpenOptions}; +use biome_json_formatter::context::JsonFormatOptions; +use biome_json_formatter::format_node; use biome_json_parser::{parse_json_with_cache, JsonParserOptions}; use biome_json_syntax::{JsonFileSource, JsonRoot}; use biome_migrate::{migrate_configuration, ControlFlow}; use biome_rowan::{AstNode, NodeCache}; -use biome_service::workspace::{ChangeFileParams, FixAction, FormatFileParams, OpenFileParams}; +use biome_service::projects::ProjectKey; +use biome_service::workspace::{ + ChangeFileParams, FileContent, FixAction, FormatFileParams, OpenFileParams, +}; +use camino::Utf8PathBuf; use std::borrow::Cow; -use std::ffi::OsStr; -use std::path::PathBuf; mod eslint; mod eslint_any_rule_to_biome; @@ -32,9 +37,9 @@ mod prettier; pub(crate) struct MigratePayload<'a> { pub(crate) session: CliSession<'a>, + pub(crate) project_key: ProjectKey, pub(crate) write: bool, - pub(crate) configuration_file_path: PathBuf, - pub(crate) configuration_directory_path: PathBuf, + pub(crate) configuration_file_path: Utf8PathBuf, pub(crate) verbose: bool, pub(crate) sub_command: Option, } @@ -42,16 +47,16 @@ pub(crate) struct MigratePayload<'a> { pub(crate) fn run(migrate_payload: MigratePayload) -> Result<(), CliDiagnostic> { let MigratePayload { session, + project_key, write, configuration_file_path, - configuration_directory_path, verbose, sub_command, } = migrate_payload; let mut cache = NodeCache::default(); - let fs = &session.app.fs; let console = session.app.console; let workspace = session.app.workspace; + let fs = workspace.fs(); let open_options = if write { OpenOptions::default().read(true).write(true) @@ -64,17 +69,21 @@ pub(crate) fn run(migrate_payload: MigratePayload) -> Result<(), CliDiagnostic> biome_config_file.read_to_string(&mut biome_config_content)?; let biome_path = BiomePath::new(configuration_file_path.as_path()); + let parse_options = match configuration_file_path.extension() { + Some("jsonc") => JsonParserOptions::default() + .with_allow_comments() + .with_allow_trailing_commas(), + _ => JsonParserOptions::default(), + }; workspace.open_file(OpenFileParams { + project_key, path: biome_path.clone(), - content: biome_config_content.to_string(), + content: FileContent::FromClient(biome_config_content.to_string()), version: 0, document_file_source: Some(JsonFileSource::json().into()), + persist_node_cache: 
false, })?; - let parsed = parse_json_with_cache( - &biome_config_content, - &mut cache, - JsonParserOptions::default(), - ); + let parsed = parse_json_with_cache(&biome_config_content, &mut cache, parse_options); match sub_command { Some(MigrateSubCommand::Prettier) => { @@ -83,8 +92,7 @@ pub(crate) fn run(migrate_payload: MigratePayload) -> Result<(), CliDiagnostic> data: prettier_config, } = prettier::read_config_file(fs, console)?; let biome_config = - deserialize_from_json_ast::(&parsed.tree(), "") - .into_deserialized(); + deserialize_from_json_ast::(&parsed.tree(), "").into_deserialized(); let Some(mut biome_config) = biome_config else { return Ok(()); }; @@ -103,15 +111,11 @@ pub(crate) fn run(migrate_payload: MigratePayload) -> Result<(), CliDiagnostic> biome_config .formatter .get_or_insert(Default::default()) - .ignore + .includes .get_or_insert(Default::default()) .extend(ignore_patterns.patterns); } - if ignore_patterns.has_negated_patterns { - console.log(markup! { - {prettier::IGNORE_FILE}" contains negated glob patterns that start with ""!"".\nThese patterns cannot be migrated because Biome doesn't support them." - }) - } else if write && biome_config != old_biome_config { + if write && biome_config != old_biome_config { console.log(markup!{ {prettier::IGNORE_FILE}" has been successfully migrated." }); @@ -128,18 +132,22 @@ pub(crate) fn run(migrate_payload: MigratePayload) -> Result<(), CliDiagnostic> }) })?; workspace.change_file(ChangeFileParams { + project_key, path: biome_path.clone(), content: new_content, version: 1, })?; - let printed = workspace.format_file(FormatFileParams { path: biome_path })?; + let printed = workspace.format_file(FormatFileParams { + project_key, + path: biome_path, + })?; if write { biome_config_file.set_content(printed.as_code().as_bytes())?; console.log(markup!{ {prettier_path}" has been successfully migrated." }); } else { - let file_name = configuration_file_path.display().to_string(); + let file_name = configuration_file_path.to_string(); let diagnostic = MigrateDiffDiagnostic { file_name, diff: ContentDiffAdvice { @@ -163,8 +171,7 @@ pub(crate) fn run(migrate_payload: MigratePayload) -> Result<(), CliDiagnostic> data: eslint_config, } = eslint::read_eslint_config(fs, console)?; let biome_config = - deserialize_from_json_ast::(&parsed.tree(), "") - .into_deserialized(); + deserialize_from_json_ast::(&parsed.tree(), "").into_deserialized(); let Some(mut biome_config) = biome_config else { return Ok(()); }; @@ -180,15 +187,11 @@ pub(crate) fn run(migrate_payload: MigratePayload) -> Result<(), CliDiagnostic> biome_config .linter .get_or_insert(Default::default()) - .ignore + .includes .get_or_insert(Default::default()) .extend(ignore_patterns.patterns); } - if ignore_patterns.has_negated_patterns { - console.log(markup! { - {eslint::IGNORE_FILE}" contains negated glob patterns that start with ""!"".\nThese patterns cannot be migrated because Biome doesn't support them." - }) - } else if write && biome_config != old_biome_config { + if write && biome_config != old_biome_config { console.log(markup!{ {eslint::IGNORE_FILE}" has been successfully migrated." 
}); @@ -205,18 +208,22 @@ pub(crate) fn run(migrate_payload: MigratePayload) -> Result<(), CliDiagnostic> }) })?; workspace.change_file(ChangeFileParams { + project_key, path: biome_path.clone(), content: new_content, version: 1, })?; - let printed = workspace.format_file(FormatFileParams { path: biome_path })?; + let printed = workspace.format_file(FormatFileParams { + project_key, + path: biome_path, + })?; if write { biome_config_file.set_content(printed.as_code().as_bytes())?; console.log(markup!{ {eslint_path}" has been successfully migrated." }); } else { - let file_name = configuration_file_path.display().to_string(); + let file_name = configuration_file_path.to_string(); let diagnostic = MigrateDiffDiagnostic { file_name, diff: ContentDiffAdvice { @@ -237,17 +244,14 @@ pub(crate) fn run(migrate_payload: MigratePayload) -> Result<(), CliDiagnostic> } } None => { - let has_deprecated_configuration = - configuration_file_path.file_name() == Some(OsStr::new("rome.json")); - let mut errors = 0; let mut tree = parsed.tree(); let mut actions = Vec::new(); loop { let (action, _) = migrate_configuration( &tree, + AnalysisFilter::default(), configuration_file_path.as_path(), - biome_configuration::VERSION.to_string(), |signal| { let current_diagnostic = signal.diagnostic(); if current_diagnostic.is_some() { @@ -285,37 +289,31 @@ pub(crate) fn run(migrate_payload: MigratePayload) -> Result<(), CliDiagnostic> } let new_configuration_content = tree.to_string(); - if biome_config_content != new_configuration_content || has_deprecated_configuration { + if biome_config_content != new_configuration_content { if write { - let mut configuration_file = if has_deprecated_configuration { - let biome_file_path = - configuration_directory_path.join(ConfigName::biome_json()); - fs.create_new(biome_file_path.as_path())? + let mut configuration_file = biome_config_file; + let format_options = JsonFormatOptions::default(); + let formatted = format_node(format_options, tree.syntax()) + .ok() + .map(|formatted| formatted.print()) + .and_then(|printed| printed.ok()); + + if let Some(formatted) = formatted { + configuration_file.set_content(formatted.as_code().as_bytes())?; } else { - biome_config_file - }; - configuration_file.set_content(tree.to_string().as_bytes())?; + configuration_file.set_content(new_configuration_content.as_bytes())?; + } console.log(markup!{ - "The configuration "{{configuration_file_path.display().to_string()}}" has been successfully migrated." + "The configuration "{{configuration_file_path.to_string()}}" has been successfully migrated." 
}) } else { - let file_name = configuration_file_path.display().to_string(); - let diagnostic = if has_deprecated_configuration { - MigrateDiffDiagnostic { - file_name, - diff: ContentDiffAdvice { - old: "rome.json".to_string(), - new: "biome.json".to_string(), - }, - } - } else { - MigrateDiffDiagnostic { - file_name, - diff: ContentDiffAdvice { - old: biome_config_content, - new: new_configuration_content, - }, - } + let file_name = configuration_file_path.to_string(); + let diagnostic = MigrateDiffDiagnostic { + file_name, + diff: ContentDiffAdvice { + old: biome_config_content, + new: new_configuration_content, + }, }; if diagnostic.tags().is_verbose() { if verbose { diff --git a/crates/biome_cli/src/execute/migrate/eslint.rs b/crates/biome_cli/src/execute/migrate/eslint.rs index 0a4dc27cba03..e0d6ce097b4c 100644 --- a/crates/biome_cli/src/execute/migrate/eslint.rs +++ b/crates/biome_cli/src/execute/migrate/eslint.rs @@ -1,17 +1,15 @@ +use crate::diagnostics::MigrationDiagnostic; +use crate::CliDiagnostic; use biome_console::{markup, Console, ConsoleExt}; use biome_deserialize::json::deserialize_from_json_str; use biome_deserialize::Merge; use biome_diagnostics::{DiagnosticExt, PrintDiagnostic}; use biome_fs::{FileSystem, OpenOptions}; use biome_json_parser::JsonParserOptions; -use biome_service::DynRef; +use camino::Utf8Path; use std::borrow::Cow; -use std::ffi::OsStr; use std::path::{Path, PathBuf}; -use crate::diagnostics::MigrationDiagnostic; -use crate::CliDiagnostic; - use super::eslint_eslint; use super::node; @@ -24,7 +22,7 @@ use super::node; /// Note that we don't need to deserialise every existing rule option. /// We only need to deserialise options that have equivalent biome options. /// This greatly reduces the amount of work involved. - +/// /// ESLint flat configuration filenames. /// /// See https://eslint.org/docs/latest/use/configure/configuration-files-new @@ -75,11 +73,11 @@ pub(crate) const IGNORE_FILE: &str = ".eslintignore"; /// /// The `extends` field is recursively resolved. pub(crate) fn read_eslint_config( - fs: &DynRef<'_, dyn FileSystem>, + fs: &dyn FileSystem, console: &mut dyn Console, ) -> Result { for config_path_str in FLAT_CONFIG_FILES { - let path = Path::new(config_path_str); + let path = Utf8Path::new(config_path_str); if fs.path_exists(path) { return load_flat_config_data(path, console).map(|data| Config { path: config_path_str, @@ -88,7 +86,7 @@ pub(crate) fn read_eslint_config( } } for config_path_str in LEGACY_CONFIG_FILES { - let path = Path::new(config_path_str); + let path = Utf8Path::new(config_path_str); if fs.path_exists(path) { return load_legacy_config_data(fs, path, console).map(|data| Config { path: config_path_str, @@ -97,7 +95,7 @@ pub(crate) fn read_eslint_config( } } // We don't report an error if ESLint config is not embedded in `PACKAGE_JSON`. - if let Ok(data) = load_legacy_config_data(fs, Path::new(PACKAGE_JSON), console) { + if let Ok(data) = load_legacy_config_data(fs, Utf8Path::new(PACKAGE_JSON), console) { return Ok(Config { path: PACKAGE_JSON, data: data.into(), @@ -117,17 +115,17 @@ pub(crate) struct Config { /// Load an ESlint Flat config /// See https://eslint.org/docs/latest/use/configure/configuration-files-new fn load_flat_config_data( - path: &Path, + path: &Utf8Path, console: &mut dyn Console, ) -> Result { - let node::Resolution { content, .. } = node::load_config(&path.to_string_lossy())?; + let node::Resolution { content, .. 
} = node::load_config(path.as_ref())?; let (deserialized, diagnostics) = deserialize_from_json_str::( &content, JsonParserOptions::default(), "", ) .consume(); - let path_str = path.to_string_lossy(); + let path_str = path.to_string(); for diagnostic in diagnostics.into_iter().filter(|diag| { matches!( diag.severity(), @@ -151,11 +149,11 @@ fn load_flat_config_data( /// Load an ESlint legacy config /// See https://eslint.org/docs/latest/use/configure/configuration-files fn load_legacy_config_data( - fs: &DynRef<'_, dyn FileSystem>, - path: &Path, + fs: &dyn FileSystem, + path: &Utf8Path, console: &mut dyn Console, ) -> Result { - let (deserialized, diagnostics) = match path.extension().and_then(OsStr::to_str) { + let (deserialized, diagnostics) = match path.extension() { None | Some("json") => { let mut file = fs.open_with_options(path, OpenOptions::default().read(true))?; let mut content = String::new(); @@ -193,7 +191,7 @@ fn load_legacy_config_data( } } Some("js" | "cjs") => { - let node::Resolution { content, .. } = node::load_config(&path.to_string_lossy())?; + let node::Resolution { content, .. } = node::load_config(path.as_ref())?; deserialize_from_json_str::( &content, JsonParserOptions::default(), @@ -209,7 +207,7 @@ fn load_legacy_config_data( })) } }; - let path_str = path.to_string_lossy(); + let path_str = path.to_string(); for diagnostic in diagnostics.into_iter().filter(|diag| { matches!( diag.severity(), diff --git a/crates/biome_cli/src/execute/migrate/eslint_any_rule_to_biome.rs b/crates/biome_cli/src/execute/migrate/eslint_any_rule_to_biome.rs index d80d70fa886f..0fd2eda9e9e2 100644 --- a/crates/biome_cli/src/execute/migrate/eslint_any_rule_to_biome.rs +++ b/crates/biome_cli/src/execute/migrate/eslint_any_rule_to_biome.rs @@ -11,8 +11,11 @@ pub(crate) fn migrate_eslint_any_rule( match eslint_name { "@mysticatea/no-this-in-static" => { let group = rules.complexity.get_or_insert_with(Default::default); - let rule = group.no_this_in_static.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_this_in_static + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "@next/google-font-display" => { if !options.include_nursery { @@ -20,9 +23,10 @@ pub(crate) fn migrate_eslint_any_rule( } let group = rules.nursery.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_google_font_display .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@next/google-font-preconnect" => { if !options.include_nursery { @@ -30,9 +34,10 @@ pub(crate) fn migrate_eslint_any_rule( } let group = rules.nursery.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_google_font_preconnect .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@next/no-document-import-in-page" => { if !options.include_nursery { @@ -40,17 +45,21 @@ pub(crate) fn migrate_eslint_any_rule( } let group = rules.nursery.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_document_import_in_page .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@next/no-head-element" => { if !options.include_nursery { return false; } let group = rules.nursery.get_or_insert_with(Default::default); 
- let rule = group.no_head_element.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_head_element + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "@next/no-head-import-in-document" => { if !options.include_nursery { @@ -58,24 +67,40 @@ pub(crate) fn migrate_eslint_any_rule( } let group = rules.nursery.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_head_import_in_document .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@next/no-img-element" => { if !options.include_nursery { return false; } let group = rules.nursery.get_or_insert_with(Default::default); - let rule = group.no_img_element.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_img_element + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); + } + "@next/no-unwanted-polyfillio" => { + if !options.include_nursery { + return false; + } + let group = rules.nursery.get_or_insert_with(Default::default); + let rule = group + .unwrap_group_as_mut() + .no_unwanted_polyfillio + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "@stylistic/jsx-self-closing-comp" => { let group = rules.style.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_self_closing_elements .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/adjacent-overload-signatures" => { if !options.include_nursery { @@ -83,21 +108,26 @@ pub(crate) fn migrate_eslint_any_rule( } let group = rules.nursery.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_adjacent_overload_signatures .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/array-type" => { let group = rules.style.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_consistent_array_type .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/ban-types" => { let group = rules.complexity.get_or_insert_with(Default::default); - let rule = group.no_banned_types.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_banned_types + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/consistent-type-exports" => { if !options.include_inspired { @@ -105,8 +135,11 @@ pub(crate) fn migrate_eslint_any_rule( return false; } let group = rules.style.get_or_insert_with(Default::default); - let rule = group.use_export_type.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_export_type + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/consistent-type-imports" => { if !options.include_inspired { @@ -114,28 +147,38 @@ pub(crate) fn migrate_eslint_any_rule( return false; } let group = rules.style.get_or_insert_with(Default::default); - let rule = 
group.use_import_type.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_import_type + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/default-param-last" => { let group = rules.style.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_default_parameter_last .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/dot-notation" => { let group = rules.complexity.get_or_insert_with(Default::default); - let rule = group.use_literal_keys.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_literal_keys + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/explicit-function-return-type" => { if !options.include_nursery { return false; } let group = rules.nursery.get_or_insert_with(Default::default); - let rule = group.use_explicit_type.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_explicit_type + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/explicit-member-accessibility" => { if !options.include_nursery { @@ -143,9 +186,10 @@ pub(crate) fn migrate_eslint_any_rule( } let group = rules.nursery.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_consistent_member_accessibility .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/naming-convention" => { if !options.include_inspired { @@ -154,28 +198,34 @@ pub(crate) fn migrate_eslint_any_rule( } let group = rules.style.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_naming_convention .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/no-array-constructor" => { let group = rules.correctness.get_or_insert_with(Default::default); - let rule = group.use_array_literals.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_array_literals + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/no-dupe-class-members" => { let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_duplicate_class_members .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/no-empty-function" => { let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_empty_block_statements .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/no-empty-interface" => { if !options.include_inspired { @@ -183,74 +233,113 @@ pub(crate) fn migrate_eslint_any_rule( return false; } let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.no_empty_interface.get_or_insert(Default::default()); - 
rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_empty_interface + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/no-explicit-any" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.no_explicit_any.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_explicit_any + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/no-extra-non-null-assertion" => { let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_extra_non_null_assertion .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/no-extraneous-class" => { let group = rules.complexity.get_or_insert_with(Default::default); - let rule = group.no_static_only_class.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_static_only_class + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); + } + "@typescript-eslint/no-floating-promises" => { + if !options.include_nursery { + return false; + } + let group = rules.nursery.get_or_insert_with(Default::default); + let rule = group + .unwrap_group_as_mut() + .no_floating_promises + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/no-inferrable-types" => { let group = rules.style.get_or_insert_with(Default::default); - let rule = group.no_inferrable_types.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_inferrable_types + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/no-invalid-void-type" => { let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_confusing_void_type .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/no-loss-of-precision" => { let group = rules.correctness.get_or_insert_with(Default::default); - let rule = group.no_precision_loss.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_precision_loss + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/no-misused-new" => { let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_misleading_instantiator .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/no-namespace" => { let group = rules.style.get_or_insert_with(Default::default); - let rule = group.no_namespace.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_namespace + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/no-non-null-assertion" => { let group = rules.style.get_or_insert_with(Default::default); let rule = group + 
.unwrap_group_as_mut() .no_non_null_assertion .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/no-redeclare" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.no_redeclare.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_redeclare + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/no-require-imports" => { if !options.include_nursery { return false; } let group = rules.nursery.get_or_insert_with(Default::default); - let rule = group.no_common_js.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_common_js + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/no-restricted-imports" => { if !options.include_nursery { @@ -258,17 +347,21 @@ pub(crate) fn migrate_eslint_any_rule( } let group = rules.nursery.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_restricted_imports .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/no-restricted-types" => { if !options.include_nursery { return false; } let group = rules.nursery.get_or_insert_with(Default::default); - let rule = group.no_restricted_types.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_restricted_types + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/no-this-alias" => { if !options.include_inspired { @@ -277,49 +370,58 @@ pub(crate) fn migrate_eslint_any_rule( } let group = rules.complexity.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_useless_this_alias .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/no-unnecessary-type-constraint" => { let group = rules.complexity.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_useless_type_constraint .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/no-unsafe-declaration-merging" => { let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_unsafe_declaration_merging .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/no-unused-vars" => { let group = rules.correctness.get_or_insert_with(Default::default); - let rule = group.no_unused_variables.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_unused_variables + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/no-use-before-define" => { let group = rules.correctness.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_invalid_use_before_declaration .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + 
rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/no-useless-constructor" => { let group = rules.complexity.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_useless_constructor .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/no-useless-empty-export" => { let group = rules.complexity.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_useless_empty_export .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/only-throw-error" => { if !options.include_inspired { @@ -327,8 +429,11 @@ pub(crate) fn migrate_eslint_any_rule( return false; } let group = rules.style.get_or_insert_with(Default::default); - let rule = group.use_throw_only_error.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_throw_only_error + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/parameter-properties" => { if !options.include_inspired { @@ -337,59 +442,89 @@ pub(crate) fn migrate_eslint_any_rule( } let group = rules.style.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_parameter_properties .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/prefer-as-const" => { let group = rules.style.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_as_const_assertion .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/prefer-enum-initializers" => { let group = rules.style.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_enum_initializers .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/prefer-for-of" => { let group = rules.style.get_or_insert_with(Default::default); - let rule = group.use_for_of.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_for_of + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/prefer-function-type" => { let group = rules.style.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_shorthand_function_type .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/prefer-literal-enum-member" => { let group = rules.style.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_literal_enum_members .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/prefer-namespace-keyword" => { let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_namespace_keyword .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } 
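The `eslint_any_rule_to_biome.rs` hunks above and below consistently replace `rule.set_level(rule_severity.into())` with `rule.set_level(rule.level().max(rule_severity.into()))`, so that when several ESLint rules map onto the same Biome rule, an already-configured level is never downgraded. A minimal sketch of that merge, using a hypothetical `Severity` enum rather than Biome's real types:

```rust
// Illustrative sketch only: keep the strictest severity seen so far when
// multiple ESLint rules feed into one Biome rule. The enum and helper below
// are stand-ins, not Biome's actual configuration types.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum Severity {
    Off,
    Info,
    Warn,
    Error,
}

/// Merge an incoming severity into the current one, never downgrading.
fn merge_severity(current: Severity, incoming: Severity) -> Severity {
    current.max(incoming)
}

fn main() {
    // Example: one equivalent rule is configured as "warn" and another as
    // "error"; the resulting Biome rule ends up at "error".
    let mut level = Severity::Off;
    for eslint_level in [Severity::Warn, Severity::Error] {
        level = merge_severity(level, eslint_level);
    }
    assert_eq!(level, Severity::Error);
}
```

Because the variants derive `Ord` from least to most severe, `max` always keeps the stricter of the two levels, which is the behavior the repeated `rule.level().max(rule_severity.into())` calls rely on.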
"@typescript-eslint/prefer-optional-chain" => { let group = rules.complexity.get_or_insert_with(Default::default); - let rule = group.use_optional_chain.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_optional_chain + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "@typescript-eslint/require-await" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.use_await.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_await + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); + } + "ban-ts-comment" => { + if !options.include_inspired { + results.has_inspired_rules = true; + return false; + } + if !options.include_nursery { + return false; + } + let group = rules.nursery.get_or_insert_with(Default::default); + let rule = group + .unwrap_group_as_mut() + .no_ts_ignore + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "barrel-files/avoid-barrel-files" => { if !options.include_inspired { @@ -397,143 +532,197 @@ pub(crate) fn migrate_eslint_any_rule( return false; } let group = rules.performance.get_or_insert_with(Default::default); - let rule = group.no_barrel_file.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_barrel_file + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "barrel-files/avoid-namespace-import" => { let group = rules.style.get_or_insert_with(Default::default); - let rule = group.no_namespace_import.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_namespace_import + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "barrel-files/avoid-re-export-all" => { let group = rules.performance.get_or_insert_with(Default::default); - let rule = group.no_re_export_all.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_re_export_all + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "constructor-super" => { let group = rules.correctness.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_invalid_constructor_super .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "curly" => { let group = rules.style.get_or_insert_with(Default::default); - let rule = group.use_block_statements.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_block_statements + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "default-case" => { let group = rules.style.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_default_switch_clause .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "default-case-last" => { let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_default_switch_clause_last .get_or_insert(Default::default()); - 
rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "default-param-last" => { let group = rules.style.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_default_parameter_last .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "deno-lint/no-process-global" => { if !options.include_nursery { return false; } let group = rules.nursery.get_or_insert_with(Default::default); - let rule = group.no_process_global.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_process_global + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "dot-notation" => { let group = rules.complexity.get_or_insert_with(Default::default); - let rule = group.use_literal_keys.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_literal_keys + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "eqeqeq" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.no_double_equals.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_double_equals + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "for-direction" => { let group = rules.correctness.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_valid_for_direction .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "getter-return" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.use_getter_return.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_getter_return + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "guard-for-in" => { if !options.include_nursery { return false; } let group = rules.nursery.get_or_insert_with(Default::default); - let rule = group.use_guard_for_in.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_guard_for_in + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "import-access/eslint-plugin-import-access" => { - if !options.include_inspired { - results.has_inspired_rules = true; - return false; - } if !options.include_nursery { return false; } let group = rules.nursery.get_or_insert_with(Default::default); let rule = group - .use_import_restrictions + .unwrap_group_as_mut() + .no_package_private_imports .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "import/exports-last" => { if !options.include_nursery { return false; } let group = rules.nursery.get_or_insert_with(Default::default); - let rule = group.use_exports_last.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_exports_last + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "import/no-commonjs" => { if !options.include_nursery { return false; } let 
group = rules.nursery.get_or_insert_with(Default::default); - let rule = group.no_common_js.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_common_js + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); + } + "import/no-cycle" => { + if !options.include_nursery { + return false; + } + let group = rules.nursery.get_or_insert_with(Default::default); + let rule = group + .unwrap_group_as_mut() + .no_import_cycles + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "import/no-default-export" => { let group = rules.style.get_or_insert_with(Default::default); - let rule = group.no_default_export.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_default_export + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "import/no-extraneous-dependencies" => { let group = rules.correctness.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_undeclared_dependencies .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "import/no-nodejs-modules" => { let group = rules.correctness.get_or_insert_with(Default::default); - let rule = group.no_nodejs_modules.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_nodejs_modules + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "jest/max-nested-describe" => { let group = rules.complexity.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_excessive_nested_test_suites .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "jest/no-disabled-tests" => { if !options.include_inspired { @@ -541,13 +730,19 @@ pub(crate) fn migrate_eslint_any_rule( return false; } let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.no_skipped_tests.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_skipped_tests + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "jest/no-done-callback" => { let group = rules.style.get_or_insert_with(Default::default); - let rule = group.no_done_callback.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_done_callback + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "jest/no-duplicate-hooks" => { if !options.include_inspired { @@ -556,9 +751,10 @@ pub(crate) fn migrate_eslint_any_rule( } let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_duplicate_test_hooks .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "jest/no-export" => { if !options.include_inspired { @@ -566,8 +762,11 @@ pub(crate) fn migrate_eslint_any_rule( return false; } let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.no_exports_in_test.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let 
rule = group + .unwrap_group_as_mut() + .no_exports_in_test + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "jest/no-focused-tests" => { if !options.include_inspired { @@ -575,8 +774,11 @@ pub(crate) fn migrate_eslint_any_rule( return false; } let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.no_focused_tests.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_focused_tests + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "jest/no-standalone-expect" => { if !options.include_inspired { @@ -585,55 +787,74 @@ pub(crate) fn migrate_eslint_any_rule( } let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_misplaced_assertion .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/alt-text" => { let group = rules.a11y.get_or_insert_with(Default::default); - let rule = group.use_alt_text.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_alt_text + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/anchor-has-content" => { let group = rules.a11y.get_or_insert_with(Default::default); - let rule = group.use_anchor_content.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_anchor_content + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/anchor-is-valid" => { let group = rules.a11y.get_or_insert_with(Default::default); - let rule = group.use_valid_anchor.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_valid_anchor + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/aria-activedescendant-has-tabindex" => { let group = rules.a11y.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_aria_activedescendant_with_tabindex .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/aria-props" => { let group = rules.a11y.get_or_insert_with(Default::default); - let rule = group.use_valid_aria_props.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_valid_aria_props + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/aria-proptypes" => { let group = rules.a11y.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_valid_aria_values .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/aria-role" => { let group = rules.a11y.get_or_insert_with(Default::default); - let rule = group.use_valid_aria_role.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_valid_aria_role + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/aria-unsupported-elements" => { let group = 
rules.a11y.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_aria_unsupported_elements .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/autocomplete-valid" => { if !options.include_nursery { @@ -641,98 +862,130 @@ pub(crate) fn migrate_eslint_any_rule( } let group = rules.nursery.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_valid_autocomplete .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/click-events-have-key-events" => { let group = rules.a11y.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_key_with_click_events .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/heading-has-content" => { let group = rules.a11y.get_or_insert_with(Default::default); - let rule = group.use_heading_content.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_heading_content + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/html-has-lang" => { let group = rules.a11y.get_or_insert_with(Default::default); - let rule = group.use_html_lang.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_html_lang + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/iframe-has-title" => { let group = rules.a11y.get_or_insert_with(Default::default); - let rule = group.use_iframe_title.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_iframe_title + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/img-redundant-alt" => { let group = rules.a11y.get_or_insert_with(Default::default); - let rule = group.no_redundant_alt.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_redundant_alt + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/interactive-supports-focus" => { let group = rules.a11y.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_focusable_interactive .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/label-has-associated-control" => { let group = rules.a11y.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_label_without_control .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/lang" => { let group = rules.a11y.get_or_insert_with(Default::default); - let rule = group.use_valid_lang.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_valid_lang + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/media-has-caption" => { let group = rules.a11y.get_or_insert_with(Default::default); - let rule = 
group.use_media_caption.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_media_caption + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/mouse-events-have-key-events" => { let group = rules.a11y.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_key_with_mouse_events .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/no-access-key" => { let group = rules.a11y.get_or_insert_with(Default::default); - let rule = group.no_access_key.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_access_key + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/no-aria-hidden-on-focusable" => { let group = rules.a11y.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_aria_hidden_on_focusable .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/no-autofocus" => { let group = rules.a11y.get_or_insert_with(Default::default); - let rule = group.no_autofocus.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_autofocus + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/no-distracting-elements" => { let group = rules.a11y.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_distracting_elements .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/no-interactive-element-to-noninteractive-role" => { let group = rules.a11y.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_interactive_element_to_noninteractive_role .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/no-noninteractive-element-interactions" => { if !options.include_nursery { @@ -740,28 +993,34 @@ pub(crate) fn migrate_eslint_any_rule( } let group = rules.nursery.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_noninteractive_element_interactions .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/no-noninteractive-element-to-interactive-role" => { let group = rules.a11y.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_noninteractive_element_to_interactive_role .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/no-noninteractive-tabindex" => { let group = rules.a11y.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_noninteractive_tabindex .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/no-redundant-roles" => { let group = rules.a11y.get_or_insert_with(Default::default); - let rule = group.no_redundant_roles.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let 
rule = group + .unwrap_group_as_mut() + .no_redundant_roles + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/no-static-element-interactions" => { if !options.include_nursery { @@ -769,23 +1028,26 @@ pub(crate) fn migrate_eslint_any_rule( } let group = rules.nursery.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_static_element_interactions .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/prefer-tag-over-role" => { let group = rules.a11y.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_semantic_elements .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/role-has-required-aria-props" => { let group = rules.a11y.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_aria_props_for_role .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/role-supports-aria-props" => { if !options.include_nursery { @@ -793,56 +1055,77 @@ pub(crate) fn migrate_eslint_any_rule( } let group = rules.nursery.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_aria_props_supported_by_role .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/scope" => { let group = rules.a11y.get_or_insert_with(Default::default); - let rule = group.no_header_scope.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_header_scope + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "jsx-a11y/tabindex-no-positive" => { let group = rules.a11y.get_or_insert_with(Default::default); - let rule = group.no_positive_tabindex.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_positive_tabindex + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "n/no-process-env" => { if !options.include_nursery { return false; } let group = rules.nursery.get_or_insert_with(Default::default); - let rule = group.no_process_env.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_process_env + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-array-constructor" => { let group = rules.correctness.get_or_insert_with(Default::default); - let rule = group.use_array_literals.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_array_literals + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-async-promise-executor" => { let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_async_promise_executor .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-case-declarations" => { let group = rules.correctness.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() 
.no_switch_declarations .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-class-assign" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.no_class_assign.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_class_assign + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-compare-neg-zero" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.no_compare_neg_zero.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_compare_neg_zero + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-cond-assign" => { if !options.include_inspired { @@ -851,79 +1134,101 @@ pub(crate) fn migrate_eslint_any_rule( } let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_assign_in_expressions .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-console" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.no_console.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_console + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-const-assign" => { let group = rules.correctness.get_or_insert_with(Default::default); - let rule = group.no_const_assign.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_const_assign + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-constant-condition" => { let group = rules.correctness.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_constant_condition .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-constructor-return" => { let group = rules.correctness.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_constructor_return .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-control-regex" => { let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_control_characters_in_regex .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-debugger" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.no_debugger.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_debugger + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-dupe-args" => { let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_duplicate_parameters .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-dupe-class-members" => 
{ let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_duplicate_class_members .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-dupe-else-if" => { if !options.include_nursery { return false; } let group = rules.nursery.get_or_insert_with(Default::default); - let rule = group.no_duplicate_else_if.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_duplicate_else_if + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-dupe-keys" => { let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_duplicate_object_keys .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-duplicate-case" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.no_duplicate_case.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_duplicate_case + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-else-return" => { if !options.include_inspired { @@ -931,92 +1236,123 @@ pub(crate) fn migrate_eslint_any_rule( return false; } let group = rules.style.get_or_insert_with(Default::default); - let rule = group.no_useless_else.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_useless_else + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-empty" => { let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_empty_block_statements .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-empty-character-class" => { let group = rules.correctness.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_empty_character_class_in_regex .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-empty-function" => { let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_empty_block_statements .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-empty-pattern" => { let group = rules.correctness.get_or_insert_with(Default::default); - let rule = group.no_empty_pattern.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_empty_pattern + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-empty-static-block" => { let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_empty_block_statements .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-eval" => { let group = rules.security.get_or_insert_with(Default::default); - let rule = group.no_global_eval.get_or_insert(Default::default()); - 
rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_global_eval + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-ex-assign" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.no_catch_assign.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_catch_assign + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-extra-boolean-cast" => { let group = rules.complexity.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_extra_boolean_cast .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-extra-label" => { let group = rules.complexity.get_or_insert_with(Default::default); - let rule = group.no_useless_label.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_useless_label + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-fallthrough" => { let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_fallthrough_switch_clause .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-func-assign" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.no_function_assign.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_function_assign + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-global-assign" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.no_global_assign.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_global_assign + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-import-assign" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.no_import_assign.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_import_assign + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-inner-declarations" => { let group = rules.correctness.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_inner_declarations .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-irregular-whitespace" => { if !options.include_nursery { @@ -1024,14 +1360,18 @@ pub(crate) fn migrate_eslint_any_rule( } let group = rules.nursery.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_irregular_whitespace .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-label-var" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.no_label_var.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_label_var + 
.get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-labels" => { if !options.include_inspired { @@ -1039,119 +1379,153 @@ pub(crate) fn migrate_eslint_any_rule( return false; } let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.no_confusing_labels.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_confusing_labels + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-lone-blocks" => { let group = rules.complexity.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_useless_lone_block_statements .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-lonely-if" => { let group = rules.style.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_collapsed_else_if .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-loss-of-precision" => { let group = rules.correctness.get_or_insert_with(Default::default); - let rule = group.no_precision_loss.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_precision_loss + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-misleading-character-class" => { let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_misleading_character_class .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-negated-condition" => { let group = rules.style.get_or_insert_with(Default::default); - let rule = group.no_negation_else.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_negation_else + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-nested-ternary" => { if !options.include_nursery { return false; } let group = rules.nursery.get_or_insert_with(Default::default); - let rule = group.no_nested_ternary.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_nested_ternary + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-new-native-nonconstructor" => { let group = rules.correctness.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_invalid_builtin_instantiation .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-new-symbol" => { let group = rules.correctness.get_or_insert_with(Default::default); - let rule = group.no_new_symbol.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_new_symbol + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-new-wrappers" => { let group = rules.style.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_consistent_builtin_instantiation .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + 
rule.set_level(rule.level().max(rule_severity.into())); } "no-nonoctal-decimal-escape" => { let group = rules.correctness.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_nonoctal_decimal_escape .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-obj-calls" => { let group = rules.correctness.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_global_object_calls .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-octal-escape" => { if !options.include_nursery { return false; } let group = rules.nursery.get_or_insert_with(Default::default); - let rule = group.no_octal_escape.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_octal_escape + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-param-reassign" => { let group = rules.style.get_or_insert_with(Default::default); - let rule = group.no_parameter_assign.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_parameter_assign + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-prototype-builtins" => { let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_prototype_builtins .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-redeclare" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.no_redeclare.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_redeclare + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-regex-spaces" => { let group = rules.complexity.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_multiple_spaces_in_regular_expression_literals .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-restricted-globals" => { let group = rules.style.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_restricted_globals .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-restricted-imports" => { if !options.include_nursery { @@ -1159,9 +1533,10 @@ pub(crate) fn migrate_eslint_any_rule( } let group = rules.nursery.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_restricted_imports .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-secrets/no-secrets" => { if !options.include_inspired { @@ -1172,40 +1547,59 @@ pub(crate) fn migrate_eslint_any_rule( return false; } let group = rules.nursery.get_or_insert_with(Default::default); - let rule = group.no_secrets.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_secrets + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-self-assign" 
=> { let group = rules.correctness.get_or_insert_with(Default::default); - let rule = group.no_self_assign.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_self_assign + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-self-compare" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.no_self_compare.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_self_compare + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-sequences" => { let group = rules.style.get_or_insert_with(Default::default); - let rule = group.no_comma_operator.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_comma_operator + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-setter-return" => { let group = rules.correctness.get_or_insert_with(Default::default); - let rule = group.no_setter_return.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_setter_return + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-shadow-restricted-names" => { let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_shadow_restricted_names .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-sparse-arrays" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.no_sparse_array.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_sparse_array + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-template-curly-in-string" => { if !options.include_nursery { @@ -1213,14 +1607,18 @@ pub(crate) fn migrate_eslint_any_rule( } let group = rules.nursery.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_template_curly_in_string .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-this-before-super" => { let group = rules.correctness.get_or_insert_with(Default::default); - let rule = group.no_unreachable_super.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_unreachable_super + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-throw-literal" => { if !options.include_inspired { @@ -1228,92 +1626,123 @@ pub(crate) fn migrate_eslint_any_rule( return false; } let group = rules.style.get_or_insert_with(Default::default); - let rule = group.use_throw_only_error.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_throw_only_error + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-undef" => { let group = rules.correctness.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_undeclared_variables .get_or_insert(Default::default()); - 
rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-undef-init" => { let group = rules.complexity.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_useless_undefined_initialization .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-unneeded-ternary" => { let group = rules.complexity.get_or_insert_with(Default::default); - let rule = group.no_useless_ternary.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_useless_ternary + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-unreachable" => { let group = rules.correctness.get_or_insert_with(Default::default); - let rule = group.no_unreachable.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_unreachable + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-unsafe-finally" => { let group = rules.correctness.get_or_insert_with(Default::default); - let rule = group.no_unsafe_finally.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_unsafe_finally + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-unsafe-negation" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.no_unsafe_negation.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_unsafe_negation + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-unsafe-optional-chaining" => { let group = rules.correctness.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_unsafe_optional_chaining .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-unused-labels" => { let group = rules.correctness.get_or_insert_with(Default::default); - let rule = group.no_unused_labels.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_unused_labels + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-unused-private-class-members" => { let group = rules.correctness.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_unused_private_class_members .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-unused-vars" => { let group = rules.correctness.get_or_insert_with(Default::default); - let rule = group.no_unused_variables.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_unused_variables + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-use-before-define" => { let group = rules.correctness.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_invalid_use_before_declaration .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } 
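The other shape repeated in every arm is the lazy creation of the group and rule entries (`get_or_insert_with(Default::default)` / `get_or_insert(Default::default())`) behind the `include_inspired` / `include_nursery` gates. The sketch below reproduces one such arm with hypothetical `Rules`, `Group`, and `RuleConf` types; the `.unwrap_group_as_mut()` call added throughout the real diff reaches the rule fields through the new group wrapper and is deliberately left out of this simplified version.

```rust
// Illustrative sketch only: these types approximate the shape of the generated
// configuration structs used by the migration code; names are hypothetical.
#[derive(Debug, Default)]
struct RuleConf {
    level: u8, // stands in for the severity enum
}

#[derive(Debug, Default)]
struct Group {
    no_example_rule: Option<RuleConf>,
}

#[derive(Debug, Default)]
struct Rules {
    nursery: Option<Group>,
}

struct MigrationOptions {
    include_nursery: bool,
}

/// Mirrors one match arm: bail out when the target rule is gated behind an
/// option, otherwise lazily create the group and rule entries and merge levels.
fn migrate_example(rules: &mut Rules, options: &MigrationOptions, severity: u8) -> bool {
    if !options.include_nursery {
        return false;
    }
    let group = rules.nursery.get_or_insert_with(Default::default);
    let rule = group.no_example_rule.get_or_insert(Default::default());
    rule.level = rule.level.max(severity);
    true
}

fn main() {
    let mut rules = Rules::default();
    let options = MigrationOptions { include_nursery: true };
    assert!(migrate_example(&mut rules, &options, 2));
    // A second, weaker mapping does not downgrade the stored level.
    assert!(migrate_example(&mut rules, &options, 1));
    assert_eq!(rules.nursery.unwrap().no_example_rule.unwrap().level, 2);
}
```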
"no-useless-catch" => { let group = rules.complexity.get_or_insert_with(Default::default); - let rule = group.no_useless_catch.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_useless_catch + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-useless-concat" => { let group = rules.complexity.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_useless_string_concat .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-useless-constructor" => { let group = rules.complexity.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_useless_constructor .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-useless-escape" => { if !options.include_nursery { @@ -1321,41 +1750,58 @@ pub(crate) fn migrate_eslint_any_rule( } let group = rules.nursery.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_useless_escape_in_regex .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-useless-rename" => { let group = rules.complexity.get_or_insert_with(Default::default); - let rule = group.no_useless_rename.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_useless_rename + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-var" => { - let group = rules.style.get_or_insert_with(Default::default); - let rule = group.no_var.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let group = rules.suspicious.get_or_insert_with(Default::default); + let rule = group + .unwrap_group_as_mut() + .no_var + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-void" => { let group = rules.complexity.get_or_insert_with(Default::default); - let rule = group.no_void.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_void + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "no-with" => { let group = rules.complexity.get_or_insert_with(Default::default); - let rule = group.no_with.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_with + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "one-var" => { let group = rules.style.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_single_var_declarator .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "operator-assignment" => { let group = rules.style.get_or_insert_with(Default::default); - let rule = group.use_shorthand_assign.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_shorthand_assign + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "prefer-arrow-callback" => { if !options.include_inspired { @@ -1363,69 +1809,94 @@ pub(crate) fn 
migrate_eslint_any_rule( return false; } let group = rules.complexity.get_or_insert_with(Default::default); - let rule = group.use_arrow_function.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_arrow_function + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "prefer-const" => { let group = rules.style.get_or_insert_with(Default::default); - let rule = group.use_const.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_const + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "prefer-exponentiation-operator" => { let group = rules.style.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_exponentiation_operator .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "prefer-numeric-literals" => { let group = rules.style.get_or_insert_with(Default::default); - let rule = group.use_numeric_literals.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_numeric_literals + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "prefer-object-has-own" => { let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_prototype_builtins .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "prefer-regex-literals" => { let group = rules.complexity.get_or_insert_with(Default::default); - let rule = group.use_regex_literals.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_regex_literals + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "prefer-rest-params" => { let group = rules.style.get_or_insert_with(Default::default); - let rule = group.no_arguments.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_arguments + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "prefer-template" => { let group = rules.style.get_or_insert_with(Default::default); - let rule = group.use_template.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_template + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "radix" => { if !options.include_nursery { return false; } let group = rules.nursery.get_or_insert_with(Default::default); - let rule = group.use_parse_int_radix.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_parse_int_radix + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "react-hooks/exhaustive-deps" => { let group = rules.correctness.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_exhaustive_dependencies .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "react-hooks/rules-of-hooks" => { let group = 
rules.correctness.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_hook_at_top_level .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "react-refresh/only-export-components" => { if !options.include_inspired { @@ -1437,14 +1908,18 @@ pub(crate) fn migrate_eslint_any_rule( } let group = rules.nursery.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_component_export_only_modules .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "react/button-has-type" => { let group = rules.a11y.get_or_insert_with(Default::default); - let rule = group.use_button_type.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_button_type + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "react/jsx-boolean-value" => { if !options.include_inspired { @@ -1452,8 +1927,11 @@ pub(crate) fn migrate_eslint_any_rule( return false; } let group = rules.style.get_or_insert_with(Default::default); - let rule = group.no_implicit_boolean.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_implicit_boolean + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "react/jsx-curly-brace-presence" => { if !options.include_inspired { @@ -1465,115 +1943,154 @@ pub(crate) fn migrate_eslint_any_rule( } let group = rules.nursery.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_consistent_curly_braces .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "react/jsx-fragments" => { let group = rules.style.get_or_insert_with(Default::default); - let rule = group.use_fragment_syntax.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_fragment_syntax + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "react/jsx-key" => { let group = rules.correctness.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_jsx_key_in_iterable .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "react/jsx-no-comment-textnodes" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.no_comment_text.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_comment_text + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "react/jsx-no-duplicate-props" => { let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_duplicate_jsx_props .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "react/jsx-no-target-blank" => { let group = rules.a11y.get_or_insert_with(Default::default); - let rule = group.no_blank_target.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_blank_target + 
.get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "react/jsx-no-useless-fragment" => { let group = rules.complexity.get_or_insert_with(Default::default); - let rule = group.no_useless_fragments.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_useless_fragments + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "react/no-array-index-key" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.no_array_index_key.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_array_index_key + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "react/no-children-prop" => { let group = rules.correctness.get_or_insert_with(Default::default); - let rule = group.no_children_prop.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_children_prop + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "react/no-danger" => { let group = rules.security.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_dangerously_set_inner_html .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "react/no-danger-with-children" => { let group = rules.security.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_dangerously_set_inner_html_with_children .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "react/void-dom-elements-no-children" => { let group = rules.correctness.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_void_elements_with_children .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "require-await" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.use_await.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_await + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "require-yield" => { let group = rules.correctness.get_or_insert_with(Default::default); - let rule = group.use_yield.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_yield + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "solidjs/no-react-specific-props" => { let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_react_specific_props .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "sonarjs/cognitive-complexity" => { let group = rules.complexity.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_excessive_cognitive_complexity .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "sonarjs/prefer-while" => { - let group = 
rules.style.get_or_insert_with(Default::default); - let rule = group.use_while.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let group = rules.complexity.get_or_insert_with(Default::default); + let rule = group + .unwrap_group_as_mut() + .use_while + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "unicorn/error-message" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.use_error_message.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_error_message + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "unicorn/explicit-length-check" => { let group = rules.style.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_explicit_length_check .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "unicorn/filename-case" => { if !options.include_inspired { @@ -1582,77 +2099,107 @@ pub(crate) fn migrate_eslint_any_rule( } let group = rules.style.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_filenaming_convention .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "unicorn/new-for-builtins" => { let group = rules.correctness.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_invalid_builtin_instantiation .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "unicorn/no-array-for-each" => { let group = rules.complexity.get_or_insert_with(Default::default); - let rule = group.no_for_each.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_for_each + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "unicorn/no-document-cookie" => { if !options.include_nursery { return false; } let group = rules.nursery.get_or_insert_with(Default::default); - let rule = group.no_document_cookie.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_document_cookie + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "unicorn/no-for-loop" => { let group = rules.style.get_or_insert_with(Default::default); - let rule = group.use_for_of.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_for_of + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "unicorn/no-instanceof-array" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.use_is_array.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_is_array + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "unicorn/no-lonely-if" => { if !options.include_nursery { return false; } let group = rules.nursery.get_or_insert_with(Default::default); - let rule = group.use_collapsed_if.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + 
.use_collapsed_if + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "unicorn/no-static-only-class" => { let group = rules.complexity.get_or_insert_with(Default::default); - let rule = group.no_static_only_class.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_static_only_class + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "unicorn/no-thenable" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.no_then_property.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_then_property + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "unicorn/no-useless-switch-case" => { let group = rules.complexity.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_useless_switch_case .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "unicorn/no-useless-undefined" => { if !options.include_nursery { return false; } let group = rules.nursery.get_or_insert_with(Default::default); - let rule = group.no_useless_undefined.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_useless_undefined + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "unicorn/prefer-array-flat-map" => { let group = rules.complexity.get_or_insert_with(Default::default); - let rule = group.use_flat_map.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_flat_map + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "unicorn/prefer-at" => { if !options.include_inspired { @@ -1663,13 +2210,19 @@ pub(crate) fn migrate_eslint_any_rule( return false; } let group = rules.nursery.get_or_insert_with(Default::default); - let rule = group.use_at_index.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_at_index + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "unicorn/prefer-date-now" => { let group = rules.complexity.get_or_insert_with(Default::default); - let rule = group.use_date_now.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_date_now + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "unicorn/prefer-module" => { if !options.include_inspired { @@ -1681,74 +2234,104 @@ pub(crate) fn migrate_eslint_any_rule( } let group = rules.nursery.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .no_global_dirname_filename .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "unicorn/prefer-node-protocol" => { let group = rules.style.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_nodejs_import_protocol .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "unicorn/prefer-number-properties" => { let group = 
rules.style.get_or_insert_with(Default::default); - let rule = group.use_number_namespace.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_number_namespace + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "unicorn/prefer-string-slice" => { if !options.include_nursery { return false; } let group = rules.nursery.get_or_insert_with(Default::default); - let rule = group.no_substr.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_substr + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "unicorn/prefer-string-trim-start-end" => { if !options.include_nursery { return false; } let group = rules.nursery.get_or_insert_with(Default::default); - let rule = group.use_trim_start_end.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_trim_start_end + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "unicorn/require-number-to-fixed-digits-argument" => { let group = rules.suspicious.get_or_insert_with(Default::default); let rule = group + .unwrap_group_as_mut() .use_number_to_fixed_digits_argument .get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + rule.set_level(rule.level().max(rule_severity.into())); } "unicorn/throw-new-error" => { let group = rules.style.get_or_insert_with(Default::default); - let rule = group.use_throw_new_error.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_throw_new_error + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "unused-imports/no-unused-imports" => { let group = rules.correctness.get_or_insert_with(Default::default); - let rule = group.no_unused_imports.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_unused_imports + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "unused-imports/no-unused-vars" => { let group = rules.correctness.get_or_insert_with(Default::default); - let rule = group.no_unused_variables.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .no_unused_variables + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "use-isnan" => { let group = rules.correctness.get_or_insert_with(Default::default); - let rule = group.use_is_nan.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_is_nan + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "valid-typeof" => { let group = rules.suspicious.get_or_insert_with(Default::default); - let rule = group.use_valid_typeof.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group + .unwrap_group_as_mut() + .use_valid_typeof + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } "yoda" => { let group = rules.style.get_or_insert_with(Default::default); - let rule = group.no_yoda_expression.get_or_insert(Default::default()); - rule.set_level(rule_severity.into()); + let rule = group 
+ .unwrap_group_as_mut() + .no_yoda_expression + .get_or_insert(Default::default()); + rule.set_level(rule.level().max(rule_severity.into())); } _ => { return false; diff --git a/crates/biome_cli/src/execute/migrate/eslint_eslint.rs b/crates/biome_cli/src/execute/migrate/eslint_eslint.rs index 16f0031ac5f5..59bb19119ed9 100644 --- a/crates/biome_cli/src/execute/migrate/eslint_eslint.rs +++ b/crates/biome_cli/src/execute/migrate/eslint_eslint.rs @@ -1,10 +1,9 @@ use biome_deserialize::{ Deserializable, DeserializableType, DeserializableTypes, DeserializableValue, - DeserializationDiagnostic, DeserializationVisitor, Merge, + DeserializationContext, DeserializationDiagnostic, DeserializationVisitor, Merge, }; use biome_deserialize_macros::Deserializable; use biome_rowan::TextRange; -use indexmap::IndexSet; use rustc_hash::FxHashMap; use std::borrow::Cow; use std::hash::{Hash, Hasher}; @@ -43,9 +42,9 @@ pub(crate) struct FlatConfigData(pub(crate) Vec); #[derive(Debug, Default, Deserializable)] #[deserializable(unknown_fields = "allow")] pub(crate) struct FlatConfigObject { - pub(crate) files: ShorthandVec, + pub(crate) files: Vec>, /// The glob patterns that ignore to lint. - pub(crate) ignores: ShorthandVec, + pub(crate) ignores: Vec>, // using `Option` is important to distinguish a global ignores from a config objerct pub(crate) language_options: Option, // using `Option` is important to distinguish a global ignores from a config objerct @@ -116,27 +115,26 @@ impl Merge for LegacyConfigData { } #[derive(Debug, Default)] -pub(crate) struct IgnorePattern(pub(crate) String); +pub(crate) struct IgnorePattern(pub(crate) Box); impl Deref for IgnorePattern { - type Target = String; + type Target = Box; fn deref(&self) -> &Self::Target { &self.0 } } +impl AsRef for IgnorePattern { + fn as_ref(&self) -> &str { + self.0.as_ref() + } +} impl biome_deserialize::Deserializable for IgnorePattern { fn deserialize( + ctx: &mut impl DeserializationContext, value: &impl DeserializableValue, name: &str, - diagnostics: &mut Vec, ) -> Option { - let s = biome_deserialize::Text::deserialize(value, name, diagnostics)?; - match ignorefile::convert_pattern(s.text()) { - Ok(pattern) => Some(Self(pattern)), - Err(msg) => { - diagnostics.push(DeserializationDiagnostic::new(msg).with_range(value.range())); - None - } - } + let s = biome_deserialize::Text::deserialize(ctx, value, name)?; + Some(Self(ignorefile::convert_pattern(s.text()).into_boxed_str())) } } @@ -149,16 +147,16 @@ pub(crate) struct PluginExport { } #[derive(Debug, Default, Deserializable)] -pub(crate) struct Globals(pub(crate) FxHashMap); +pub(crate) struct Globals(pub(crate) FxHashMap, GlobalConf>); impl Globals { - pub(crate) fn enabled(self) -> impl Iterator { + pub(crate) fn enabled(self) -> impl Iterator> { self.0.into_iter().filter_map(|(global_name, global_conf)| { global_conf.is_enabled().then_some(global_name) }) } } impl Deref for Globals { - type Target = FxHashMap; + type Target = FxHashMap, GlobalConf>; fn deref(&self) -> &Self::Target { &self.0 } @@ -184,14 +182,14 @@ impl GlobalConf { } impl Deserializable for GlobalConf { fn deserialize( + ctx: &mut impl DeserializationContext, value: &impl biome_deserialize::DeserializableValue, name: &str, - diagnostics: &mut Vec, ) -> Option { if value.visitable_type()? 
== DeserializableType::Str { - Deserializable::deserialize(value, name, diagnostics).map(Self::Qualifier) + Deserializable::deserialize(ctx, value, name).map(Self::Qualifier) } else { - Deserializable::deserialize(value, name, diagnostics).map(Self::Flag) + Deserializable::deserialize(ctx, value, name).map(Self::Flag) } } } @@ -208,12 +206,12 @@ pub(crate) enum GlobalConfQualifier { #[derive(Debug, Default, Deserializable)] #[deserializable(unknown_fields = "allow")] pub(crate) struct OverrideConfigData { - pub(crate) extends: ShorthandVec, + pub(crate) extends: ShorthandVec>, pub(crate) globals: Globals, /// The glob patterns for excluded files. - pub(crate) excluded_files: ShorthandVec, + pub(crate) excluded_files: ShorthandVec>, /// The glob patterns for target files. - pub(crate) files: ShorthandVec, + pub(crate) files: ShorthandVec>, pub(crate) rules: Rules, } @@ -240,11 +238,6 @@ impl DerefMut for ShorthandVec { &mut self.0 } } -impl FromIterator for ShorthandVec { - fn from_iter>(iter: I) -> Self { - Self(iter.into_iter().collect()) - } -} impl IntoIterator for ShorthandVec { type Item = T; type IntoIter = vec::IntoIter; @@ -254,20 +247,15 @@ impl IntoIterator for ShorthandVec { } impl Deserializable for ShorthandVec { fn deserialize( + ctx: &mut impl DeserializationContext, value: &impl DeserializableValue, name: &str, - diagnostics: &mut Vec, ) -> Option { Some(ShorthandVec( if value.visitable_type()? == DeserializableType::Array { - >>::deserialize(value, name, diagnostics)? - .into_iter() - .flatten() - .collect() + Deserializable::deserialize(ctx, value, name)? } else { - >::deserialize(value, name, diagnostics)? - .into_iter() - .collect() + Vec::from_iter([Deserializable::deserialize(ctx, value, name)?]) }, )) } @@ -316,9 +304,9 @@ impl RuleConf { } impl Deserializable for RuleConf { fn deserialize( + ctx: &mut impl DeserializationContext, value: &impl biome_deserialize::DeserializableValue, name: &str, - diagnostics: &mut Vec, ) -> Option { struct Visitor(PhantomData<(T, U)>); impl DeserializationVisitor @@ -328,26 +316,26 @@ impl Deserializable fo const EXPECTED_TYPE: DeserializableTypes = DeserializableTypes::ARRAY; fn visit_array( self, + ctx: &mut impl DeserializationContext, values: impl Iterator>, range: TextRange, _name: &str, - diagnostics: &mut Vec, ) -> Option { let mut values = values.flatten(); let Some(first_value) = values.next() else { - diagnostics.push( + ctx.report( DeserializationDiagnostic::new("A severity is expected.").with_range(range), ); return None; }; - let severity = Deserializable::deserialize(&first_value, "", diagnostics)?; + let severity = Deserializable::deserialize(ctx, &first_value, "")?; if TypeId::of::() == TypeId::of::<()>() { return Some(RuleConf::Severity(severity)); } let Some(second_value) = values.next() else { return Some(RuleConf::Severity(severity)); }; - let Some(option) = T::deserialize(&second_value, "", diagnostics) else { + let Some(option) = T::deserialize(ctx, &second_value, "") else { // Recover by ignoring the failed deserialization return Some(RuleConf::Severity(severity)); }; @@ -355,21 +343,21 @@ impl Deserializable fo return Some(RuleConf::Option(severity, option)); }; if TypeId::of::() != TypeId::of::<()>() { - if let Some(option2) = U::deserialize(&third_value, "", diagnostics) { + if let Some(option2) = U::deserialize(ctx, &third_value, "") { return Some(RuleConf::Options(severity, option, option2)); } else { // Recover by ignoring the failed deserialization return Some(RuleConf::Option(severity, option)); } } - 
let Some(option2) = T::deserialize(&third_value, "", diagnostics) else { + let Some(option2) = T::deserialize(ctx, &third_value, "") else { // Recover by ignoring the failed deserialization return Some(RuleConf::Option(severity, option)); }; let mut spread = Vec::new(); spread.push(option); spread.push(option2); - spread.extend(values.filter_map(|val| T::deserialize(&val, "", diagnostics))); + spread.extend(values.filter_map(|val| T::deserialize(ctx, &val, ""))); Some(RuleConf::Spread(severity, spread)) } } @@ -377,9 +365,9 @@ impl Deserializable fo value.visitable_type()?, DeserializableType::Number | DeserializableType::Str ) { - Deserializable::deserialize(value, name, diagnostics).map(RuleConf::Severity) + Deserializable::deserialize(ctx, value, name).map(RuleConf::Severity) } else { - value.deserialize(Visitor(PhantomData), name, diagnostics) + value.deserialize(ctx, Visitor(PhantomData), name) } } } @@ -427,42 +415,36 @@ enum NumberOrString { } impl Deserializable for NumberOrString { fn deserialize( + ctx: &mut impl DeserializationContext, value: &impl biome_deserialize::DeserializableValue, name: &str, - diagnostics: &mut Vec, ) -> Option { Some(if value.visitable_type()? == DeserializableType::Str { - Self::String(Deserializable::deserialize(value, name, diagnostics)?) + Self::String(Deserializable::deserialize(ctx, value, name)?) } else { - Self::Number(Deserializable::deserialize(value, name, diagnostics)?) + Self::Number(Deserializable::deserialize(ctx, value, name)?) }) } } #[derive(Debug, Default)] -pub(crate) struct Rules( - // We use `IndexSet` instead of `HashSet` to preserve the order. - // Keeping the order is important because several ESLint rules can have - // the same equivalent Biome rule. - // The severity level of the last one is thus used. 
- pub(crate) IndexSet, -); +pub(crate) struct Rules(pub(crate) rustc_hash::FxHashSet); impl Merge for Rules { fn merge_with(&mut self, other: Self) { self.0.extend(other.0); } } impl Deref for Rules { - type Target = IndexSet; + type Target = rustc_hash::FxHashSet; fn deref(&self) -> &Self::Target { &self.0 } } impl Deserializable for Rules { fn deserialize( + ctx: &mut impl DeserializationContext, value: &impl biome_deserialize::DeserializableValue, name: &str, - diagnostics: &mut Vec, ) -> Option { struct Visitor; impl DeserializationVisitor for Visitor { @@ -470,6 +452,7 @@ impl Deserializable for Rules { const EXPECTED_TYPE: DeserializableTypes = DeserializableTypes::MAP; fn visit_map( self, + ctx: &mut impl DeserializationContext, members: impl Iterator< Item = Option<( impl biome_deserialize::DeserializableValue, @@ -478,57 +461,54 @@ impl Deserializable for Rules { >, _range: biome_rowan::TextRange, name: &str, - diagnostics: &mut Vec, ) -> Option { use biome_deserialize::Text; - let mut result = IndexSet::default(); + let mut result = rustc_hash::FxHashSet::default(); for (key, value) in members.flatten() { - let Some(rule_name) = Text::deserialize(&key, "", diagnostics) else { + let Some(rule_name) = Text::deserialize(ctx, &key, "") else { continue; }; match rule_name.text() { // Eslint rules with options that we handle "no-console" => { - if let Some(conf) = RuleConf::deserialize(&value, name, diagnostics) { + if let Some(conf) = RuleConf::deserialize(ctx, &value, name) { result.insert(Rule::NoConsole(conf)); } } "no-restricted-globals" => { - if let Some(conf) = RuleConf::deserialize(&value, name, diagnostics) { + if let Some(conf) = RuleConf::deserialize(ctx, &value, name) { result.insert(Rule::NoRestrictedGlobals(conf)); } } // Eslint plugin rules with options that we handle "jsx-a11y/aria-role" => { - if let Some(conf) = RuleConf::deserialize(&value, name, diagnostics) { + if let Some(conf) = RuleConf::deserialize(ctx, &value, name) { result.insert(Rule::Jsxa11yArioaRoles(conf)); } } "@typescript-eslint/array-type" => { - if let Some(conf) = RuleConf::deserialize(&value, name, diagnostics) { + if let Some(conf) = RuleConf::deserialize(ctx, &value, name) { result.insert(Rule::TypeScriptArrayType(conf)); } } "@typescript-eslint/explicit-member-accessibility" => { - if let Some(conf) = RuleConf::deserialize(&value, name, diagnostics) { + if let Some(conf) = RuleConf::deserialize(ctx, &value, name) { result.insert(Rule::TypeScriptExplicitMemberAccessibility(conf)); } } "@typescript-eslint/naming-convention" => { - if let Some(conf) = RuleConf::deserialize(&value, name, diagnostics) { + if let Some(conf) = RuleConf::deserialize(ctx, &value, name) { result.insert(Rule::TypeScriptNamingConvention(conf)); } } "unicorn/filename-case" => { - if let Some(conf) = RuleConf::deserialize(&value, name, diagnostics) { + if let Some(conf) = RuleConf::deserialize(ctx, &value, name) { result.insert(Rule::UnicornFilenameCase(conf)); } } // Other rules rule_name => { - if let Some(conf) = - RuleConf::<()>::deserialize(&value, name, diagnostics) - { + if let Some(conf) = RuleConf::<()>::deserialize(ctx, &value, name) { result.insert(Rule::Any( Cow::Owned(rule_name.to_string()), conf.severity(), @@ -540,7 +520,7 @@ impl Deserializable for Rules { Some(Rules(result)) } } - value.deserialize(Visitor, name, diagnostics) + value.deserialize(ctx, Visitor, name) } } @@ -570,15 +550,14 @@ impl NoRestrictedGlobal { } impl Deserializable for NoRestrictedGlobal { fn deserialize( + ctx: &mut impl 
DeserializationContext, value: &impl DeserializableValue, name: &str, - diagnostics: &mut Vec, ) -> Option { if value.visitable_type()? == DeserializableType::Str { - Deserializable::deserialize(value, name, diagnostics).map(NoRestrictedGlobal::Plain) + Deserializable::deserialize(ctx, value, name).map(NoRestrictedGlobal::Plain) } else { - Deserializable::deserialize(value, name, diagnostics) - .map(NoRestrictedGlobal::WithMessage) + Deserializable::deserialize(ctx, value, name).map(NoRestrictedGlobal::WithMessage) } } } diff --git a/crates/biome_cli/src/execute/migrate/eslint_to_biome.rs b/crates/biome_cli/src/execute/migrate/eslint_to_biome.rs index 1ac08837eaa5..5937009d2aee 100644 --- a/crates/biome_cli/src/execute/migrate/eslint_to_biome.rs +++ b/crates/biome_cli/src/execute/migrate/eslint_to_biome.rs @@ -1,5 +1,6 @@ +use biome_configuration::analyzer::SeverityOrGroup; use biome_configuration::{self as biome_config}; -use biome_deserialize::{Merge, StringSet}; +use biome_deserialize::Merge; use biome_js_analyze::lint::style::no_restricted_globals; use super::{eslint_any_rule_to_biome::migrate_eslint_any_rule, eslint_eslint, eslint_typescript}; @@ -30,7 +31,7 @@ impl eslint_eslint::AnyConfigData { pub(crate) fn into_biome_config( self, options: &MigrationOptions, - ) -> (biome_config::PartialConfiguration, MigrationResults) { + ) -> (biome_config::Configuration, MigrationResults) { match self { Self::Flat(config) => config.into_biome_config(options), Self::Legacy(config) => config.into_biome_config(options), @@ -42,10 +43,10 @@ impl eslint_eslint::FlatConfigData { pub(crate) fn into_biome_config( self, options: &MigrationOptions, - ) -> (biome_config::PartialConfiguration, MigrationResults) { + ) -> (biome_config::Configuration, MigrationResults) { let mut results = MigrationResults::default(); - let mut biome_config = biome_config::PartialConfiguration::default(); - let mut linter = biome_config::PartialLinterConfiguration::default(); + let mut biome_config = biome_config::Configuration::default(); + let mut linter = biome_config::LinterConfiguration::default(); let mut overrides = biome_config::Overrides::default(); let global_config_object = if self.0.len() == 1 { // If there is a single config object, then we use it as the global config @@ -62,20 +63,19 @@ impl eslint_eslint::FlatConfigData { } else { let mut override_pat = biome_config::OverridePattern::default(); if let Some(language_options) = flat_config_object.language_options { - let globals = language_options.globals.enabled().collect::(); - let js_config = biome_config::PartialJavascriptConfiguration { + let globals = language_options + .globals + .enabled() + .collect::>(); + let js_config = biome_config::JsConfiguration { globals: Some(globals), ..Default::default() }; override_pat.javascript = Some(js_config) } - if !flat_config_object.ignores.is_empty() { - override_pat.ignore = - Some(flat_config_object.ignores.into_iter().collect()); - } - if !flat_config_object.files.is_empty() { - override_pat.include = Some(flat_config_object.files.into_iter().collect()); - } + let includes = + to_biome_includes(&flat_config_object.files, &flat_config_object.ignores); + override_pat.includes = (!includes.is_empty()).then_some(includes); if let Some(rules) = flat_config_object.rules { if !rules.is_empty() { override_pat.linter = Some(biome_config::OverrideLinterConfiguration { @@ -98,8 +98,11 @@ impl eslint_eslint::FlatConfigData { biome_config::Rules::default() }; if let Some(language_options) = 
global_config_object.language_options { - let globals = language_options.globals.enabled().collect::(); - let js_config = biome_config::PartialJavascriptConfiguration { + let globals = language_options + .globals + .enabled() + .collect::>(); + let js_config = biome_config::JsConfiguration { globals: Some(globals), ..Default::default() }; @@ -107,12 +110,9 @@ impl eslint_eslint::FlatConfigData { } rules.recommended = Some(false); linter.rules = Some(rules); - if !global_config_object.ignores.is_empty() { - linter.ignore = Some(global_config_object.ignores.into_iter().collect()); - } - if !global_config_object.files.is_empty() { - linter.include = Some(global_config_object.files.into_iter().collect()); - } + let includes = + to_biome_includes(&global_config_object.files, &global_config_object.ignores); + linter.includes = (!includes.is_empty()).then_some(includes); biome_config.linter = Some(linter); (biome_config, results) } @@ -122,48 +122,40 @@ impl eslint_eslint::LegacyConfigData { pub(crate) fn into_biome_config( self, options: &MigrationOptions, - ) -> (biome_config::PartialConfiguration, MigrationResults) { + ) -> (biome_config::Configuration, MigrationResults) { let mut results = MigrationResults::default(); - let mut biome_config = biome_config::PartialConfiguration::default(); + let mut biome_config = biome_config::Configuration::default(); if !self.globals.is_empty() { - let globals = self.globals.enabled().collect::(); - let js_config = biome_config::PartialJavascriptConfiguration { + let globals = self.globals.enabled().collect::>(); + let js_config = biome_config::JsConfiguration { globals: Some(globals), ..Default::default() }; biome_config.javascript = Some(js_config) } - let mut linter = biome_config::PartialLinterConfiguration::default(); + let mut linter = biome_config::LinterConfiguration::default(); let mut rules = self.rules.into_biome_rules(options, &mut results); rules.recommended = Some(false); linter.rules = Some(rules); - if !self.ignore_patterns.is_empty() { - let ignore = self - .ignore_patterns - .into_iter() - .map(|p| p.0) - .collect::(); - linter.ignore = Some(ignore); - } + let includes = to_biome_includes(&[] as &[&str], self.ignore_patterns.as_slice()); + linter.includes = (!includes.is_empty()).then_some(includes); if !self.overrides.is_empty() { let mut overrides = biome_config::Overrides::default(); for override_elt in self.overrides { let mut override_pattern = biome_config::OverridePattern::default(); if !override_elt.globals.is_empty() { - let globals = override_elt.globals.enabled().collect::(); - let js_config = biome_config::PartialJavascriptConfiguration { + let globals = override_elt + .globals + .enabled() + .collect::>(); + let js_config = biome_config::JsConfiguration { globals: Some(globals), ..Default::default() }; override_pattern.javascript = Some(js_config) } - if !override_elt.excluded_files.is_empty() { - override_pattern.ignore = - Some(override_elt.excluded_files.into_iter().collect()); - } - if !override_elt.files.is_empty() { - override_pattern.include = Some(override_elt.files.into_iter().collect()); - } + let includes = to_biome_includes(&override_elt.files, &override_elt.excluded_files); + override_pattern.includes = (!includes.is_empty()).then_some(includes); if !override_elt.rules.is_empty() { override_pattern.linter = Some(biome_config::OverrideLinterConfiguration { rules: Some(override_elt.rules.into_biome_rules(options, &mut results)), @@ -211,13 +203,15 @@ fn migrate_eslint_rule( if migrate_eslint_any_rule(rules, &name, 
conf.severity(), opts, results) { if let eslint_eslint::RuleConf::Option(severity, rule_options) = conf { let group = rules.suspicious.get_or_insert_with(Default::default); - group.no_console = Some(biome_config::RuleFixConfiguration::WithOptions( - biome_config::RuleWithFixOptions { - level: severity.into(), - fix: None, - options: Box::new((*rule_options).into()), - }, - )); + if let SeverityOrGroup::Group(group) = group { + group.no_console = Some(biome_config::RuleFixConfiguration::WithOptions( + biome_config::RuleWithFixOptions { + level: severity.into(), + fix: None, + options: Box::new((*rule_options).into()), + }, + )); + } } } } @@ -229,28 +223,37 @@ fn migrate_eslint_rule( .into_iter() .map(|g| g.into_name().into_boxed_str()); let group = rules.style.get_or_insert_with(Default::default); - group.no_restricted_globals = Some(biome_config::RuleConfiguration::WithOptions( - biome_config::RuleWithOptions { - level: severity.into(), - options: Box::new(no_restricted_globals::RestrictedGlobalsOptions { - denied_globals: globals.collect::>().into_boxed_slice(), - }), - }, - )); + if let SeverityOrGroup::Group(group) = group { + group.no_restricted_globals = + Some(biome_config::RuleConfiguration::WithOptions( + biome_config::RuleWithOptions { + level: severity.into(), + options: Box::new( + no_restricted_globals::RestrictedGlobalsOptions { + denied_globals: globals + .collect::>() + .into_boxed_slice(), + }, + ), + }, + )); + } } } eslint_eslint::Rule::Jsxa11yArioaRoles(conf) => { if migrate_eslint_any_rule(rules, &name, conf.severity(), opts, results) { if let eslint_eslint::RuleConf::Option(severity, rule_options) = conf { let group = rules.a11y.get_or_insert_with(Default::default); - group.use_valid_aria_role = - Some(biome_config::RuleFixConfiguration::WithOptions( - biome_config::RuleWithFixOptions { - level: severity.into(), - fix: None, - options: Box::new((*rule_options).into()), - }, - )); + if let SeverityOrGroup::Group(group) = group { + group.use_valid_aria_role = + Some(biome_config::RuleFixConfiguration::WithOptions( + biome_config::RuleWithFixOptions { + level: severity.into(), + fix: None, + options: Box::new((*rule_options).into()), + }, + )); + } } } } @@ -258,14 +261,16 @@ fn migrate_eslint_rule( if migrate_eslint_any_rule(rules, &name, conf.severity(), opts, results) { if let eslint_eslint::RuleConf::Option(severity, rule_options) = conf { let group = rules.style.get_or_insert_with(Default::default); - group.use_consistent_array_type = - Some(biome_config::RuleFixConfiguration::WithOptions( - biome_config::RuleWithFixOptions { - level: severity.into(), - fix: None, - options: rule_options.into(), - }, - )); + if let SeverityOrGroup::Group(group) = group { + group.use_consistent_array_type = + Some(biome_config::RuleFixConfiguration::WithOptions( + biome_config::RuleWithFixOptions { + level: severity.into(), + fix: None, + options: rule_options.into(), + }, + )); + } } } } @@ -273,13 +278,15 @@ fn migrate_eslint_rule( if migrate_eslint_any_rule(rules, &name, conf.severity(), opts, results) { if let eslint_eslint::RuleConf::Option(severity, rule_options) = conf { let group = rules.nursery.get_or_insert_with(Default::default); - group.use_consistent_member_accessibility = - Some(biome_config::RuleConfiguration::WithOptions( - biome_config::RuleWithOptions { - level: severity.into(), - options: rule_options.into(), - }, - )); + if let SeverityOrGroup::Group(group) = group { + group.use_consistent_member_accessibility = + Some(biome_config::RuleConfiguration::WithOptions( 
+ biome_config::RuleWithOptions { + level: severity.into(), + options: rule_options.into(), + }, + )); + } } } } @@ -290,30 +297,61 @@ fn migrate_eslint_rule( conf.into_vec().into_iter().map(|v| *v), ); let group = rules.style.get_or_insert_with(Default::default); - group.use_naming_convention = - Some(biome_config::RuleFixConfiguration::WithOptions( - biome_config::RuleWithFixOptions { - level: severity.into(), - fix: None, - options: options.into(), - }, - )); + if let SeverityOrGroup::Group(group) = group { + group.use_naming_convention = + Some(biome_config::RuleFixConfiguration::WithOptions( + biome_config::RuleWithFixOptions { + level: severity.into(), + fix: None, + options: options.into(), + }, + )); + } } } eslint_eslint::Rule::UnicornFilenameCase(conf) => { if migrate_eslint_any_rule(rules, &name, conf.severity(), opts, results) { let group = rules.style.get_or_insert_with(Default::default); - group.use_filenaming_convention = Some( - biome_config::RuleConfiguration::WithOptions(biome_config::RuleWithOptions { - level: conf.severity().into(), - options: Box::new(conf.option_or_default().into()), - }), - ); + if let SeverityOrGroup::Group(group) = group { + group.use_filenaming_convention = + Some(biome_config::RuleConfiguration::WithOptions( + biome_config::RuleWithOptions { + level: conf.severity().into(), + options: Box::new(conf.option_or_default().into()), + }, + )); + } } } } } +fn to_biome_includes( + files: &[impl AsRef], + ignores: &[impl AsRef], +) -> Vec { + let mut includes = Vec::new(); + if !files.is_empty() { + includes.extend(files.iter().filter_map(|glob| glob.as_ref().parse().ok())); + } else if let Ok(glob) = "**".parse() { + includes.push(glob); + } + if !ignores.is_empty() { + includes.extend(ignores.iter().filter_map(|glob| { + // ESLint supports negation: https://eslint.org/docs/latest/use/configure/ignore#unignoring-files-and-directories + if let Some(rest) = glob.as_ref().strip_prefix('!') { + rest.parse() + } else { + glob.as_ref() + .parse() + .map(|glob: biome_glob::Glob| glob.negated()) + } + .ok() + })); + } + includes +} + #[cfg(test)] mod tests { use super::*; @@ -323,8 +361,8 @@ mod tests { #[test] fn flat_config_single_config_object() { let flat_config = FlatConfigData(vec![FlatConfigObject { - files: ["*.js".to_string()].into_iter().collect(), - ignores: ["*.test.js".to_string()].into_iter().collect(), + files: vec!["*.js".into()], + ignores: vec!["*.test.js".into()], language_options: None, rules: Some(Rules( [Rule::Any(Cow::Borrowed("eqeqeq"), Severity::Error)] @@ -337,15 +375,11 @@ mod tests { assert!(biome_config.files.is_none()); assert!(biome_config.overrides.is_none()); assert!(biome_config.formatter.is_none()); - assert!(biome_config.organize_imports.is_none()); + assert!(biome_config.assist.is_none()); let linter = biome_config.linter.unwrap(); assert_eq!( - linter.include, - Some(["*.js".to_string()].into_iter().collect()) - ); - assert_eq!( - linter.ignore, - Some(["*.test.js".to_string()].into_iter().collect()) + linter.includes.unwrap(), + ["*.js".parse().unwrap(), "!*.test.js".parse().unwrap()], ); assert!(linter.rules.is_some()); } @@ -354,14 +388,14 @@ mod tests { fn flat_config_multiple_config_object() { let flat_config = FlatConfigData(vec![ FlatConfigObject { - files: ShorthandVec::default(), - ignores: ["*.test.js".to_string()].into_iter().collect(), + files: vec![], + ignores: vec!["*.test.js".into()], language_options: None, rules: None, }, FlatConfigObject { - files: ShorthandVec::default(), - ignores: 
ShorthandVec::default(), + files: vec![], + ignores: vec![], language_options: None, rules: Some(Rules( [Rule::Any(Cow::Borrowed("eqeqeq"), Severity::Error)] @@ -370,14 +404,14 @@ mod tests { )), }, FlatConfigObject { - files: ShorthandVec::default(), - ignores: ["*.spec.js".to_string()].into_iter().collect(), + files: vec![], + ignores: vec!["*.spec.js".into()], language_options: None, rules: None, }, FlatConfigObject { - files: ["*.ts".to_string()].into_iter().collect(), - ignores: ShorthandVec::default(), + files: vec!["*.ts".into()], + ignores: vec![], language_options: None, rules: Some(Rules( [Rule::Any(Cow::Borrowed("eqeqeq"), Severity::Off)] @@ -390,19 +424,24 @@ mod tests { assert!(biome_config.files.is_none()); assert!(biome_config.formatter.is_none()); - assert!(biome_config.organize_imports.is_none()); + assert!(biome_config.assist.is_none()); let linter = biome_config.linter.unwrap(); - assert!(linter.include.is_none()); assert_eq!( - linter.ignore, - Some( - ["*.test.js".to_string(), "*.spec.js".to_string()] - .into_iter() - .collect() - ) + linter.includes.unwrap(), + [ + "**".parse().unwrap(), + "!*.test.js".parse().unwrap(), + "!*.spec.js".parse().unwrap() + ] ); assert_eq!( - linter.rules.unwrap().suspicious.unwrap().no_double_equals, + linter + .rules + .unwrap() + .suspicious + .unwrap() + .unwrap_group() + .no_double_equals, Some(biome_config::RuleFixConfiguration::Plain( biome_config::RulePlainConfiguration::Error )) @@ -410,11 +449,7 @@ mod tests { let overrides = biome_config.overrides.unwrap(); assert_eq!(overrides.0.len(), 1); let override0 = overrides.0.into_iter().next().unwrap(); - assert_eq!( - override0.include, - Some(["*.ts".to_string()].into_iter().collect()) - ); - assert!(override0.ignore.is_none()); + assert_eq!(override0.includes.unwrap(), ["*.ts".parse().unwrap()],); assert_eq!( override0 .linter @@ -423,6 +458,7 @@ mod tests { .unwrap() .suspicious .unwrap() + .unwrap_group() .no_double_equals, Some(biome_config::RuleFixConfiguration::Plain( biome_config::RulePlainConfiguration::Off diff --git a/crates/biome_cli/src/execute/migrate/eslint_typescript.rs b/crates/biome_cli/src/execute/migrate/eslint_typescript.rs index e79f177688ab..7faa7091ef40 100644 --- a/crates/biome_cli/src/execute/migrate/eslint_typescript.rs +++ b/crates/biome_cli/src/execute/migrate/eslint_typescript.rs @@ -499,9 +499,9 @@ impl NamingConventionSelection { pub(crate) struct Anything; impl Deserializable for Anything { fn deserialize( + _ctx: &mut impl biome_deserialize::DeserializationContext, _value: &impl biome_deserialize::DeserializableValue, _name: &str, - _diagnostics: &mut Vec, ) -> Option { Some(Anything) } diff --git a/crates/biome_cli/src/execute/migrate/ignorefile.rs b/crates/biome_cli/src/execute/migrate/ignorefile.rs index 8262bb5fa00e..0bf7ceb39071 100644 --- a/crates/biome_cli/src/execute/migrate/ignorefile.rs +++ b/crates/biome_cli/src/execute/migrate/ignorefile.rs @@ -1,17 +1,15 @@ -use std::{io, path::Path}; - use biome_fs::{FileSystem, OpenOptions}; -use biome_service::DynRef; -use indexmap::IndexSet; +use camino::Utf8Path; +use std::io; /// Read an ignore file that follows gitignore pattern syntax, /// and turn them into a list of UNIX glob patterns. 
pub(crate) fn read_ignore_file( - fs: &DynRef<'_, dyn FileSystem>, + fs: &dyn FileSystem, ignore_filename: &str, ) -> io::Result { let mut file = fs.open_with_options( - Path::new(ignore_filename), + Utf8Path::new(ignore_filename), OpenOptions::default().read(true), )?; let mut content = String::new(); @@ -21,13 +19,14 @@ pub(crate) fn read_ignore_file( #[derive(Debug)] pub(crate) struct IgnorePatterns { - pub(crate) patterns: IndexSet, - pub(crate) has_negated_patterns: bool, + pub(crate) patterns: Box<[biome_glob::Glob]>, } impl IgnorePatterns { pub(crate) fn from(content: &str) -> Self { - let mut has_negated_patterns = false; - let mut patterns = IndexSet::new(); + let mut patterns = Vec::new(); + if let Ok(glob) = "**".parse() { + patterns.push(glob); + } for line in content.lines() { // Trailing spaces are ignored let line = line.trim_end(); @@ -35,42 +34,35 @@ impl IgnorePatterns { if line.is_empty() || line.starts_with('#') { continue; } - match convert_pattern(line) { - Ok(pattern) => { - patterns.insert(pattern); - } - Err(_) => { - has_negated_patterns = true; - // Skip negated patterns because we don't support them. - continue; - } + if let Ok(glob) = convert_pattern(line).parse() { + patterns.push(glob); } } IgnorePatterns { - patterns, - has_negated_patterns, + patterns: patterns.into_boxed_slice(), } } } -pub(crate) fn convert_pattern(line: &str) -> Result { - if line.starts_with('!') { - // Skip negated patterns because we don't support them. - return Err("Negated patterns are not supported."); - } +pub(crate) fn convert_pattern(line: &str) -> String { + let (negation, line) = if let Some(rest) = line.strip_prefix('!') { + ("", rest) + } else { + ("!", line) + }; let result = if let Some(stripped_line) = line.strip_prefix('/') { - // Patterns tha tstarts with `/` are relative to the ignore file - format!("./{stripped_line}") + // Patterns that starts with `/` are relative to the ignore file + format!("{negation}./{stripped_line}") } else if line.find('/').is_some_and(|index| index < (line.len() - 1)) || line == "**" || line == "**/" { // Patterns that includes at least one `/` in the middle are relatives paths - line.to_string() + format!("{negation}{line}") } else { - format!("**/{line}") + format!("{negation}**/{line}") }; - Ok(result) + result } #[cfg(test)] @@ -82,8 +74,7 @@ mod tests { const IGNORE_FILE_CONTENT: &str = r#""#; let result = IgnorePatterns::from(IGNORE_FILE_CONTENT); - assert!(!result.has_negated_patterns); - assert!(result.patterns.is_empty()); + assert_eq!(result.patterns.as_ref(), ["**".parse().unwrap(),]); } #[test] @@ -91,15 +82,14 @@ mod tests { const IGNORE_FILE_CONTENT: &str = r#" # Comment 1 # folloed by a blank line - + # Comment 2 # folloed by a blank line (trailing space are ignored) "#; let result = IgnorePatterns::from(IGNORE_FILE_CONTENT); - assert!(!result.has_negated_patterns); - assert!(result.patterns.is_empty()); + assert_eq!(result.patterns.as_ref(), ["**".parse().unwrap(),]); } #[test] @@ -114,18 +104,17 @@ dir/ "#; let result = IgnorePatterns::from(IGNORE_FILE_CONTENT); - assert!(!result.has_negated_patterns); assert_eq!( - result.patterns, + result.patterns.as_ref(), [ - "**/file-or-dir".to_string(), - "**/dir/".to_string(), - "**".to_string(), - "**/".to_string(), - "**/*".to_string(), - "**/*/".to_string(), + "**".parse().unwrap(), + "!**/file-or-dir".parse().unwrap(), + "!**/dir/".parse().unwrap(), + "!**".parse().unwrap(), + "!**/".parse().unwrap(), + "!**/*".parse().unwrap(), + "!**/*/".parse().unwrap(), ] - .into() ); } @@ 
-141,18 +130,17 @@ dir/subdir/ "#; let result = IgnorePatterns::from(IGNORE_FILE_CONTENT); - assert!(!result.has_negated_patterns); assert_eq!( - result.patterns, + result.patterns.as_ref(), [ - "dir/dubfile-or-subdir".to_string(), - "dir/subdir/".to_string(), - "**/*".to_string(), - "**/*/".to_string(), - "**/a/b".to_string(), - "**/a/b/".to_string(), + "**".parse().unwrap(), + "!dir/dubfile-or-subdir".parse().unwrap(), + "!dir/subdir/".parse().unwrap(), + "!**/*".parse().unwrap(), + "!**/*/".parse().unwrap(), + "!**/a/b".parse().unwrap(), + "!**/a/b/".parse().unwrap(), ] - .into() ); } @@ -168,18 +156,17 @@ dir/subdir/ "#; let result = IgnorePatterns::from(IGNORE_FILE_CONTENT); - assert!(!result.has_negated_patterns); assert_eq!( - result.patterns, + result.patterns.as_ref(), [ - "./dir/dubfile-or-subdir".to_string(), - "./dir/subdir/".to_string(), - "./**/*".to_string(), - "./**/*/".to_string(), - "./**/a/b".to_string(), - "./**/a/b/".to_string(), + "**".parse().unwrap(), + "!./dir/dubfile-or-subdir".parse().unwrap(), + "!./dir/subdir/".parse().unwrap(), + "!./**/*".parse().unwrap(), + "!./**/*/".parse().unwrap(), + "!./**/a/b".parse().unwrap(), + "!./**/a/b/".parse().unwrap(), ] - .into() ); } @@ -188,8 +175,10 @@ dir/subdir/ const IGNORE_FILE_CONTENT: &str = r#"!a"#; let result = IgnorePatterns::from(IGNORE_FILE_CONTENT); - assert!(result.has_negated_patterns); - assert!(result.patterns.is_empty()); + assert_eq!( + result.patterns.as_ref(), + ["**".parse().unwrap(), "**/a".parse().unwrap(),] + ); } #[test] @@ -199,11 +188,14 @@ dir/subdir/ "#; let result = IgnorePatterns::from(IGNORE_FILE_CONTENT); - assert!(!result.has_negated_patterns); assert_eq!( - result.patterns, - ["**/ # This is not a comment because there is some leading spaces".to_string()] - .into() + result.patterns.as_ref(), + [ + "**".parse().unwrap(), + "!**/ # This is not a comment because there is some leading spaces" + .parse() + .unwrap(), + ] ); } } diff --git a/crates/biome_cli/src/execute/migrate/prettier.rs b/crates/biome_cli/src/execute/migrate/prettier.rs index eae4d3231e91..e500be7568b9 100644 --- a/crates/biome_cli/src/execute/migrate/prettier.rs +++ b/crates/biome_cli/src/execute/migrate/prettier.rs @@ -1,7 +1,9 @@ +use super::{eslint_eslint::ShorthandVec, node}; use crate::diagnostics::MigrationDiagnostic; use crate::CliDiagnostic; +use biome_configuration::javascript::JsFormatterConfiguration; use biome_console::{markup, Console, ConsoleExt}; -use biome_deserialize::{json::deserialize_from_json_str, StringSet}; +use biome_deserialize::json::deserialize_from_json_str; use biome_deserialize_macros::Deserializable; use biome_diagnostics::{DiagnosticExt, PrintDiagnostic}; use biome_formatter::{ @@ -11,10 +13,7 @@ use biome_formatter::{ use biome_fs::{FileSystem, OpenOptions}; use biome_js_formatter::context::{ArrowParentheses, QuoteProperties, Semicolons, TrailingCommas}; use biome_json_parser::JsonParserOptions; -use biome_service::DynRef; -use std::{ffi::OsStr, path::Path}; - -use super::{eslint_eslint::ShorthandVec, node}; +use camino::Utf8Path; #[derive(Debug, Default, Deserializable)] #[deserializable(unknown_fields = "allow")] @@ -187,10 +186,10 @@ impl From for QuoteProperties { } } -impl TryFrom for biome_configuration::PartialConfiguration { +impl TryFrom for biome_configuration::Configuration { type Error = ParseFormatNumberError; fn try_from(value: PrettierConfiguration) -> Result { - let mut result = biome_configuration::PartialConfiguration::default(); + let mut result = 
biome_configuration::Configuration::default(); let line_width = LineWidth::try_from(value.print_width)?; let indent_width = IndentWidth::try_from(value.tab_width)?; @@ -199,21 +198,21 @@ impl TryFrom for biome_configuration::PartialConfiguratio } else { biome_formatter::IndentStyle::Space }; - let formatter = biome_configuration::PartialFormatterConfiguration { + let formatter = biome_configuration::FormatterConfiguration { indent_width: Some(indent_width), line_width: Some(line_width), indent_style: Some(indent_style), line_ending: Some(value.end_of_line.into()), + bracket_same_line: Some(value.bracket_line.into()), attribute_position: Some(AttributePosition::default()), - format_with_errors: Some(false), + format_with_errors: Some(false.into()), ignore: None, include: None, - enabled: Some(true), + includes: None, + enabled: Some(true.into()), // editorconfig support is intentionally set to true, because prettier always reads the editorconfig file // see: https://github.com/prettier/prettier/issues/15255 - use_editorconfig: Some(true), - // deprecated - indent_size: None, + use_editorconfig: Some(true.into()), bracket_spacing: Some(BracketSpacing::default()), }; result.formatter = Some(formatter); @@ -233,29 +232,24 @@ impl TryFrom for biome_configuration::PartialConfiguratio } else { QuoteStyle::Double }; - let js_formatter = biome_configuration::PartialJavascriptFormatter { + let js_formatter = JsFormatterConfiguration { indent_width: None, line_width: None, indent_style: None, line_ending: None, enabled: None, - // deprecated - indent_size: None, - // js ones - bracket_same_line: Some(value.bracket_line), + bracket_same_line: Some(value.bracket_line.into()), arrow_parentheses: Some(value.arrow_parens.into()), semicolons: Some(semicolons), trailing_commas: Some(value.trailing_comma.into()), - // deprecated - trailing_comma: None, quote_style: Some(quote_style), quote_properties: Some(value.quote_props.into()), bracket_spacing: Some(value.bracket_spacing.into()), jsx_quote_style: Some(jsx_quote_style), attribute_position: Some(AttributePosition::default()), }; - let js_config = biome_configuration::PartialJavascriptConfiguration { + let js_config = biome_configuration::JsConfiguration { formatter: Some(js_formatter), ..Default::default() }; @@ -275,7 +269,12 @@ impl TryFrom for biome_configuration::OverridePattern { type Error = ParseFormatNumberError; fn try_from(Override { files, options }: Override) -> Result { let mut result = biome_configuration::OverridePattern { - include: Some(StringSet::new(files.into_iter().collect())), + includes: Some( + files + .into_iter() + .filter_map(|glob| glob.parse().ok()) + .collect(), + ), ..Default::default() }; if options.print_width.is_some() @@ -345,8 +344,8 @@ impl TryFrom for biome_configuration::OverridePattern { QuoteStyle::Double } }); - let js_formatter = biome_configuration::PartialJavascriptFormatter { - bracket_same_line: options.bracket_line, + let js_formatter = JsFormatterConfiguration { + bracket_same_line: options.bracket_line.map(Into::into), arrow_parentheses: options.arrow_parens.map(|arrow_parens| arrow_parens.into()), semicolons, trailing_commas: options @@ -357,7 +356,7 @@ impl TryFrom for biome_configuration::OverridePattern { jsx_quote_style, ..Default::default() }; - let js_config = biome_configuration::PartialJavascriptConfiguration { + let js_config = biome_configuration::JsConfiguration { formatter: Some(js_formatter), ..Default::default() }; @@ -387,18 +386,18 @@ pub(crate) const IGNORE_FILE: &str = 
".prettierignore"; /// This function is in charge of reading prettier files, deserialize its contents pub(crate) fn read_config_file( - fs: &DynRef<'_, dyn FileSystem>, + fs: &dyn FileSystem, console: &mut dyn Console, ) -> Result { // We don't report an error if Prettier config is not embedded in `PACKAGE_JSON`. - if let Ok(data) = load_config(fs, Path::new(PACKAGE_JSON), console) { + if let Ok(data) = load_config(fs, Utf8Path::new(PACKAGE_JSON), console) { return Ok(Config { path: PACKAGE_JSON, data, }); } for config_name in CONFIG_FILES { - let path = Path::new(config_name); + let path = Utf8Path::new(config_name); if fs.path_exists(path) { return Ok(Config { path: config_name, @@ -412,11 +411,11 @@ pub(crate) fn read_config_file( } fn load_config( - fs: &DynRef<'_, dyn FileSystem>, - path: &Path, + fs: &dyn FileSystem, + path: &Utf8Path, console: &mut dyn Console, ) -> Result { - let (deserialized, diagnostics) = match path.extension().and_then(OsStr::to_str) { + let (deserialized, diagnostics) = match path.extension() { None | Some("json") => { let mut file = fs.open_with_options(path, OpenOptions::default().read(true))?; let mut content = String::new(); @@ -446,7 +445,7 @@ fn load_config( } } Some("js" | "mjs" | "cjs") => { - let node::Resolution { content, .. } = node::load_config(&path.to_string_lossy())?; + let node::Resolution { content, .. } = node::load_config(path.as_ref())?; deserialize_from_json_str::( &content, JsonParserOptions::default(), @@ -462,7 +461,7 @@ fn load_config( })) } }; - let path_str = path.to_string_lossy(); + let path_str = path.to_string(); // Heuristic: the Prettier config file is considered a YAML file if: // - desrialization failed // - there are at least 3 diagnostics diff --git a/crates/biome_cli/src/execute/mod.rs b/crates/biome_cli/src/execute/mod.rs index f77c70671796..3c6d76756865 100644 --- a/crates/biome_cli/src/execute/mod.rs +++ b/crates/biome_cli/src/execute/mod.rs @@ -18,17 +18,19 @@ use crate::reporter::terminal::{ConsoleReporter, ConsoleReporterVisitor}; use crate::{CliDiagnostic, CliSession, DiagnosticsPayload, Reporter}; use biome_configuration::analyzer::RuleSelector; use biome_console::{markup, ConsoleExt}; -use biome_diagnostics::adapters::SerdeJsonError; +use biome_diagnostics::SerdeJsonError; use biome_diagnostics::{category, Category}; use biome_fs::BiomePath; +use biome_grit_patterns::GritTargetLanguage; +use biome_service::projects::ProjectKey; use biome_service::workspace::{ - FeatureName, FeaturesBuilder, FixFileMode, FormatFileParams, OpenFileParams, PatternId, + FeatureName, FeaturesBuilder, FileContent, FixFileMode, FormatFileParams, OpenFileParams, + PatternId, }; -use std::borrow::Borrow; +use camino::{Utf8Path, Utf8PathBuf}; use std::ffi::OsString; use std::fmt::{Display, Formatter}; -use std::path::{Path, PathBuf}; -use tracing::info; +use tracing::{info, instrument}; /// Useful information during the traversal of files and virtual content #[derive(Debug, Clone)] @@ -43,42 +45,6 @@ pub struct Execution { max_diagnostics: u32, } -impl Execution { - pub fn new_format(vcs_targeted: VcsTargeted) -> Self { - Self { - traversal_mode: TraversalMode::Format { - ignore_errors: false, - write: false, - stdin: None, - vcs_targeted, - }, - report_mode: ReportMode::default(), - max_diagnostics: 0, - } - } - - pub fn report_mode(&self) -> &ReportMode { - &self.report_mode - } -} - -impl Execution { - pub(crate) fn to_feature(&self) -> FeatureName { - match self.traversal_mode { - TraversalMode::Format { .. 
} => FeaturesBuilder::new().with_formatter().build(), - TraversalMode::Lint { .. } => FeaturesBuilder::new().with_linter().build(), - TraversalMode::Check { .. } | TraversalMode::CI { .. } => FeaturesBuilder::new() - .with_organize_imports() - .with_formatter() - .with_linter() - .with_assists() - .build(), - TraversalMode::Migrate { .. } => FeatureName::empty(), - TraversalMode::Search { .. } => FeaturesBuilder::new().with_search().build(), - } - } -} - #[derive(Debug, Clone, Copy)] pub enum ExecutionEnvironment { GitHub, @@ -88,13 +54,13 @@ pub enum ExecutionEnvironment { #[derive(Debug, Clone)] pub struct Stdin( /// The virtual path to the file - PathBuf, + Utf8PathBuf, /// The content of the file String, ); impl Stdin { - fn as_path(&self) -> &Path { + fn as_path(&self) -> &Utf8Path { self.0.as_path() } @@ -103,8 +69,8 @@ impl Stdin { } } -impl From<(PathBuf, String)> for Stdin { - fn from((path, content): (PathBuf, String)) -> Self { +impl From<(Utf8PathBuf, String)> for Stdin { + fn from((path, content): (Utf8PathBuf, String)) -> Self { Self(path, content) } } @@ -125,6 +91,8 @@ impl From<(bool, bool)> for VcsTargeted { pub enum TraversalMode { /// This mode is enabled when running the command `biome check` Check { + /// Key of the project to check. + project_key: ProjectKey, /// The type of fixes that should be applied when analyzing a file. /// /// It's [None] if the `check` command is called without `--apply` or `--apply-suggested` @@ -139,9 +107,11 @@ pub enum TraversalMode { }, /// This mode is enabled when running the command `biome lint` Lint { + /// Key of the project to lint. + project_key: ProjectKey, /// The type of fixes that should be applied when analyzing a file. /// - /// It's [None] if the `check` command is called without `--apply` or `--apply-suggested` + /// It's [None] if the `lint` command is called without `--apply` or `--apply-suggested` /// arguments. fix_file_mode: Option, /// An optional tuple. @@ -164,6 +134,8 @@ pub enum TraversalMode { }, /// This mode is enabled when running the command `biome ci` CI { + /// Key of the project to run the CI checks for. + project_key: ProjectKey, /// Whether the CI is running in a specific environment, e.g. GitHub, GitLab, etc. environment: Option, /// A flag to know vcs integrated options such as `--staged` or `--changed` are enabled @@ -171,6 +143,8 @@ pub enum TraversalMode { }, /// This mode is enabled when running the command `biome format` Format { + /// Key of the project to format. + project_key: ProjectKey, /// It ignores parse errors ignore_errors: bool, /// It writes the new content on file @@ -184,21 +158,33 @@ pub enum TraversalMode { }, /// This mode is enabled when running the command `biome migrate` Migrate { + /// Key of the project to execute the migration in. + project_key: ProjectKey, /// Write result to disk write: bool, /// The path to `biome.json` - configuration_file_path: PathBuf, - /// The path directory where `biome.json` is placed - configuration_directory_path: PathBuf, + configuration_file_path: Utf8PathBuf, sub_command: Option, }, /// This mode is enabled when running the command `biome search` Search { + /// Key of the project to search in. + project_key: ProjectKey, + /// The GritQL pattern to search for. /// /// Note that the search command does not support rewrites. pattern: PatternId, + /// The language to query for. 
+ /// + /// Grit queries are specific to the grammar of the language they + /// target, so we currently do not support writing queries that apply + /// to multiple languages at once. + /// + /// If none given, the default language is JavaScript. + language: Option, + /// An optional tuple. /// 1. The virtual path to the file /// 2. The content of the file @@ -219,6 +205,30 @@ impl Display for TraversalMode { } } +impl TraversalMode { + pub fn project_key(&self) -> ProjectKey { + match self { + Self::Check { project_key, .. } + | Self::CI { project_key, .. } + | Self::Format { project_key, .. } + | Self::Lint { project_key, .. } + | Self::Migrate { project_key, .. } + | Self::Search { project_key, .. } => *project_key, + } + } + + pub fn should_scan_project(&self) -> bool { + match self { + Self::CI { .. } => true, + Self::Check { stdin, .. } + | Self::Format { stdin, .. } + | Self::Lint { stdin, .. } + | Self::Search { stdin, .. } => stdin.is_none(), + Self::Migrate { .. } => false, + } + } +} + /// Tells to the execution of the traversal how the information should be reported #[derive(Copy, Clone, Debug)] pub enum ReportMode { @@ -268,15 +278,16 @@ impl Execution { } } - pub(crate) fn new_ci(vcs_targeted: VcsTargeted) -> Self { + pub(crate) fn new_ci(project_key: ProjectKey, vcs_targeted: VcsTargeted) -> Self { // Ref: https://docs.github.com/actions/learn-github-actions/variables#default-environment-variables let is_github = std::env::var("GITHUB_ACTIONS") .ok() - .map_or(false, |value| value == "true"); + .is_some_and(|value| value == "true"); Self { report_mode: ReportMode::default(), traversal_mode: TraversalMode::CI { + project_key, environment: if is_github { Some(ExecutionEnvironment::GitHub) } else { @@ -341,24 +352,29 @@ impl Execution { matches!(self.traversal_mode, TraversalMode::Lint { .. }) } - pub(crate) const fn is_check_apply(&self) -> bool { - matches!( - self.traversal_mode, - TraversalMode::Check { - fix_file_mode: Some(FixFileMode::SafeFixes), - .. + #[instrument(level = "debug", skip(self), fields(result))] + pub(crate) fn is_safe_fixes_enabled(&self) -> bool { + let result = match self.traversal_mode { + TraversalMode::Check { fix_file_mode, .. } => { + fix_file_mode == Some(FixFileMode::SafeFixes) } - ) + _ => false, + }; + tracing::Span::current().record("result", result); + result } - pub(crate) const fn is_check_apply_unsafe(&self) -> bool { - matches!( - self.traversal_mode, - TraversalMode::Check { - fix_file_mode: Some(FixFileMode::SafeAndUnsafeFixes), - .. + #[instrument(level = "debug", skip(self), fields(result))] + pub(crate) fn is_safe_and_unsafe_fixes_enabled(&self) -> bool { + let result = match self.traversal_mode { + TraversalMode::Check { fix_file_mode, .. } => { + fix_file_mode == Some(FixFileMode::SafeAndUnsafeFixes) } - ) + _ => false, + }; + + tracing::Span::current().record("result", result); + result } pub(crate) const fn is_format(&self) -> bool { @@ -414,6 +430,60 @@ impl Execution { TraversalMode::Search { .. } => false, } } + + pub fn new_format(project_key: ProjectKey, vcs_targeted: VcsTargeted) -> Self { + Self { + traversal_mode: TraversalMode::Format { + project_key, + ignore_errors: false, + write: false, + stdin: None, + vcs_targeted, + }, + report_mode: ReportMode::default(), + max_diagnostics: 0, + } + } + + pub fn report_mode(&self) -> &ReportMode { + &self.report_mode + } + pub(crate) fn to_feature(&self) -> FeatureName { + match self.traversal_mode { + TraversalMode::Format { .. 
} => FeaturesBuilder::new().with_formatter().build(), + TraversalMode::Lint { .. } => FeaturesBuilder::new().with_linter().build(), + TraversalMode::Check { .. } | TraversalMode::CI { .. } => FeaturesBuilder::new() + .with_formatter() + .with_linter() + .with_assist() + .build(), + TraversalMode::Migrate { .. } => FeatureName::empty(), + TraversalMode::Search { .. } => FeaturesBuilder::new().with_search().build(), + } + } + + #[instrument(level = "debug", skip(self), fields(result))] + pub(crate) fn should_write(&self) -> bool { + let result = match self.traversal_mode { + TraversalMode::Format { write, .. } => write, + + _ => self.is_safe_fixes_enabled() || self.is_safe_and_unsafe_fixes_enabled(), + }; + tracing::Span::current().record("result", result); + result + } + + #[instrument(level = "debug", skip(self), fields(result))] + pub(crate) fn should_ignore_errors(&self) -> bool { + let result = match self.traversal_mode { + TraversalMode::Format { ignore_errors, .. } => ignore_errors, + + _ => false, + }; + tracing::Span::current().record("result", result); + + result + } } /// Based on the [mode](TraversalMode), the function might launch a traversal of the file system @@ -432,113 +502,56 @@ pub fn execute_mode( u32::MAX }; - // don't do any traversal if there's some content coming from stdin - if let Some(stdin) = execution.as_stdin_file() { - let biome_path = BiomePath::new(stdin.as_path()); - std_in::run( - session, - &execution, - biome_path, - stdin.as_content(), - cli_options.verbose, - ) - } else if let TraversalMode::Migrate { + // migrate command doesn't do any traversal. + if let TraversalMode::Migrate { + project_key, write, configuration_file_path, - configuration_directory_path, sub_command, } = execution.traversal_mode { let payload = MigratePayload { session, + project_key, write, configuration_file_path, - configuration_directory_path, verbose: cli_options.verbose, sub_command, }; - migrate::run(payload) - } else { - let TraverseResult { - summary, - evaluated_paths, - diagnostics, - } = traverse(&execution, &mut session, cli_options, paths)?; - let console = session.app.console; - let errors = summary.errors; - let skipped = summary.skipped; - let processed = summary.changed + summary.unchanged; - let should_exit_on_warnings = summary.warnings > 0 && cli_options.error_on_warnings; - - match execution.report_mode { - ReportMode::Terminal { with_summary } => { - if with_summary { - let reporter = SummaryReporter { - summary, - diagnostics_payload: DiagnosticsPayload { - verbose: cli_options.verbose, - diagnostic_level: cli_options.diagnostic_level, - diagnostics, - }, - execution: execution.clone(), - }; - reporter.write(&mut SummaryReporterVisitor(console))?; - } else { - let reporter = ConsoleReporter { - summary, - diagnostics_payload: DiagnosticsPayload { - verbose: cli_options.verbose, - diagnostic_level: cli_options.diagnostic_level, - diagnostics, - }, - execution: execution.clone(), - evaluated_paths, - }; - reporter.write(&mut ConsoleReporterVisitor(console))?; - } - } - ReportMode::Json { pretty } => { - console.error(markup!{ - "The ""--json"" option is ""unstable/experimental"" and its output might change between patches/minor releases." 
- }); - let reporter = JsonReporter { + return migrate::run(payload); + } + + let project_key = execution.traversal_mode.project_key(); + + // don't do any traversal if there's some content coming from stdin + if let Some(stdin) = execution.as_stdin_file() { + let biome_path = BiomePath::new(stdin.as_path()); + return std_in::run( + session, + project_key, + &execution, + biome_path, + stdin.as_content(), + cli_options.verbose, + ); + } + + let TraverseResult { + summary, + evaluated_paths, + diagnostics, + } = traverse(&execution, &mut session, project_key, cli_options, paths)?; + let console = session.app.console; + let errors = summary.errors; + let skipped = summary.skipped; + let processed = summary.changed + summary.unchanged; + let should_exit_on_warnings = summary.warnings > 0 && cli_options.error_on_warnings; + + match execution.report_mode { + ReportMode::Terminal { with_summary } => { + if with_summary { + let reporter = SummaryReporter { summary, - diagnostics: DiagnosticsPayload { - verbose: cli_options.verbose, - diagnostic_level: cli_options.diagnostic_level, - diagnostics, - }, - execution: execution.clone(), - }; - let mut buffer = JsonReporterVisitor::new(summary); - reporter.write(&mut buffer)?; - if pretty { - let content = serde_json::to_string(&buffer).map_err(|error| { - CliDiagnostic::Report(ReportDiagnostic::Serialization( - SerdeJsonError::from(error), - )) - })?; - let report_file = BiomePath::new("_report_output.json"); - session.app.workspace.open_file(OpenFileParams { - content, - path: report_file.clone(), - version: 0, - document_file_source: None, - })?; - let code = session.app.workspace.format_file(FormatFileParams { - path: report_file.clone(), - })?; - console.log(markup! { - {code.as_code()} - }); - } else { - console.log(markup! { - {buffer} - }); - } - } - ReportMode::GitHub => { - let reporter = GithubReporter { diagnostics_payload: DiagnosticsPayload { verbose: cli_options.verbose, diagnostic_level: cli_options.diagnostic_level, @@ -546,24 +559,9 @@ pub fn execute_mode( }, execution: execution.clone(), }; - reporter.write(&mut GithubReporterVisitor(console))?; - } - ReportMode::GitLab => { - let reporter = GitLabReporter { - diagnostics: DiagnosticsPayload { - verbose: cli_options.verbose, - diagnostic_level: cli_options.diagnostic_level, - diagnostics, - }, - execution: execution.clone(), - }; - reporter.write(&mut GitLabReporterVisitor::new( - console, - session.app.fs.borrow().working_directory(), - ))?; - } - ReportMode::Junit => { - let reporter = JunitReporter { + reporter.write(&mut SummaryReporterVisitor(console))?; + } else { + let reporter = ConsoleReporter { summary, diagnostics_payload: DiagnosticsPayload { verbose: cli_options.verbose, @@ -571,29 +569,110 @@ pub fn execute_mode( diagnostics, }, execution: execution.clone(), + evaluated_paths, }; - reporter.write(&mut JunitReporterVisitor::new(console))?; + reporter.write(&mut ConsoleReporterVisitor(console))?; + } + } + ReportMode::Json { pretty } => { + console.error(markup!{ + "The ""--json"" option is ""unstable/experimental"" and its output might change between patches/minor releases." 
+ }); + let reporter = JsonReporter { + summary, + diagnostics: DiagnosticsPayload { + verbose: cli_options.verbose, + diagnostic_level: cli_options.diagnostic_level, + diagnostics, + }, + execution: execution.clone(), + }; + let mut buffer = JsonReporterVisitor::new(summary); + reporter.write(&mut buffer)?; + if pretty { + let content = serde_json::to_string(&buffer).map_err(|error| { + CliDiagnostic::Report(ReportDiagnostic::Serialization(SerdeJsonError::from( + error, + ))) + })?; + let report_file = BiomePath::new("_report_output.json"); + session.app.workspace.open_file(OpenFileParams { + project_key, + content: FileContent::FromClient(content), + path: report_file.clone(), + version: 0, + document_file_source: None, + persist_node_cache: false, + })?; + let code = session.app.workspace.format_file(FormatFileParams { + project_key, + path: report_file.clone(), + })?; + console.log(markup! { + {code.as_code()} + }); + } else { + console.log(markup! { + {buffer} + }); } } + ReportMode::GitHub => { + let reporter = GithubReporter { + diagnostics_payload: DiagnosticsPayload { + verbose: cli_options.verbose, + diagnostic_level: cli_options.diagnostic_level, + diagnostics, + }, + execution: execution.clone(), + }; + reporter.write(&mut GithubReporterVisitor(console))?; + } + ReportMode::GitLab => { + let reporter = GitLabReporter { + diagnostics: DiagnosticsPayload { + verbose: cli_options.verbose, + diagnostic_level: cli_options.diagnostic_level, + diagnostics, + }, + execution: execution.clone(), + }; + reporter.write(&mut GitLabReporterVisitor::new( + console, + session.app.workspace.fs().working_directory(), + ))?; + } + ReportMode::Junit => { + let reporter = JunitReporter { + summary, + diagnostics_payload: DiagnosticsPayload { + verbose: cli_options.verbose, + diagnostic_level: cli_options.diagnostic_level, + diagnostics, + }, + execution: execution.clone(), + }; + reporter.write(&mut JunitReporterVisitor::new(console))?; + } + } - // Processing emitted error diagnostics, exit with a non-zero code - if processed.saturating_sub(skipped) == 0 && !cli_options.no_errors_on_unmatched { - Err(CliDiagnostic::no_files_processed()) - } else if errors > 0 || should_exit_on_warnings { - let category = execution.as_diagnostic_category(); - if should_exit_on_warnings { - if execution.is_check_apply() { - Err(CliDiagnostic::apply_warnings(category)) - } else { - Err(CliDiagnostic::check_warnings(category)) - } - } else if execution.is_check_apply() { - Err(CliDiagnostic::apply_error(category)) + // Processing emitted error diagnostics, exit with a non-zero code + if processed.saturating_sub(skipped) == 0 && !cli_options.no_errors_on_unmatched { + Err(CliDiagnostic::no_files_processed()) + } else if errors > 0 || should_exit_on_warnings { + let category = execution.as_diagnostic_category(); + if should_exit_on_warnings { + if execution.is_safe_fixes_enabled() { + Err(CliDiagnostic::apply_warnings(category)) } else { - Err(CliDiagnostic::check_error(category)) + Err(CliDiagnostic::check_warnings(category)) } + } else if execution.is_safe_fixes_enabled() { + Err(CliDiagnostic::apply_error(category)) } else { - Ok(()) + Err(CliDiagnostic::check_error(category)) } + } else { + Ok(()) } } diff --git a/crates/biome_cli/src/execute/process_file.rs b/crates/biome_cli/src/execute/process_file.rs index c7bae642e1dd..68123022def2 100644 --- a/crates/biome_cli/src/execute/process_file.rs +++ b/crates/biome_cli/src/execute/process_file.rs @@ -1,8 +1,7 @@ -mod assists; +mod assist; mod check; mod format; mod 
lint; -mod organize_imports; mod search; pub(crate) mod workspace_file; @@ -51,7 +50,7 @@ pub(crate) enum Message { Failure, Error(Error), Diagnostics { - name: String, + file_path: String, content: String, diagnostics: Vec, skipped_diagnostics: u32, @@ -73,8 +72,7 @@ impl Message { #[derive(Debug)] pub(crate) enum DiffKind { Format, - OrganizeImports, - Assists, + Assist, } impl From for Message @@ -128,114 +126,114 @@ impl<'ctx, 'app> Deref for SharedTraversalOptions<'ctx, 'app> { /// content of the file and emit a diff or write the new content to the disk if /// write mode is enabled pub(crate) fn process_file(ctx: &TraversalOptions, biome_path: &BiomePath) -> FileResult { - tracing::trace_span!("process_file", path = ?biome_path).in_scope(move || { - let file_features = ctx - .workspace - .file_features(SupportsFeatureParams { - path: biome_path.clone(), - features: ctx.execution.to_feature(), - }) - .with_file_path_and_code_and_tags( - biome_path.display().to_string(), - category!("files/missingHandler"), - DiagnosticTags::VERBOSE, - )?; - - // first we stop if there are some files that don't have ALL features enabled, e.g. images, fonts, etc. - if file_features.is_ignored() || file_features.is_not_enabled() { - return Ok(FileStatus::Ignored); - } else if file_features.is_not_supported() { - return Err(Message::from( - UnhandledDiagnostic.with_file_path(biome_path.display().to_string()), - )); - } - - // then we pick the specific features for this file - let unsupported_reason = match ctx.execution.traversal_mode() { - TraversalMode::Check { .. } | TraversalMode::CI { .. } => file_features - .support_kind_for(&FeatureKind::Lint) - .and_then(|support_kind| { - if support_kind.is_not_enabled() { - Some(support_kind) - } else { - None - } - }) - .and( - file_features - .support_kind_for(&FeatureKind::Format) - .and_then(|support_kind| { - if support_kind.is_not_enabled() { - Some(support_kind) - } else { - None - } - }), - ) - .and( - file_features - .support_kind_for(&FeatureKind::OrganizeImports) - .and_then(|support_kind| { - if support_kind.is_not_enabled() { - Some(support_kind) - } else { - None - } - }), - ), - TraversalMode::Format { .. } => file_features.support_kind_for(&FeatureKind::Format), - TraversalMode::Lint { .. } => file_features.support_kind_for(&FeatureKind::Lint), - TraversalMode::Migrate { .. } => None, - TraversalMode::Search { .. } => file_features.support_kind_for(&FeatureKind::Search), - }; + let _ = tracing::trace_span!("process_file", path = ?biome_path).entered(); + let file_features = ctx + .workspace + .file_features(SupportsFeatureParams { + project_key: ctx.project_key, + path: biome_path.clone(), + features: ctx.execution.to_feature(), + }) + .with_file_path_and_code_and_tags( + biome_path.to_string(), + category!("files/missingHandler"), + DiagnosticTags::VERBOSE, + )?; + + // first we stop if there are some files that don't have ALL features enabled, e.g. images, fonts, etc. 
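
The `process_file` hunks above swap the `tracing::trace_span!(..).in_scope(closure)` wrapper for an entered span guard. A minimal standalone sketch of that pattern, assuming only the `tracing` and `tracing-subscriber` crates; `do_work` is a hypothetical stand-in for the real per-file processing:

```rust
use tracing::{trace_span, Level};

fn do_work(path: &str) -> usize {
    path.len()
}

fn process(path: &str) -> usize {
    // Closure style: the span is only active while the closure body runs.
    let from_closure = trace_span!("process_file", path = ?path).in_scope(|| do_work(path));

    // Guard style: `entered()` returns a guard that keeps the span active until it
    // is dropped; bind it to a named variable so it lives to the end of the scope.
    let _span = trace_span!("process_file", path = ?path).entered();
    let from_guard = do_work(path);

    assert_eq!(from_closure, from_guard);
    from_guard
}

fn main() {
    tracing_subscriber::fmt().with_max_level(Level::TRACE).init();
    println!("processed path of length {}", process("src/lib.rs"));
}
```

The guard style avoids the extra closure nesting, which is what lets the bodies of these functions be de-indented in the hunks that follow.
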
+ if file_features.is_ignored() || file_features.is_not_enabled() { + return Ok(FileStatus::Ignored); + } else if file_features.is_not_supported() { + return Err(Message::from( + UnhandledDiagnostic.with_file_path(biome_path.to_string()), + )); + } - if let Some(reason) = unsupported_reason { - match reason { - SupportKind::FileNotSupported => { - return Err(Message::from( - UnhandledDiagnostic.with_file_path(biome_path.display().to_string()), - )); - } - SupportKind::FeatureNotEnabled | SupportKind::Ignored => { - return Ok(FileStatus::Ignored); + // then we pick the specific features for this file + let unsupported_reason = match ctx.execution.traversal_mode() { + TraversalMode::Check { .. } | TraversalMode::CI { .. } => file_features + .support_kind_for(&FeatureKind::Lint) + .and_then(|support_kind| { + if support_kind.is_not_enabled() { + Some(support_kind) + } else { + None } - SupportKind::Protected => { - return Ok(FileStatus::Protected(biome_path.display().to_string())); - } - SupportKind::Supported => {} - }; - } - - let shared_context = &SharedTraversalOptions::new(ctx); - - match ctx.execution.traversal_mode { - TraversalMode::Lint { - ref suppression_reason, - suppress, - .. - } => { - // the unsupported case should be handled already at this point - lint( - shared_context, - biome_path, - suppress, - suppression_reason.as_deref(), - ) - } - TraversalMode::Format { .. } => { - // the unsupported case should be handled already at this point - format(shared_context, biome_path) - } - TraversalMode::Check { .. } | TraversalMode::CI { .. } => { - check_file(shared_context, biome_path, &file_features) + }) + .and( + file_features + .support_kind_for(&FeatureKind::Format) + .and_then(|support_kind| { + if support_kind.is_not_enabled() { + Some(support_kind) + } else { + None + } + }), + ) + .and( + file_features + .support_kind_for(&FeatureKind::Assist) + .and_then(|support_kind| { + if support_kind.is_not_enabled() { + Some(support_kind) + } else { + None + } + }), + ), + TraversalMode::Format { .. } => file_features.support_kind_for(&FeatureKind::Format), + TraversalMode::Lint { .. } => file_features.support_kind_for(&FeatureKind::Lint), + TraversalMode::Migrate { .. } => None, + TraversalMode::Search { .. } => file_features.support_kind_for(&FeatureKind::Search), + }; + + if let Some(reason) = unsupported_reason { + match reason { + SupportKind::FileNotSupported => { + return Err(Message::from( + UnhandledDiagnostic.with_file_path(biome_path.to_string()), + )); } - TraversalMode::Migrate { .. } => { - unreachable!("The migration should not be called for this file") + SupportKind::FeatureNotEnabled | SupportKind::Ignored => { + return Ok(FileStatus::Ignored); } - TraversalMode::Search { ref pattern, .. } => { - // the unsupported case should be handled already at this point - search(shared_context, biome_path, pattern) + SupportKind::Protected => { + return Ok(FileStatus::Protected(biome_path.to_string())); } + SupportKind::Supported => {} + }; + } + + let shared_context = &SharedTraversalOptions::new(ctx); + + match ctx.execution.traversal_mode { + TraversalMode::Lint { + ref suppression_reason, + suppress, + .. + } => { + // the unsupported case should be handled already at this point + lint( + shared_context, + biome_path.clone(), + suppress, + suppression_reason.as_deref(), + ) } - }) + TraversalMode::Format { .. } => { + // the unsupported case should be handled already at this point + format(shared_context, biome_path.clone()) + } + TraversalMode::Check { .. 
} | TraversalMode::CI { .. } => { + check_file(shared_context, biome_path.clone(), &file_features) + } + TraversalMode::Migrate { .. } => { + unreachable!("The migration should not be called for this file") + } + TraversalMode::Search { ref pattern, .. } => { + // the unsupported case should be handled already at this point + search(shared_context, biome_path.clone(), pattern) + } + } } diff --git a/crates/biome_cli/src/execute/process_file/assist.rs b/crates/biome_cli/src/execute/process_file/assist.rs new file mode 100644 index 000000000000..180f73f82a6b --- /dev/null +++ b/crates/biome_cli/src/execute/process_file/assist.rs @@ -0,0 +1,68 @@ +use crate::execute::diagnostics::ResultExt; +use crate::execute::process_file::workspace_file::WorkspaceFile; +use crate::execute::process_file::{ + DiffKind, FileResult, FileStatus, Message, SharedTraversalOptions, +}; +use biome_analyze::RuleCategoriesBuilder; +use biome_diagnostics::category; +use biome_service::file_handlers::{AstroFileHandler, SvelteFileHandler, VueFileHandler}; +use biome_service::workspace::FixFileMode; + +/// Lints a single file and returns a [FileResult] +pub(crate) fn assist_with_guard<'ctx>( + ctx: &'ctx SharedTraversalOptions<'ctx, '_>, + workspace_file: &mut WorkspaceFile, +) -> FileResult { + let _ = tracing::info_span!("Process assist", path =? workspace_file.path).entered(); + let input = workspace_file.input()?; + + let only = Vec::new(); + let skip = Vec::new(); + let fix_result = workspace_file + .guard() + .fix_file( + FixFileMode::SafeFixes, + false, + RuleCategoriesBuilder::default().with_assist().build(), + only.clone(), + skip.clone(), + None, + ) + .with_file_path_and_code(workspace_file.path.to_string(), category!("assist"))?; + + ctx.push_message(Message::SkippedFixes { + skipped_suggested_fixes: fix_result.skipped_suggested_fixes, + }); + + let mut output = fix_result.code; + + match workspace_file.as_extension() { + Some("astro") => { + output = AstroFileHandler::output(input.as_str(), output.as_str()); + } + Some("vue") => { + output = VueFileHandler::output(input.as_str(), output.as_str()); + } + Some("svelte") => { + output = SvelteFileHandler::output(input.as_str(), output.as_str()); + } + _ => {} + } + if input != output { + if ctx.execution.as_fix_file_mode().is_none() { + Ok(FileStatus::Message(Message::Diff { + file_name: workspace_file.path.to_string(), + old: input, + new: output, + diff_kind: DiffKind::Assist, + })) + } else { + if output != input && ctx.execution.as_fix_file_mode().is_some() { + workspace_file.update_file(output)?; + } + Ok(FileStatus::Changed) + } + } else { + Ok(FileStatus::Unchanged) + } +} diff --git a/crates/biome_cli/src/execute/process_file/assists.rs b/crates/biome_cli/src/execute/process_file/assists.rs deleted file mode 100644 index 43563ac702c9..000000000000 --- a/crates/biome_cli/src/execute/process_file/assists.rs +++ /dev/null @@ -1,75 +0,0 @@ -use std::ffi::OsStr; - -use crate::execute::diagnostics::ResultExt; -use crate::execute::process_file::workspace_file::WorkspaceFile; -use crate::execute::process_file::{ - DiffKind, FileResult, FileStatus, Message, SharedTraversalOptions, -}; -use biome_analyze::RuleCategoriesBuilder; -use biome_diagnostics::category; -use biome_service::file_handlers::{AstroFileHandler, SvelteFileHandler, VueFileHandler}; -use biome_service::workspace::FixFileMode; - -/// Lints a single file and returns a [FileResult] -pub(crate) fn assists_with_guard<'ctx>( - ctx: &'ctx SharedTraversalOptions<'ctx, '_>, - workspace_file: &mut 
WorkspaceFile, -) -> FileResult { - tracing::info_span!("Processes assists", path =? workspace_file.path.display()).in_scope( - move || { - let input = workspace_file.input()?; - let only = Vec::new(); - let skip = Vec::new(); - let fix_result = workspace_file - .guard() - .fix_file( - FixFileMode::SafeFixes, - false, - RuleCategoriesBuilder::default().with_action().build(), - only.clone(), - skip.clone(), - None, - ) - .with_file_path_and_code( - workspace_file.path.display().to_string(), - category!("assists"), - )?; - - ctx.push_message(Message::SkippedFixes { - skipped_suggested_fixes: fix_result.skipped_suggested_fixes, - }); - - let mut output = fix_result.code; - - match workspace_file.as_extension().map(OsStr::as_encoded_bytes) { - Some(b"astro") => { - output = AstroFileHandler::output(input.as_str(), output.as_str()); - } - Some(b"vue") => { - output = VueFileHandler::output(input.as_str(), output.as_str()); - } - Some(b"svelte") => { - output = SvelteFileHandler::output(input.as_str(), output.as_str()); - } - _ => {} - } - if input != output { - if ctx.execution.as_fix_file_mode().is_none() { - return Ok(FileStatus::Message(Message::Diff { - file_name: workspace_file.path.display().to_string(), - old: input, - new: output, - diff_kind: DiffKind::Assists, - })); - } else { - if output != input && ctx.execution.as_fix_file_mode().is_some() { - workspace_file.update_file(output)?; - } - Ok(FileStatus::Changed) - } - } else { - Ok(FileStatus::Unchanged) - } - }, - ) -} diff --git a/crates/biome_cli/src/execute/process_file/check.rs b/crates/biome_cli/src/execute/process_file/check.rs index 2981c79ee05a..020ff4658098 100644 --- a/crates/biome_cli/src/execute/process_file/check.rs +++ b/crates/biome_cli/src/execute/process_file/check.rs @@ -1,113 +1,99 @@ -use crate::execute::process_file::assists::assists_with_guard; +use crate::execute::process_file::assist::assist_with_guard; use crate::execute::process_file::format::format_with_guard; use crate::execute::process_file::lint::lint_with_guard; -use crate::execute::process_file::organize_imports::organize_imports_with_guard; use crate::execute::process_file::workspace_file::WorkspaceFile; use crate::execute::process_file::{FileResult, FileStatus, Message, SharedTraversalOptions}; +use biome_diagnostics::{category, DiagnosticExt}; +use biome_fs::{BiomePath, TraversalContext}; +use biome_service::diagnostics::FileTooLarge; use biome_service::workspace::FileFeaturesResult; -use std::path::Path; pub(crate) fn check_file<'ctx>( ctx: &'ctx SharedTraversalOptions<'ctx, '_>, - path: &Path, + path: BiomePath, file_features: &'ctx FileFeaturesResult, ) -> FileResult { let mut has_failures = false; let mut workspace_file = WorkspaceFile::new(ctx, path)?; + let result = workspace_file.guard().check_file_size()?; + if result.is_too_large() { + ctx.push_diagnostic( + FileTooLarge::from(result) + .with_file_path(workspace_file.path.to_string()) + .with_category(category!("check")), + ); + return Ok(FileStatus::Ignored); + } let mut changed = false; - tracing::info_span!("Process check", path =? 
workspace_file.path.display()).in_scope( - move || { - if file_features.supports_lint() { - let lint_result = lint_with_guard(ctx, &mut workspace_file, false, None); - match lint_result { - Ok(status) => { - if status.is_changed() { - changed = true - } - if let FileStatus::Message(msg) = status { - if msg.is_failure() { - has_failures = true; - } - ctx.push_message(msg); - } - } - Err(err) => { - ctx.push_message(err); - has_failures = true; - } + let _ = tracing::info_span!("Check ", path =? workspace_file.path).entered(); + if file_features.supports_lint() { + let lint_result = lint_with_guard(ctx, &mut workspace_file, false, None); + match lint_result { + Ok(status) => { + if status.is_changed() { + changed = true } - } - - if file_features.supports_organize_imports() { - let organize_imports_result = organize_imports_with_guard(ctx, &mut workspace_file); - match organize_imports_result { - Ok(status) => { - if status.is_changed() { - changed = true - } - if let FileStatus::Message(msg) = status { - if msg.is_failure() { - has_failures = true; - } - ctx.push_message(msg); - } - } - Err(err) => { - ctx.push_message(err); + if let FileStatus::Message(msg) = status { + if msg.is_failure() { has_failures = true; } + ctx.push_message(msg); } } + Err(err) => { + ctx.push_message(err); + has_failures = true; + } + } + } - if file_features.supports_assists() { - let assists_result = assists_with_guard(ctx, &mut workspace_file); - match assists_result { - Ok(status) => { - if status.is_changed() { - changed = true - } - if let FileStatus::Message(msg) = status { - if msg.is_failure() { - has_failures = true; - } - ctx.push_message(msg); - } - } - Err(err) => { - ctx.push_message(err); + if file_features.supports_assist() { + let assist_result = assist_with_guard(ctx, &mut workspace_file); + match assist_result { + Ok(status) => { + if status.is_changed() { + changed = true + } + if let FileStatus::Message(msg) = status { + if msg.is_failure() { has_failures = true; } + ctx.push_message(msg); } } + Err(err) => { + ctx.push_message(err); + has_failures = true; + } + } + } - if file_features.supports_format() { - let format_result = format_with_guard(ctx, &mut workspace_file); - match format_result { - Ok(status) => { - if status.is_changed() { - changed = true - } - if let FileStatus::Message(msg) = status { - if msg.is_failure() { - has_failures = true; - } - ctx.push_message(msg); - } - } - Err(err) => { - ctx.push_message(err); + if file_features.supports_format() { + let format_result = format_with_guard(ctx, &mut workspace_file); + match format_result { + Ok(status) => { + if status.is_changed() { + changed = true + } + if let FileStatus::Message(msg) = status { + if msg.is_failure() { has_failures = true; } + ctx.push_message(msg); } } - - if has_failures { - Ok(FileStatus::Message(Message::Failure)) - } else if changed { - Ok(FileStatus::Changed) - } else { - Ok(FileStatus::Unchanged) + Err(err) => { + ctx.push_message(err); + has_failures = true; } - }, - ) + } + } + + if has_failures { + Ok(FileStatus::Message(Message::Failure)) + } else if changed { + Ok(FileStatus::Changed) + } else { + Ok(FileStatus::Unchanged) + } } diff --git a/crates/biome_cli/src/execute/process_file/format.rs b/crates/biome_cli/src/execute/process_file/format.rs index c8fe35521f70..581fa56b09aa 100644 --- a/crates/biome_cli/src/execute/process_file/format.rs +++ b/crates/biome_cli/src/execute/process_file/format.rs @@ -3,132 +3,130 @@ use crate::execute::process_file::workspace_file::WorkspaceFile; use 
crate::execute::process_file::{ DiffKind, FileResult, FileStatus, Message, SharedTraversalOptions, }; -use crate::execute::TraversalMode; use biome_analyze::RuleCategoriesBuilder; use biome_diagnostics::{category, Diagnostic, DiagnosticExt, Error, Severity}; +use biome_fs::{BiomePath, TraversalContext}; +use biome_service::diagnostics::FileTooLarge; use biome_service::file_handlers::{AstroFileHandler, SvelteFileHandler, VueFileHandler}; -use std::ffi::OsStr; -use std::path::Path; use std::sync::atomic::Ordering; -use tracing::debug; +use tracing::{debug, instrument}; -pub(crate) fn format<'ctx>(ctx: &'ctx SharedTraversalOptions<'ctx, '_>, path: &Path) -> FileResult { +#[instrument(name = "cli_format", level = "debug", skip(ctx, path))] +pub(crate) fn format<'ctx>( + ctx: &'ctx SharedTraversalOptions<'ctx, '_>, + path: BiomePath, +) -> FileResult { let mut workspace_file = WorkspaceFile::new(ctx, path)?; - format_with_guard(ctx, &mut workspace_file) + let result = workspace_file.guard().check_file_size()?; + if result.is_too_large() { + ctx.push_diagnostic( + FileTooLarge::from(result) + .with_file_path(workspace_file.path.to_string()) + .with_category(category!("format")), + ); + Ok(FileStatus::Ignored) + } else { + format_with_guard(ctx, &mut workspace_file) + } } +#[instrument(level = "debug", skip(ctx, workspace_file))] pub(crate) fn format_with_guard<'ctx>( ctx: &'ctx SharedTraversalOptions<'ctx, '_>, workspace_file: &mut WorkspaceFile, ) -> FileResult { - tracing::info_span!("Processes formatting", path =? workspace_file.path.display()).in_scope( - move || { - let max_diagnostics = ctx.remaining_diagnostics.load(Ordering::Relaxed); - debug!("Pulling diagnostics from parsed file"); - let diagnostics_result = workspace_file - .guard() - .pull_diagnostics( - RuleCategoriesBuilder::default().with_syntax().build(), - max_diagnostics, - Vec::new(), - Vec::new(), - ) - .with_file_path_and_code( - workspace_file.path.display().to_string(), - category!("format"), - )?; + let max_diagnostics = ctx.remaining_diagnostics.load(Ordering::Relaxed); + let diagnostics_result = workspace_file + .guard() + .pull_diagnostics( + RuleCategoriesBuilder::default().with_syntax().build(), + max_diagnostics, + Vec::new(), + Vec::new(), + ) + .with_file_path_and_code(workspace_file.path.to_string(), category!("format"))?; - let input = workspace_file.input()?; - let (should_write, ignore_errors) = match ctx.execution.traversal_mode { - TraversalMode::Format { - write, - ignore_errors, - .. - } => (write, ignore_errors), + let input = workspace_file.input()?; + let should_write = ctx.execution.should_write(); + let ignore_errors = ctx.execution.should_ignore_errors(); - _ => ( - ctx.execution.is_check_apply() || ctx.execution.is_check_apply_unsafe(), - false, - ), - }; - debug!("Should write the file to disk? {}", should_write); - debug!("Should ignore errors? 
{}", ignore_errors); - - if diagnostics_result.errors > 0 && ignore_errors { - return Err(Message::from( - SkippedDiagnostic.with_file_path(workspace_file.path.display().to_string()), - )); - } + tracing::Span::current().record("should_write", tracing::field::display(&should_write)); + tracing::Span::current().record("ignore_errors", tracing::field::display(&ignore_errors)); - ctx.push_message(Message::Diagnostics { - name: workspace_file.path.display().to_string(), - content: input.clone(), - diagnostics: diagnostics_result - .diagnostics - .into_iter() - .filter_map(|diag| { - if diag.severity() >= Severity::Error && ignore_errors { - None - } else { - Some(Error::from(diag)) - } - }) - .collect(), - skipped_diagnostics: diagnostics_result.skipped_diagnostics as u32, - }); + if diagnostics_result.errors > 0 && ignore_errors { + return Err(Message::from( + SkippedDiagnostic.with_file_path(workspace_file.path.to_string()), + )); + } - let printed = workspace_file - .guard() - .format_file() - .with_file_path_and_code( - workspace_file.path.display().to_string(), - category!("format"), - )?; + ctx.push_message(Message::Diagnostics { + file_path: workspace_file.path.to_string(), + content: input.clone(), + diagnostics: diagnostics_result + .diagnostics + .into_iter() + .filter_map(|diag| { + if diag.severity() >= Severity::Error && ignore_errors { + None + } else { + Some(Error::from(diag)) + } + }) + .collect(), + skipped_diagnostics: diagnostics_result.skipped_diagnostics as u32, + }); - let mut output = printed.into_code(); + let printed = workspace_file + .guard() + .format_file() + .with_file_path_and_code(workspace_file.path.to_string(), category!("format"))?; - if ignore_errors { - return Ok(FileStatus::Ignored); - } + let mut output = printed.into_code(); - match workspace_file.as_extension().map(OsStr::as_encoded_bytes) { - Some(b"astro") => { - if output.is_empty() { - return Ok(FileStatus::Unchanged); - } - output = AstroFileHandler::output(input.as_str(), output.as_str()); - } - Some(b"vue") => { - if output.is_empty() { - return Ok(FileStatus::Unchanged); - } - output = VueFileHandler::output(input.as_str(), output.as_str()); - } + if ignore_errors { + return Ok(FileStatus::Ignored); + } - Some(b"svelte") => { - if output.is_empty() { - return Ok(FileStatus::Unchanged); - } - output = SvelteFileHandler::output(input.as_str(), output.as_str()); - } - _ => {} + match workspace_file.as_extension() { + Some("astro") => { + if output.is_empty() { + return Ok(FileStatus::Unchanged); + } + output = AstroFileHandler::output(input.as_str(), output.as_str()); + } + Some("vue") => { + if output.is_empty() { + return Ok(FileStatus::Unchanged); } + output = VueFileHandler::output(input.as_str(), output.as_str()); + } - if output != input { - if should_write { - workspace_file.update_file(output)?; - Ok(FileStatus::Changed) - } else { - Ok(FileStatus::Message(Message::Diff { - file_name: workspace_file.path.display().to_string(), - old: input, - new: output, - diff_kind: DiffKind::Format, - })) - } - } else { - Ok(FileStatus::Unchanged) + Some("svelte") => { + if output.is_empty() { + return Ok(FileStatus::Unchanged); } - }, - ) + output = SvelteFileHandler::output(input.as_str(), output.as_str()); + } + _ => {} + } + + debug!( + "Format output is different from intput: {}", + output != input + ); + if output != input { + if should_write { + workspace_file.update_file(output)?; + Ok(FileStatus::Changed) + } else { + Ok(FileStatus::Message(Message::Diff { + file_name: 
workspace_file.path.to_string(), + old: input, + new: output, + diff_kind: DiffKind::Format, + })) + } + } else { + Ok(FileStatus::Unchanged) + } } diff --git a/crates/biome_cli/src/execute/process_file/lint.rs b/crates/biome_cli/src/execute/process_file/lint.rs index 05ac42d9b5e9..d6cb1ca550ae 100644 --- a/crates/biome_cli/src/execute/process_file/lint.rs +++ b/crates/biome_cli/src/execute/process_file/lint.rs @@ -3,142 +3,153 @@ use crate::execute::process_file::workspace_file::WorkspaceFile; use crate::execute::process_file::{FileResult, FileStatus, Message, SharedTraversalOptions}; use crate::TraversalMode; use biome_analyze::RuleCategoriesBuilder; -use biome_diagnostics::{category, Error}; +use biome_diagnostics::{category, DiagnosticExt, Error}; +use biome_fs::{BiomePath, TraversalContext}; use biome_rowan::TextSize; +use biome_service::diagnostics::FileTooLarge; use biome_service::file_handlers::{AstroFileHandler, SvelteFileHandler, VueFileHandler}; -use std::ffi::OsStr; -use std::path::Path; use std::sync::atomic::Ordering; +use tracing::{info, instrument}; /// Lints a single file and returns a [FileResult] +#[instrument(level = "debug", name = "cli_lint", skip_all)] pub(crate) fn lint<'ctx>( ctx: &'ctx SharedTraversalOptions<'ctx, '_>, - path: &Path, + path: BiomePath, suppress: bool, suppression_reason: Option<&str>, ) -> FileResult { let mut workspace_file = WorkspaceFile::new(ctx, path)?; - lint_with_guard(ctx, &mut workspace_file, suppress, suppression_reason) + let result = workspace_file.guard().check_file_size()?; + if result.is_too_large() { + ctx.push_diagnostic( + FileTooLarge::from(result) + .with_file_path(workspace_file.path.to_string()) + .with_category(category!("lint")), + ); + Ok(FileStatus::Ignored) + } else { + lint_with_guard(ctx, &mut workspace_file, suppress, suppression_reason) + } } +#[instrument(level = "debug", name = "cli_lint_guard", skip_all)] + pub(crate) fn lint_with_guard<'ctx>( ctx: &'ctx SharedTraversalOptions<'ctx, '_>, workspace_file: &mut WorkspaceFile, suppress: bool, suppression_reason: Option<&str>, ) -> FileResult { - tracing::info_span!("Processes linting", path =? workspace_file.path.display()).in_scope( - move || { - let mut input = workspace_file.input()?; - let mut changed = false; - let (only, skip) = - if let TraversalMode::Lint { only, skip, .. } = ctx.execution.traversal_mode() { - (only.clone(), skip.clone()) - } else { - (Vec::new(), Vec::new()) - }; - if let Some(fix_mode) = ctx.execution.as_fix_file_mode() { - let suppression_explanation = if suppress && suppression_reason.is_none() { - "ignored using `--suppress`" - } else { - suppression_reason.unwrap_or("") - }; + let mut input = workspace_file.input()?; + let mut changed = false; + let (only, skip) = + if let TraversalMode::Lint { only, skip, .. 
} = ctx.execution.traversal_mode() { + (only.clone(), skip.clone()) + } else { + (Vec::new(), Vec::new()) + }; + if let Some(fix_mode) = ctx.execution.as_fix_file_mode() { + let suppression_explanation = if suppress && suppression_reason.is_none() { + "ignored using `--suppress`" + } else { + suppression_reason.unwrap_or("") + }; - let fix_result = workspace_file - .guard() - .fix_file( - *fix_mode, - false, - RuleCategoriesBuilder::default() - .with_syntax() - .with_lint() - .build(), - only.clone(), - skip.clone(), - Some(suppression_explanation.to_string()), - ) - .with_file_path_and_code( - workspace_file.path.display().to_string(), - category!("lint"), - )?; + let fix_result = workspace_file + .guard() + .fix_file( + *fix_mode, + false, + RuleCategoriesBuilder::default() + .with_syntax() + .with_lint() + .build(), + only.clone(), + skip.clone(), + Some(suppression_explanation.to_string()), + ) + .with_file_path_and_code(workspace_file.path.to_string(), category!("lint"))?; - ctx.push_message(Message::SkippedFixes { - skipped_suggested_fixes: fix_result.skipped_suggested_fixes, - }); + info!( + "Fix file summary result. Errors {}, skipped fixes {}, actions {}", + fix_result.errors, + fix_result.skipped_suggested_fixes, + fix_result.actions.len() + ); - let mut output = fix_result.code; + ctx.push_message(Message::SkippedFixes { + skipped_suggested_fixes: fix_result.skipped_suggested_fixes, + }); - match workspace_file.as_extension().map(OsStr::as_encoded_bytes) { - Some(b"astro") => { - output = AstroFileHandler::output(input.as_str(), output.as_str()); - } - Some(b"vue") => { - output = VueFileHandler::output(input.as_str(), output.as_str()); - } - Some(b"svelte") => { - output = SvelteFileHandler::output(input.as_str(), output.as_str()); - } - _ => {} - } - if output != input { - changed = true; - workspace_file.update_file(output)?; - input = workspace_file.input()?; - } + let mut output = fix_result.code; + + match workspace_file.as_extension() { + Some("astro") => { + output = AstroFileHandler::output(input.as_str(), output.as_str()); } + Some("vue") => { + output = VueFileHandler::output(input.as_str(), output.as_str()); + } + Some("svelte") => { + output = SvelteFileHandler::output(input.as_str(), output.as_str()); + } + _ => {} + } + if output != input { + changed = true; + workspace_file.update_file(output)?; + input = workspace_file.input()?; + } + } - let max_diagnostics = ctx.remaining_diagnostics.load(Ordering::Relaxed); - let pull_diagnostics_result = workspace_file - .guard() - .pull_diagnostics( - RuleCategoriesBuilder::default() - .with_syntax() - .with_lint() - .build(), - max_diagnostics, - only, - skip, - ) - .with_file_path_and_code( - workspace_file.path.display().to_string(), - category!("lint"), - )?; + let max_diagnostics = ctx.remaining_diagnostics.load(Ordering::Relaxed); + let pull_diagnostics_result = workspace_file + .guard() + .pull_diagnostics( + RuleCategoriesBuilder::default() + .with_syntax() + .with_lint() + .build(), + max_diagnostics, + only, + skip, + ) + .with_file_path_and_code(workspace_file.path.to_string(), category!("lint"))?; - let no_diagnostics = pull_diagnostics_result.diagnostics.is_empty() - && pull_diagnostics_result.skipped_diagnostics == 0; + let no_diagnostics = pull_diagnostics_result.diagnostics.is_empty() + && pull_diagnostics_result.skipped_diagnostics == 0; - if !no_diagnostics { - let offset = match workspace_file.as_extension().map(OsStr::as_encoded_bytes) { - Some(b"vue") => VueFileHandler::start(input.as_str()), - 
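
Across these hunks the `std::path::Path`/`OsStr` extension handling is replaced with `camino`'s UTF-8 paths, which is why byte-literal match arms such as `Some(b"astro")` become plain string patterns. A small sketch of the difference, assuming the `camino` crate that the diff already imports:

```rust
use std::ffi::OsStr;
use std::path::Path;

use camino::Utf8Path;

fn main() {
    // std paths expose extensions as OsStr, so matching against a literal needs
    // an extra conversion step (to_str or as_encoded_bytes).
    let std_ext = Path::new("component.astro")
        .extension()
        .and_then(OsStr::to_str);
    assert_eq!(std_ext, Some("astro"));

    // camino's Utf8Path is UTF-8 by construction, so the extension is already a
    // &str and can be matched directly against "astro" / "vue" / "svelte".
    let framework = match Utf8Path::new("component.astro").extension() {
        Some("astro") => "Astro",
        Some("vue") => "Vue",
        Some("svelte") => "Svelte",
        _ => "plain",
    };
    assert_eq!(framework, "Astro");
}
```
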
Some(b"astro") => AstroFileHandler::start(input.as_str()), - Some(b"svelte") => SvelteFileHandler::start(input.as_str()), - _ => None, - }; + if !no_diagnostics { + let offset = match workspace_file.as_extension() { + Some("vue") => VueFileHandler::start(input.as_str()), + Some("astro") => AstroFileHandler::start(input.as_str()), + Some("svelte") => SvelteFileHandler::start(input.as_str()), + _ => None, + }; - ctx.push_message(Message::Diagnostics { - name: workspace_file.path.display().to_string(), - content: input, - diagnostics: pull_diagnostics_result - .diagnostics - .into_iter() - .map(|d| { - if let Some(offset) = offset { - d.with_offset(TextSize::from(offset)) - } else { - d - } - }) - .map(Error::from) - .collect(), - skipped_diagnostics: pull_diagnostics_result.skipped_diagnostics as u32, - }); - } + ctx.push_message(Message::Diagnostics { + file_path: workspace_file.path.to_string(), + content: input, + diagnostics: pull_diagnostics_result + .diagnostics + .into_iter() + .map(|d| { + if let Some(offset) = offset { + d.with_offset(TextSize::from(offset)) + } else { + d + } + }) + .map(Error::from) + .collect(), + skipped_diagnostics: pull_diagnostics_result.skipped_diagnostics as u32, + }); + } - if changed { - Ok(FileStatus::Changed) - } else { - Ok(FileStatus::Unchanged) - } - }, - ) + if changed { + Ok(FileStatus::Changed) + } else { + Ok(FileStatus::Unchanged) + } } diff --git a/crates/biome_cli/src/execute/process_file/organize_imports.rs b/crates/biome_cli/src/execute/process_file/organize_imports.rs deleted file mode 100644 index eb2bb9902222..000000000000 --- a/crates/biome_cli/src/execute/process_file/organize_imports.rs +++ /dev/null @@ -1,69 +0,0 @@ -use std::ffi::OsStr; - -use crate::execute::diagnostics::ResultExt; -use crate::execute::process_file::workspace_file::WorkspaceFile; -use crate::execute::process_file::{ - DiffKind, FileResult, FileStatus, Message, SharedTraversalOptions, -}; -use biome_diagnostics::category; -use biome_service::file_handlers::{AstroFileHandler, SvelteFileHandler, VueFileHandler}; - -/// Lints a single file and returns a [FileResult] -pub(crate) fn organize_imports_with_guard<'ctx>( - ctx: &'ctx SharedTraversalOptions<'ctx, '_>, - workspace_file: &mut WorkspaceFile, -) -> FileResult { - tracing::info_span!("Processes import sorting", path =? 
workspace_file.path.display()).in_scope( - move || { - let sorted = workspace_file - .guard() - .organize_imports() - .with_file_path_and_code( - workspace_file.path.display().to_string(), - category!("organizeImports"), - )?; - - let input = workspace_file.input()?; - let mut output = sorted.code; - - match workspace_file.as_extension().map(OsStr::as_encoded_bytes) { - Some(b"astro") => { - if output.is_empty() { - return Ok(FileStatus::Unchanged); - } - output = AstroFileHandler::output(input.as_str(), output.as_str()); - } - Some(b"vue") => { - if output.is_empty() { - return Ok(FileStatus::Unchanged); - } - output = VueFileHandler::output(input.as_str(), output.as_str()); - } - - Some(b"svelte") => { - if output.is_empty() { - return Ok(FileStatus::Unchanged); - } - output = SvelteFileHandler::output(input.as_str(), output.as_str()); - } - _ => {} - } - - if output != input { - if ctx.execution.is_check_apply() || ctx.execution.is_check_apply_unsafe() { - workspace_file.update_file(output)?; - } else { - return Ok(FileStatus::Message(Message::Diff { - file_name: workspace_file.path.display().to_string(), - old: input, - new: output, - diff_kind: DiffKind::OrganizeImports, - })); - } - Ok(FileStatus::Changed) - } else { - Ok(FileStatus::Unchanged) - } - }, - ) -} diff --git a/crates/biome_cli/src/execute/process_file/search.rs b/crates/biome_cli/src/execute/process_file/search.rs index 8a9a7d5fb94a..1e046a07ec51 100644 --- a/crates/biome_cli/src/execute/process_file/search.rs +++ b/crates/biome_cli/src/execute/process_file/search.rs @@ -2,16 +2,27 @@ use crate::execute::diagnostics::{ResultExt, SearchDiagnostic}; use crate::execute::process_file::workspace_file::WorkspaceFile; use crate::execute::process_file::{FileResult, FileStatus, Message, SharedTraversalOptions}; use biome_diagnostics::{category, DiagnosticExt}; +use biome_fs::{BiomePath, TraversalContext}; +use biome_service::diagnostics::FileTooLarge; use biome_service::workspace::PatternId; -use std::path::Path; pub(crate) fn search<'ctx>( ctx: &'ctx SharedTraversalOptions<'ctx, '_>, - path: &Path, + path: BiomePath, pattern: &PatternId, ) -> FileResult { let mut workspace_file = WorkspaceFile::new(ctx, path)?; - search_with_guard(ctx, &mut workspace_file, pattern) + let result = workspace_file.guard().check_file_size()?; + if result.is_too_large() { + ctx.push_diagnostic( + FileTooLarge::from(result) + .with_file_path(workspace_file.path.to_string()) + .with_category(category!("search")), + ); + Ok(FileStatus::Ignored) + } else { + search_with_guard(ctx, &mut workspace_file, pattern) + } } pub(crate) fn search_with_guard<'ctx>( @@ -19,32 +30,26 @@ pub(crate) fn search_with_guard<'ctx>( workspace_file: &mut WorkspaceFile, pattern: &PatternId, ) -> FileResult { - tracing::info_span!("Processes searching", path =? workspace_file.path.display()).in_scope( - move || { - let result = workspace_file - .guard() - .search_pattern(pattern) - .with_file_path_and_code( - workspace_file.path.display().to_string(), - category!("search"), - )?; + let _ = tracing::info_span!("Search ", path =? 
workspace_file.path).entered(); + let result = workspace_file + .guard() + .search_pattern(pattern) + .with_file_path_and_code(workspace_file.path.to_string(), category!("search"))?; - let input = workspace_file.input()?; - let file_name = workspace_file.path.display().to_string(); - let matches_len = result.matches.len(); + let input = workspace_file.input()?; + let file_name = workspace_file.path.to_string(); + let matches_len = result.matches.len(); - let search_results = Message::Diagnostics { - name: file_name, - content: input, - diagnostics: result - .matches - .into_iter() - .map(|mat| SearchDiagnostic.with_file_span(mat)) - .collect(), - skipped_diagnostics: 0, - }; + let search_results = Message::Diagnostics { + file_path: file_name, + content: input, + diagnostics: result + .matches + .into_iter() + .map(|mat| SearchDiagnostic.with_file_span(mat)) + .collect(), + skipped_diagnostics: 0, + }; - Ok(FileStatus::SearchResult(matches_len, search_results)) - }, - ) + Ok(FileStatus::SearchResult(matches_len, search_results)) } diff --git a/crates/biome_cli/src/execute/process_file/workspace_file.rs b/crates/biome_cli/src/execute/process_file/workspace_file.rs index 8855bd84e06e..1a3a8d9ac1d8 100644 --- a/crates/biome_cli/src/execute/process_file/workspace_file.rs +++ b/crates/biome_cli/src/execute/process_file/workspace_file.rs @@ -2,16 +2,14 @@ use crate::execute::diagnostics::{ResultExt, ResultIoExt}; use crate::execute::process_file::SharedTraversalOptions; use biome_diagnostics::{category, Error}; use biome_fs::{BiomePath, File, OpenOptions}; -use biome_service::workspace::{FileGuard, OpenFileParams}; +use biome_service::workspace::{FileContent, FileGuard, OpenFileParams}; use biome_service::{Workspace, WorkspaceError}; -use std::ffi::OsStr; -use std::path::{Path, PathBuf}; /// Small wrapper that holds information and operations around the current processed file pub(crate) struct WorkspaceFile<'ctx, 'app> { guard: FileGuard<'app, dyn Workspace + 'ctx>, file: Box, - pub(crate) path: PathBuf, + pub(crate) path: BiomePath, } impl<'ctx, 'app> WorkspaceFile<'ctx, 'app> { @@ -19,37 +17,34 @@ impl<'ctx, 'app> WorkspaceFile<'ctx, 'app> { /// saving these information internally pub(crate) fn new( ctx: &SharedTraversalOptions<'ctx, 'app>, - path: &Path, + path: BiomePath, ) -> Result { - let biome_path = BiomePath::new(path); let open_options = OpenOptions::default() .read(true) .write(ctx.execution.requires_write_access()); let mut file = ctx .fs - .open_with_options(path, open_options) - .with_file_path(path.display().to_string())?; + .open_with_options(path.as_path(), open_options) + .with_file_path(path.to_string())?; let mut input = String::new(); file.read_to_string(&mut input) - .with_file_path(path.display().to_string())?; + .with_file_path(path.to_string())?; let guard = FileGuard::open( ctx.workspace, OpenFileParams { + project_key: ctx.project_key, document_file_source: None, - path: biome_path, + path: path.clone(), version: 0, - content: input.clone(), + content: FileContent::FromClient(input.clone()), + persist_node_cache: false, }, ) - .with_file_path_and_code(path.display().to_string(), category!("internalError/fs"))?; + .with_file_path_and_code(path.to_string(), category!("internalError/fs"))?; - Ok(Self { - file, - guard, - path: PathBuf::from(path), - }) + Ok(Self { file, guard, path }) } pub(crate) fn guard(&self) -> &FileGuard<'app, dyn Workspace + 'ctx> { @@ -60,7 +55,7 @@ impl<'ctx, 'app> WorkspaceFile<'ctx, 'app> { self.guard().get_file_content() } - pub(crate) fn 
as_extension(&self) -> Option<&OsStr> { + pub(crate) fn as_extension(&self) -> Option<&str> { self.path.extension() } @@ -70,7 +65,7 @@ impl<'ctx, 'app> WorkspaceFile<'ctx, 'app> { self.file .set_content(new_content.as_bytes()) - .with_file_path(self.path.display().to_string())?; + .with_file_path(self.path.to_string())?; self.guard .change_file(self.file.file_version(), new_content)?; Ok(()) diff --git a/crates/biome_cli/src/execute/std_in.rs b/crates/biome_cli/src/execute/std_in.rs index 81f0abb04546..a7bba8656a4c 100644 --- a/crates/biome_cli/src/execute/std_in.rs +++ b/crates/biome_cli/src/execute/std_in.rs @@ -8,15 +8,17 @@ use biome_diagnostics::Diagnostic; use biome_diagnostics::PrintDiagnostic; use biome_fs::BiomePath; use biome_service::file_handlers::{AstroFileHandler, SvelteFileHandler, VueFileHandler}; +use biome_service::projects::ProjectKey; use biome_service::workspace::{ - ChangeFileParams, DropPatternParams, FeaturesBuilder, FixFileParams, FormatFileParams, - OpenFileParams, OrganizeImportsParams, SupportsFeatureParams, + ChangeFileParams, DropPatternParams, FeaturesBuilder, FileContent, FixFileParams, + FormatFileParams, OpenFileParams, SupportsFeatureParams, }; use biome_service::WorkspaceError; use std::borrow::Cow; pub(crate) fn run<'a>( session: CliSession, + project_key: ProjectKey, mode: &'a Execution, biome_path: BiomePath, content: &'a str, @@ -28,12 +30,12 @@ pub(crate) fn run<'a>( if mode.is_format() { let file_features = workspace.file_features(SupportsFeatureParams { + project_key, path: biome_path.clone(), features: FeaturesBuilder::new().with_formatter().build(), })?; if file_features.is_protected() { - let protected_diagnostic = - WorkspaceError::protected_file(biome_path.display().to_string()); + let protected_diagnostic = WorkspaceError::protected_file(biome_path.to_string()); if protected_diagnostic.tags().is_verbose() { if verbose { console.error(markup! {{PrintDiagnostic::verbose(&protected_diagnostic)}}) @@ -46,20 +48,23 @@ pub(crate) fn run<'a>( }; if file_features.supports_format() { workspace.open_file(OpenFileParams { + project_key, path: biome_path.clone(), version: 0, - content: content.into(), + content: FileContent::FromClient(content.into()), document_file_source: None, + persist_node_cache: false, })?; let printed = workspace.format_file(FormatFileParams { + project_key, path: biome_path.clone(), })?; let code = printed.into_code(); - let output = match biome_path.extension().map(|ext| ext.as_encoded_bytes()) { - Some(b"astro") => AstroFileHandler::output(content, code.as_str()), - Some(b"vue") => VueFileHandler::output(content, code.as_str()), - Some(b"svelte") => SvelteFileHandler::output(content, code.as_str()), + let output = match biome_path.extension() { + Some("astro") => AstroFileHandler::output(content, code.as_str()), + Some("vue") => VueFileHandler::output(content, code.as_str()), + Some("svelte") => SvelteFileHandler::output(content, code.as_str()), _ => code, }; console.append(markup! 
{ @@ -78,24 +83,26 @@ pub(crate) fn run<'a>( let mut new_content = Cow::Borrowed(content); workspace.open_file(OpenFileParams { + project_key, path: biome_path.clone(), version: 0, - content: content.into(), + content: FileContent::FromClient(content.into()), document_file_source: None, + persist_node_cache: false, })?; // apply fix file of the linter let file_features = workspace.file_features(SupportsFeatureParams { + project_key, path: biome_path.clone(), features: FeaturesBuilder::new() .with_linter() - .with_organize_imports() + .with_assist() .with_formatter() .build(), })?; if file_features.is_protected() { - let protected_diagnostic = - WorkspaceError::protected_file(biome_path.display().to_string()); + let protected_diagnostic = WorkspaceError::protected_file(biome_path.to_string()); if protected_diagnostic.tags().is_verbose() { if verbose { console.error(markup! {{PrintDiagnostic::verbose(&protected_diagnostic)}}) @@ -114,51 +121,39 @@ pub(crate) fn run<'a>( }; if let Some(fix_file_mode) = mode.as_fix_file_mode() { - if file_features.supports_lint() { + if file_features.supports_lint() || file_features.supports_assist() { + let mut rule_categories = RuleCategoriesBuilder::default().with_syntax(); + + if file_features.supports_lint() { + rule_categories = rule_categories.with_lint(); + } + + if file_features.supports_assist() { + rule_categories = rule_categories.with_assist(); + } + let fix_file_result = workspace.fix_file(FixFileParams { + project_key, fix_file_mode: *fix_file_mode, path: biome_path.clone(), should_format: mode.is_check() && file_features.supports_format(), only: only.clone(), skip: skip.clone(), suppression_reason: None, - rule_categories: RuleCategoriesBuilder::default() - .with_syntax() - .with_lint() - .build(), + enabled_rules: vec![], + rule_categories: rule_categories.build(), })?; let code = fix_file_result.code; - let output = match biome_path.extension().map(|ext| ext.as_encoded_bytes()) { - Some(b"astro") => AstroFileHandler::output(&new_content, code.as_str()), - Some(b"vue") => VueFileHandler::output(&new_content, code.as_str()), - Some(b"svelte") => SvelteFileHandler::output(&new_content, code.as_str()), - _ => code, - }; - if output != new_content { - version += 1; - workspace.change_file(ChangeFileParams { - content: output.clone(), - path: biome_path.clone(), - version, - })?; - new_content = Cow::Owned(output); - } - } - - if file_features.supports_organize_imports() && mode.is_check() { - let result = workspace.organize_imports(OrganizeImportsParams { - path: biome_path.clone(), - })?; - let code = result.code; - let output = match biome_path.extension().map(|ext| ext.as_encoded_bytes()) { - Some(b"astro") => AstroFileHandler::output(&new_content, code.as_str()), - Some(b"vue") => VueFileHandler::output(&new_content, code.as_str()), - Some(b"svelte") => SvelteFileHandler::output(&new_content, code.as_str()), + let output = match biome_path.extension() { + Some("astro") => AstroFileHandler::output(&new_content, code.as_str()), + Some("vue") => VueFileHandler::output(&new_content, code.as_str()), + Some("svelte") => SvelteFileHandler::output(&new_content, code.as_str()), _ => code, }; if output != new_content { version += 1; workspace.change_file(ChangeFileParams { + project_key, content: output.clone(), path: biome_path.clone(), version, @@ -170,16 +165,19 @@ pub(crate) fn run<'a>( if file_features.supports_format() && mode.is_check() { let printed = workspace.format_file(FormatFileParams { + project_key, path: biome_path.clone(), })?; let code 
= printed.into_code(); - let output = match biome_path.extension().map(|ext| ext.as_encoded_bytes()) { - Some(b"astro") => AstroFileHandler::output(&new_content, code.as_str()), - Some(b"vue") => VueFileHandler::output(&new_content, code.as_str()), - Some(b"svelte") => SvelteFileHandler::output(&new_content, code.as_str()), + let output = match biome_path.extension() { + Some("astro") => AstroFileHandler::output(&new_content, code.as_str()), + Some("vue") => VueFileHandler::output(&new_content, code.as_str()), + Some("svelte") => SvelteFileHandler::output(&new_content, code.as_str()), _ => code, }; - if (mode.is_check_apply() || mode.is_check_apply_unsafe()) && output != new_content { + if (mode.is_safe_fixes_enabled() || mode.is_safe_and_unsafe_fixes_enabled()) + && output != new_content + { new_content = Cow::Owned(output); } } diff --git a/crates/biome_cli/src/execute/traverse.rs b/crates/biome_cli/src/execute/traverse.rs index 87e85afc4a3e..7dd8647506b8 100644 --- a/crates/biome_cli/src/execute/traverse.rs +++ b/crates/biome_cli/src/execute/traverse.rs @@ -2,9 +2,8 @@ use super::process_file::{process_file, DiffKind, FileStatus, Message}; use super::{Execution, TraversalMode}; use crate::cli_options::CliOptions; use crate::execute::diagnostics::{ - AssistsDiffDiagnostic, CIAssistsDiffDiagnostic, CIFormatDiffDiagnostic, - CIOrganizeImportsDiffDiagnostic, ContentDiffAdvice, FormatDiffDiagnostic, - OrganizeImportsDiffDiagnostic, PanicDiagnostic, + AssistDiffDiagnostic, CIAssistDiffDiagnostic, CIFormatDiffDiagnostic, ContentDiffAdvice, + FormatDiffDiagnostic, PanicDiagnostic, }; use crate::reporter::TraversalSummary; use crate::{CliDiagnostic, CliSession}; @@ -13,8 +12,10 @@ use biome_diagnostics::{category, DiagnosticExt, Error, Resource, Severity}; use biome_fs::{BiomePath, FileSystem, PathInterner}; use biome_fs::{TraversalContext, TraversalScope}; use biome_service::dome::Dome; +use biome_service::projects::ProjectKey; use biome_service::workspace::{DropPatternParams, IsPathIgnoredParams}; use biome_service::{extension_error, workspace::SupportsFeatureParams, Workspace, WorkspaceError}; +use camino::{Utf8Path, Utf8PathBuf}; use crossbeam::channel::{unbounded, Receiver, Sender}; use rustc_hash::FxHashSet; use std::collections::BTreeSet; @@ -25,13 +26,11 @@ use std::{ ffi::OsString, panic::catch_unwind, path::PathBuf, - sync::{ - atomic::{AtomicUsize, Ordering}, - Once, - }, + sync::atomic::{AtomicUsize, Ordering}, thread, time::{Duration, Instant}, }; +use tracing::{instrument, Span}; pub(crate) struct TraverseResult { pub(crate) summary: TraversalSummary, @@ -42,11 +41,10 @@ pub(crate) struct TraverseResult { pub(crate) fn traverse( execution: &Execution, session: &mut CliSession, + project_key: ProjectKey, cli_options: &CliOptions, mut inputs: Vec, ) -> Result { - init_thread_pool(); - if inputs.is_empty() { match &execution.traversal_mode { TraversalMode::Check { .. 
} @@ -81,13 +79,14 @@ pub(crate) fn traverse( let matches = AtomicUsize::new(0); let skipped = AtomicUsize::new(0); - let fs = &*session.app.fs; let workspace = &*session.app.workspace; + let fs = workspace.fs(); let max_diagnostics = execution.get_max_diagnostics(); let remaining_diagnostics = AtomicU32::new(max_diagnostics); - let printer = DiagnosticsPrinter::new(execution) + let working_directory = fs.working_directory(); + let printer = DiagnosticsPrinter::new(execution, working_directory.as_deref()) .with_verbose(cli_options.verbose) .with_diagnostic_level(cli_options.diagnostic_level) .with_max_diagnostics(max_diagnostics); @@ -106,6 +105,7 @@ pub(crate) fn traverse( &TraversalOptions { fs, workspace, + project_key, execution, interner, matches: &matches, @@ -155,19 +155,6 @@ pub(crate) fn traverse( }) } -/// This function will setup the global Rayon thread pool the first time it's called -/// -/// This is currently only used to assign friendly debug names to the threads of the pool -fn init_thread_pool() { - static INIT_ONCE: Once = Once::new(); - INIT_ONCE.call_once(|| { - rayon::ThreadPoolBuilder::new() - .thread_name(|index| format!("biome::worker_{index}")) - .build_global() - .expect("failed to initialize the global thread pool"); - }); -} - /// Initiate the filesystem traversal tasks with the provided input paths and /// run it to completion, returning the duration of the process and the evaluated paths fn traverse_inputs( @@ -178,7 +165,10 @@ fn traverse_inputs( let start = Instant::now(); fs.traversal(Box::new(move |scope: &dyn TraversalScope| { for input in inputs { - scope.evaluate(ctx, PathBuf::from(input)); + scope.evaluate( + ctx, + Utf8PathBuf::from_path_buf(PathBuf::from(input)).expect("Valid UTF-8 path"), + ); } })); @@ -226,10 +216,13 @@ struct DiagnosticsPrinter<'ctx> { not_printed_diagnostics: AtomicU32, printed_diagnostics: AtomicU32, total_skipped_suggested_fixes: AtomicU32, + + /// The current working directory, borrowed from [FileSystem] + working_directory: Option<&'ctx Utf8Path>, } impl<'ctx> DiagnosticsPrinter<'ctx> { - fn new(execution: &'ctx Execution) -> Self { + fn new(execution: &'ctx Execution, working_directory: Option<&'ctx Utf8Path>) -> Self { Self { errors: AtomicU32::new(0), warnings: AtomicU32::new(0), @@ -241,6 +234,7 @@ impl<'ctx> DiagnosticsPrinter<'ctx> { not_printed_diagnostics: AtomicU32::new(0), printed_diagnostics: AtomicU32::new(0), total_skipped_suggested_fixes: AtomicU32::new(0), + working_directory, } } @@ -307,7 +301,7 @@ impl<'ctx> DiagnosticsPrinter<'ctx> { should_print } - fn run(&self, receiver: Receiver, interner: Receiver) -> Vec { + fn run(&self, receiver: Receiver, interner: Receiver) -> Vec { let mut paths: FxHashSet = FxHashSet::default(); let mut diagnostics_to_print = vec![]; @@ -343,9 +337,9 @@ impl<'ctx> DiagnosticsPrinter<'ctx> { None => loop { match interner.recv() { Ok(path) => { - paths.insert(path.display().to_string()); - if path.display().to_string() == *file_path { - break paths.get(&path.display().to_string()); + paths.insert(path.to_string()); + if path.as_str() == *file_path { + break paths.get(&path.to_string()); } } // In case the channel disconnected without sending @@ -369,7 +363,7 @@ impl<'ctx> DiagnosticsPrinter<'ctx> { } Message::Diagnostics { - name, + file_path, content, diagnostics, skipped_diagnostics, @@ -392,7 +386,13 @@ impl<'ctx> DiagnosticsPrinter<'ctx> { self.warnings.fetch_add(1, Ordering::Relaxed); } - let diag = diag.with_file_path(&name).with_file_source_code(&content); + let file_path 
= self + .working_directory + .and_then(|wd| file_path.strip_prefix(wd.as_str())) + .unwrap_or(file_path.as_str()); + let diag = diag + .with_file_path(file_path) + .with_file_source_code(&content); diagnostics_to_print.push(diag); } } else { @@ -411,8 +411,13 @@ impl<'ctx> DiagnosticsPrinter<'ctx> { let should_print = self.should_print(); if should_print { - let diag = - diag.with_file_path(&name).with_file_source_code(&content); + let file_path = self + .working_directory + .and_then(|wd| file_path.strip_prefix(wd.as_str())) + .unwrap_or(file_path.as_str()); + let diag = diag + .with_file_path(file_path) + .with_file_source_code(&content); diagnostics_to_print.push(diag) } } @@ -424,6 +429,10 @@ impl<'ctx> DiagnosticsPrinter<'ctx> { new, diff_kind, } => { + let file_name = self + .working_directory + .and_then(|wd| file_name.strip_prefix(wd.as_str())) + .unwrap_or(file_name.as_str()); // A diff is an error in CI mode and in format check mode let is_error = self.execution.is_ci() || !self.execution.is_format_write(); if is_error { @@ -448,20 +457,7 @@ impl<'ctx> DiagnosticsPrinter<'ctx> { match diff_kind { DiffKind::Format => { let diag = CIFormatDiffDiagnostic { - file_name: file_name.clone(), - diff: ContentDiffAdvice { - old: old.clone(), - new: new.clone(), - }, - }; - diagnostics_to_print.push( - diag.with_severity(severity) - .with_file_source_code(old.clone()), - ); - } - DiffKind::OrganizeImports => { - let diag = CIOrganizeImportsDiffDiagnostic { - file_name: file_name.clone(), + file_name: file_name.to_string(), diff: ContentDiffAdvice { old: old.clone(), new: new.clone(), @@ -472,9 +468,9 @@ impl<'ctx> DiagnosticsPrinter<'ctx> { .with_file_source_code(old.clone()), ); } - DiffKind::Assists => { - let diag = CIAssistsDiffDiagnostic { - file_name: file_name.clone(), + DiffKind::Assist => { + let diag = CIAssistDiffDiagnostic { + file_name: file_name.to_string(), diff: ContentDiffAdvice { old: old.clone(), new: new.clone(), @@ -490,7 +486,7 @@ impl<'ctx> DiagnosticsPrinter<'ctx> { match diff_kind { DiffKind::Format => { let diag = FormatDiffDiagnostic { - file_name: file_name.clone(), + file_name: file_name.to_string(), diff: ContentDiffAdvice { old: old.clone(), new: new.clone(), @@ -501,22 +497,9 @@ impl<'ctx> DiagnosticsPrinter<'ctx> { .with_file_source_code(old.clone()), ) } - DiffKind::OrganizeImports => { - let diag = OrganizeImportsDiffDiagnostic { - file_name: file_name.clone(), - diff: ContentDiffAdvice { - old: old.clone(), - new: new.clone(), - }, - }; - diagnostics_to_print.push( - diag.with_severity(severity) - .with_file_source_code(old.clone()), - ) - } - DiffKind::Assists => { - let diag = AssistsDiffDiagnostic { - file_name: file_name.clone(), + DiffKind::Assist => { + let diag = AssistDiffDiagnostic { + file_name: file_name.to_string(), diff: ContentDiffAdvice { old: old.clone(), new: new.clone(), @@ -543,6 +526,8 @@ pub(crate) struct TraversalOptions<'ctx, 'app> { pub(crate) fs: &'app dyn FileSystem, /// Instance of [Workspace] used by this instance of the CLI pub(crate) workspace: &'ctx dyn Workspace, + /// Key of the project in which we're traversing. 
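
The diagnostics printer above now borrows the working directory from the `FileSystem` and strips it from file paths before printing, so diagnostics show repo-relative locations. A minimal standalone sketch of that relativization pattern, assuming the `camino` crate; the helper name and the example paths are illustrative, not part of the diff:

```rust
use camino::Utf8Path;

/// Illustrative helper: strip the working-directory prefix from a path,
/// mirroring the pattern in `DiagnosticsPrinter`, where `working_directory`
/// is an `Option<&Utf8Path>` and the file path arrives as a string.
fn relativize<'a>(working_directory: Option<&Utf8Path>, file_path: &'a str) -> &'a str {
    working_directory
        .and_then(|wd| file_path.strip_prefix(wd.as_str()))
        .unwrap_or(file_path)
}

fn main() {
    let wd = Utf8Path::new("/home/user/project");
    assert_eq!(
        relativize(Some(wd), "/home/user/project/src/lib.rs"),
        "/src/lib.rs"
    );
    // With no working directory the path is printed as-is.
    assert_eq!(relativize(None, "src/lib.rs"), "src/lib.rs");
}
```
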
+ pub(crate) project_key: ProjectKey, /// Determines how the files should be processed pub(crate) execution: &'ctx Execution, /// File paths interner cache used by the filesystem traversal @@ -565,7 +550,7 @@ pub(crate) struct TraversalOptions<'ctx, 'app> { pub(crate) evaluated_paths: RwLock>, } -impl<'ctx, 'app> TraversalOptions<'ctx, 'app> { +impl TraversalOptions<'_, '_> { pub(crate) fn increment_changed(&self, path: &BiomePath) { self.changed.fetch_add(1, Ordering::Relaxed); self.evaluated_paths @@ -589,19 +574,17 @@ impl<'ctx, 'app> TraversalOptions<'ctx, 'app> { pub(crate) fn miss_handler_err(&self, err: WorkspaceError, biome_path: &BiomePath) { self.push_diagnostic( err.with_category(category!("files/missingHandler")) - .with_file_path(biome_path.display().to_string()) + .with_file_path(biome_path.to_string()) .with_tags(DiagnosticTags::VERBOSE), ); } pub(crate) fn protected_file(&self, biome_path: &BiomePath) { - self.push_diagnostic( - WorkspaceError::protected_file(biome_path.display().to_string()).into(), - ) + self.push_diagnostic(WorkspaceError::protected_file(biome_path.to_string()).into()) } } -impl<'ctx, 'app> TraversalContext for TraversalOptions<'ctx, 'app> { +impl TraversalContext for TraversalOptions<'_, '_> { fn interner(&self) -> &PathInterner { &self.interner } @@ -614,6 +597,7 @@ impl<'ctx, 'app> TraversalContext for TraversalOptions<'ctx, 'app> { self.push_message(error); } + #[instrument(level = "debug", skip(self, biome_path))] fn can_handle(&self, biome_path: &BiomePath) -> bool { let path = biome_path.as_path(); if self.fs.path_is_dir(path) || self.fs.path_is_symlink(path) { @@ -626,22 +610,27 @@ impl<'ctx, 'app> TraversalContext for TraversalOptions<'ctx, 'app> { let can_handle = !self .workspace .is_path_ignored(IsPathIgnoredParams { - biome_path: biome_path.clone(), + project_key: self.project_key, + path: biome_path.clone(), features: self.execution.to_feature(), }) .unwrap_or_else(|err| { self.push_diagnostic(err.into()); false }); + Span::current().record("can_handle", can_handle); + return can_handle; } // bail on fifo and socket files if !self.fs.path_is_file(path) { + Span::current().record("can_handle", false); return false; } let file_features = self.workspace.file_features(SupportsFeatureParams { + project_key: self.project_key, path: biome_path.clone(), features: self.execution.to_feature(), }); @@ -650,34 +639,38 @@ impl<'ctx, 'app> TraversalContext for TraversalOptions<'ctx, 'app> { Ok(file_features) => { if file_features.is_protected() { self.protected_file(biome_path); + Span::current().record("can_handle", false); return false; } if file_features.is_not_supported() && !file_features.is_ignored() { // we should throw a diagnostic if we can't handle a file that isn't ignored self.miss_handler_err(extension_error(biome_path), biome_path); + Span::current().record("can_handle", false); return false; } file_features } Err(err) => { self.miss_handler_err(err, biome_path); - + Span::current().record("can_handle", false); return false; } }; - match self.execution.traversal_mode() { + let result = match self.execution.traversal_mode() { TraversalMode::Check { .. } | TraversalMode::CI { .. } => { file_features.supports_lint() || file_features.supports_format() - || file_features.supports_organize_imports() + || file_features.supports_assist() } TraversalMode::Format { .. } => file_features.supports_format(), TraversalMode::Lint { .. } => file_features.supports_lint(), // Imagine if Biome can't handle its own configuration file... 
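
The `can_handle` traversal check above records its outcome on the current `tracing` span. A minimal sketch of that pattern outside Biome; declaring the field as `field::Empty` up front and the `tracing-subscriber` setup are assumptions of the sketch, not something shown in the diff:

```rust
use tracing::{field, instrument, Level, Span};

// Declare `can_handle` as an empty field so it can be filled in later with
// `Span::current().record(...)`, mirroring the traversal code above.
#[instrument(level = "debug", skip(path), fields(can_handle = field::Empty))]
fn can_handle(path: &str) -> bool {
    let result = path.ends_with(".js") || path.ends_with(".json");
    Span::current().record("can_handle", result);
    result
}

fn main() {
    // Requires the `tracing-subscriber` crate with the default `fmt` feature.
    tracing_subscriber::fmt().with_max_level(Level::DEBUG).init();
    can_handle("src/index.js");
}
```

Recording the result on the span rather than emitting a separate event keeps the information attached to the same `can_handle` span that the `#[instrument]` attribute opens.
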
TraversalMode::Migrate { .. } => true, TraversalMode::Search { .. } => file_features.supports_search(), - } + }; + Span::current().record("can_handle", result); + result } fn handle_path(&self, path: BiomePath) { @@ -731,9 +724,7 @@ fn handle_file(ctx: &TraversalOptions, path: &BiomePath) { }, }; - ctx.push_message( - PanicDiagnostic { message }.with_file_path(path.display().to_string()), - ); + ctx.push_message(PanicDiagnostic { message }.with_file_path(path.to_string())); } } } diff --git a/crates/biome_cli/src/lib.rs b/crates/biome_cli/src/lib.rs index 139936b68569..f521d92b7a9f 100644 --- a/crates/biome_cli/src/lib.rs +++ b/crates/biome_cli/src/lib.rs @@ -6,9 +6,8 @@ //! to parse commands and arguments, redirect the execution of the commands and //! execute the traversal of directory and files, based on the command that were passed. -use biome_console::{markup, ColorMode, Console, ConsoleExt}; -use biome_fs::OsFileSystem; -use biome_service::{App, DynRef, Workspace, WorkspaceRef}; +use biome_console::{ColorMode, Console}; +use biome_service::{App, Workspace, WorkspaceRef}; use commands::search::SearchCommandPayload; use std::env; @@ -18,7 +17,6 @@ mod commands; mod diagnostics; mod execute; mod logging; -mod metrics; mod panic; mod reporter; mod service; @@ -55,28 +53,13 @@ impl<'app> CliSession<'app> { console: &'app mut dyn Console, ) -> Result { Ok(Self { - app: App::new( - DynRef::Owned(Box::::default()), - console, - WorkspaceRef::Borrowed(workspace), - ), + app: App::new(console, WorkspaceRef::Borrowed(workspace)), }) } /// Main function to run Biome CLI pub fn run(self, command: BiomeCommand) -> Result<(), CliDiagnostic> { - let has_metrics = command.has_metrics(); - if has_metrics { - crate::metrics::init_metrics(); - } - // TODO: remove in Biome v2 - if env::var_os("BIOME_LOG_DIR").is_some() { - self.app.console.log(markup! { - "The use of BIOME_LOG_DIR is deprecated. Use BIOME_LOG_PATH instead." 
- }); - } - - let result = match command { + match command { BiomeCommand::Version(_) => commands::version::full_version(self), BiomeCommand::Rage(_, daemon_logs, formatter, linter) => { commands::rage::rage(self, daemon_logs, formatter, linter) @@ -89,8 +72,6 @@ impl<'app> CliSession<'app> { } => commands::daemon::start(self, config_path, Some(log_path), Some(log_prefix_name)), BiomeCommand::Stop => commands::daemon::stop(self), BiomeCommand::Check { - apply, - apply_unsafe, write, fix, unsafe_, @@ -99,9 +80,8 @@ impl<'app> CliSession<'app> { paths, stdin_file_path, linter_enabled, - organize_imports_enabled, formatter_enabled, - assists_enabled, + assist_enabled, staged, changed, since, @@ -109,8 +89,6 @@ impl<'app> CliSession<'app> { self, &cli_options, CheckCommandPayload { - apply_unsafe, - apply, write, fix, unsafe_, @@ -118,17 +96,14 @@ impl<'app> CliSession<'app> { paths, stdin_file_path, linter_enabled, - organize_imports_enabled, formatter_enabled, - assists_enabled, + assist_enabled, staged, changed, since, }, ), BiomeCommand::Lint { - apply, - apply_unsafe, write, suppress, suppression_reason, @@ -153,8 +128,6 @@ impl<'app> CliSession<'app> { self, &cli_options, LintCommandPayload { - apply_unsafe, - apply, write, suppress, suppression_reason, @@ -179,8 +152,7 @@ impl<'app> CliSession<'app> { BiomeCommand::Ci { linter_enabled, formatter_enabled, - organize_imports_enabled, - assists_enabled, + assist_enabled, configuration, paths, cli_options, @@ -192,8 +164,7 @@ impl<'app> CliSession<'app> { CiCommandPayload { linter_enabled, formatter_enabled, - organize_imports_enabled, - assists_enabled, + assist_enabled, configuration, paths, changed, @@ -265,6 +236,7 @@ impl<'app> CliSession<'app> { files_configuration, paths, pattern, + language, stdin_file_path, vcs_configuration, } => run_command( @@ -274,6 +246,7 @@ impl<'app> CliSession<'app> { files_configuration, paths, pattern, + language, stdin_file_path, vcs_configuration, }, @@ -290,13 +263,7 @@ impl<'app> CliSession<'app> { Some(log_prefix_name), ), BiomeCommand::PrintSocket => commands::daemon::print_socket(), - }; - - if has_metrics { - metrics::print_metrics(); } - - result } } diff --git a/crates/biome_cli/src/main.rs b/crates/biome_cli/src/main.rs index 83cbc2e351cd..6a84dca8e1b4 100644 --- a/crates/biome_cli/src/main.rs +++ b/crates/biome_cli/src/main.rs @@ -10,6 +10,7 @@ use biome_cli::{ }; use biome_console::{markup, ConsoleExt, EnvConsole}; use biome_diagnostics::{set_bottom_frame, Diagnostic, PrintDiagnostic}; +use biome_fs::OsFileSystem; use biome_service::workspace; use std::process::{ExitCode, Termination}; use tokio::runtime::Runtime; @@ -57,14 +58,15 @@ fn main() -> ExitCode { fn run_workspace(console: &mut EnvConsole, command: BiomeCommand) -> Result<(), CliDiagnostic> { // If the `--use-server` CLI flag is set, try to open a connection to an // existing Biome server socket + let fs = Box::new(OsFileSystem::default()); let workspace = if command.should_use_server() { let runtime = Runtime::new()?; match open_transport(runtime)? 
{ - Some(transport) => workspace::client(transport)?, + Some(transport) => workspace::client(transport, fs)?, None => return Err(CliDiagnostic::server_not_running()), } } else { - workspace::server() + workspace::server(fs) }; let session = CliSession::new(&*workspace, console)?; diff --git a/crates/biome_cli/src/metrics.rs b/crates/biome_cli/src/metrics.rs deleted file mode 100644 index 03c88cb41f9c..000000000000 --- a/crates/biome_cli/src/metrics.rs +++ /dev/null @@ -1,410 +0,0 @@ -use std::{ - borrow::Cow, - hash::Hash, - ops::Sub, - ptr, - time::{Duration, Instant}, -}; - -use hdrhistogram::Histogram; -use rustc_hash::FxHashMap; -use std::sync::{LazyLock, Mutex, RwLock}; -use tracing::{span, subscriber::Interest, Level, Metadata, Subscriber}; -use tracing_subscriber::{ - layer::Context, - prelude::*, - registry::{LookupSpan, SpanRef}, - Layer, -}; - -/// Implementation of a tracing [Layer] that collects timing information for spans into [Histogram]s -struct MetricsLayer; - -static METRICS: LazyLock>>> = - LazyLock::new(RwLock::default); - -/// Static pointer to the metadata of a callsite, used as a unique identifier -/// for collecting spans created from there in the global metrics map -struct CallsiteKey(&'static Metadata<'static>); - -impl PartialEq for CallsiteKey { - fn eq(&self, other: &Self) -> bool { - ptr::eq(self.0, other.0) - } -} - -impl Eq for CallsiteKey {} - -impl Hash for CallsiteKey { - fn hash(&self, state: &mut H) { - ptr::hash(self.0, state); - } -} - -/// Single entry in the global callsite storage, containing handles to the -/// histograms associated with this callsite -enum CallsiteEntry { - /// Spans with the debug level only count their total duration - Debug { total: Histogram }, - /// Spans with the trace level count their total duration as well as - /// individual busy and idle times - Trace { - total: Histogram, - busy: Histogram, - idle: Histogram, - }, -} - -impl CallsiteEntry { - fn from_level(level: &Level) -> Self { - /// Number of significant figures retained by the histogram - const SIGNIFICANT_FIGURES: u8 = 3; - - match level { - &Level::TRACE => Self::Trace { - // SAFETY: Histogram::new only returns an error if the value of - // SIGNIFICANT_FIGURES is invalid, 3 is statically known to work - total: Histogram::new(SIGNIFICANT_FIGURES).unwrap(), - busy: Histogram::new(SIGNIFICANT_FIGURES).unwrap(), - idle: Histogram::new(SIGNIFICANT_FIGURES).unwrap(), - }, - _ => Self::Debug { - total: Histogram::new(SIGNIFICANT_FIGURES).unwrap(), - }, - } - } - - fn into_histograms(self, name: &str) -> Vec<(Cow, Histogram)> { - match self { - CallsiteEntry::Debug { total } => vec![(Cow::Borrowed(name), total)], - CallsiteEntry::Trace { total, busy, idle } => vec![ - (Cow::Borrowed(name), total), - (Cow::Owned(format!("{name}.busy")), busy), - (Cow::Owned(format!("{name}.idle")), idle), - ], - } - } -} - -/// Extension data attached to tracing spans to keep track of their idle and busy time -/// -/// Most of the associated code is based on the similar logic found in `tracing-subscriber` -/// for printing span timings to the console: -/// https://github.com/tokio-rs/tracing/blob/6f23c128fced6409008838a3223d76d7332d79e9/tracing-subscriber/src/fmt/fmt_subscriber.rs#L973 -struct Timings { - idle: u64, - busy: u64, - last: I, -} - -trait Timepoint: Sub + Copy + Sized { - fn now() -> Self; -} - -impl Timepoint for Instant { - fn now() -> Self { - Instant::now() - } -} - -impl Timings { - fn new() -> Self { - Self { - idle: 0, - busy: 0, - last: I::now(), - } - } - - /// 
Count the time between the last update and now as idle - fn enter(&mut self, now: I) { - self.idle += (now - self.last).as_nanos() as u64; - self.last = now; - } - - /// Count the time between the last update and now as busy - fn exit(&mut self, now: I) { - self.busy += (now - self.last).as_nanos() as u64; - self.last = now; - } - - /// Exit the timing for this span, and record it into a callsite entry - fn record(mut self, now: I, entry: &mut CallsiteEntry) { - self.exit(now); - - match entry { - CallsiteEntry::Debug { total } => { - total.record(self.busy + self.idle).unwrap(); - } - CallsiteEntry::Trace { total, busy, idle } => { - busy.record(self.busy).unwrap(); - idle.record(self.idle).unwrap(); - total.record(self.busy + self.idle).unwrap(); - } - } - } -} - -fn read_span<'ctx, S>(ctx: &'ctx Context<'_, S>, id: &span::Id) -> SpanRef<'ctx, S> -where - S: Subscriber + for<'a> LookupSpan<'a>, -{ - ctx.span(id) - .expect("Span not found, it should have been stored in the registry") -} - -impl Layer for MetricsLayer -where - S: Subscriber + for<'a> LookupSpan<'a>, -{ - /// Only express interest in span callsites, disabling collection of events, - /// and create new histogram for the spans created by this callsite - fn register_callsite(&self, metadata: &'static Metadata<'static>) -> Interest { - if !metadata.is_span() { - return Interest::never(); - } - - let entry = CallsiteEntry::from_level(metadata.level()); - - METRICS - .write() - .unwrap() - .insert(CallsiteKey(metadata), Mutex::new(entry)); - - Interest::always() - } - - /// When a new span is created, attach the timing data extension to it - fn on_new_span(&self, _attrs: &span::Attributes<'_>, id: &span::Id, ctx: Context<'_, S>) { - let span = read_span(&ctx, id); - let mut extensions = span.extensions_mut(); - - if extensions.get_mut::().is_none() { - extensions.insert(Timings::::new()); - } - } - - /// When a span is entered, start counting idle time for the parent span if - /// it exists and busy time for the entered span itself - fn on_enter(&self, id: &span::Id, ctx: Context<'_, S>) { - let span = read_span(&ctx, id); - - let now = Instant::now(); - if let Some(parent) = span.parent() { - let mut extensions = parent.extensions_mut(); - if let Some(timings) = extensions.get_mut::() { - // The parent span was busy until now - timings.exit(now); - } - } - - let mut extensions = span.extensions_mut(); - if let Some(timings) = extensions.get_mut::() { - // The child span was idle until now - timings.enter(now); - } - } - - /// When a span is exited, stop it from counting busy time and start - /// counting the parent as busy instead - fn on_exit(&self, id: &span::Id, ctx: Context<'_, S>) { - let span = read_span(&ctx, id); - - let now = Instant::now(); - let mut extensions = span.extensions_mut(); - if let Some(timings) = extensions.get_mut::() { - // Child span was busy until now - timings.exit(now); - } - - // Re-enter parent - if let Some(parent) = span.parent() { - let mut extensions = parent.extensions_mut(); - if let Some(timings) = extensions.get_mut::() { - // Parent span was idle until now - timings.enter(now); - } - } - } - - /// When a span is closed, extract its timing information and write it to - /// the associated histograms - fn on_close(&self, id: span::Id, ctx: Context<'_, S>) { - let span = read_span(&ctx, &id); - let mut extensions = span.extensions_mut(); - if let Some(timing) = extensions.remove::() { - let now = Instant::now(); - - // Acquire a read lock on the metrics storage, access the metrics entry - // 
associated with this call site and acquire a write lock on it - let metrics = METRICS.read().unwrap(); - let entry = metrics - .get(&CallsiteKey(span.metadata())) - .expect("callsite not found, it should have been registered in register_callsite"); - - let mut entry = entry.lock().unwrap(); - timing.record(now, &mut entry); - } - } -} - -/// Initializes metrics recording -pub fn init_metrics() { - // Create and injects the metrics recording layer with the tracing library - tracing_subscriber::registry().with(MetricsLayer).init(); -} - -/// Flush and print the recorded metrics to the console -pub fn print_metrics() { - let mut write_guard = METRICS.write().unwrap(); - let mut histograms: Vec<_> = write_guard - .drain() - .flat_map(|(key, entry)| entry.into_inner().unwrap().into_histograms(key.0.name())) - .collect(); - - histograms.sort_unstable_by(|(a, _), (b, _)| a.cmp(b)); - - for (key, histogram) in histograms { - // Print the header line for the histogram with its name, mean sample - // duration and standard deviation - println!( - "{}: mean = {:.1?}, stdev = {:.1?}", - key, - Duration::from_nanos(histogram.mean().round() as u64), - Duration::from_nanos(histogram.stdev().round() as u64), - ); - - // For each quantile bucket in the histogram print out the associated - // duration, a bar corresponding to the percentage of the total number - // of samples falling within this bucket and the percentile - // corresponding to this bucket - let total = histogram.len() as f64; - for v in histogram.iter_quantiles(1) { - let duration = Duration::from_nanos(v.value_iterated_to()); - - let count = v.count_since_last_iteration() as f64; - let bar_length = (count * 40.0 / total).ceil() as usize; - - println!( - "{: >7.1?} | {:40} | {:5.1}%", - duration, - "*".repeat(bar_length), - v.quantile_iterated_to() * 100.0, - ); - } - - // Print an empty line after each histogram - println!(); - } -} - -#[cfg(test)] -mod tests { - use std::{ops::Sub, thread, time::Duration}; - - use tracing::Level; - use tracing_subscriber::prelude::*; - - use super::{CallsiteEntry, CallsiteKey, MetricsLayer, Timepoint, Timings, METRICS}; - - #[derive(Clone, Copy)] - struct TestTime(u64); - - impl Sub for TestTime { - type Output = Duration; - - fn sub(self, rhs: Self) -> Self::Output { - Duration::from_nanos(self.0 - rhs.0) - } - } - - impl Timepoint for TestTime { - fn now() -> Self { - Self(0) - } - } - - #[test] - fn test_timing() { - let mut entry = CallsiteEntry::from_level(&Level::TRACE); - - for i in 1..=5 { - let mut timing = Timings::::new(); - - timing.enter(TestTime(i)); - - timing.record(TestTime(i * 2), &mut entry); - } - - let histograms = entry.into_histograms("test"); - for (name, histogram) in histograms { - let scale = match name.as_ref() { - "test" => 2.0, - "test.idle" | "test.busy" => 1.0, - _ => unreachable!(), - }; - - let sample_count = 5; - assert_eq!(histogram.len(), sample_count); - - let mean = 3.0 * scale; - assert_eq!(histogram.mean(), mean); - - let sum = (1..=5).fold(0.0, |sum, i| { - let sample = i as f64 * scale; - sum + (sample - mean).powi(2) - }); - - let stddev = (sum / sample_count as f64).sqrt(); - assert_eq!(histogram.stdev(), stddev); - - let s = scale as u64 - 1; - let expected_buckets = [ - (0, s, 0.0), - (1, 2 * s + 1, 0.2), - (1, 3 * s + 2, 0.4), - (1, 4 * s + 3, 0.6), - (1, 5 * s + 4, 0.8), - (1, 6 * s + 5, 1.0), - ]; - - for (bucket, expected) in histogram.iter_linear(scale as u64).zip(&expected_buckets) { - let (count, value, quantile) = *expected; - - 
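
The metrics layer being removed above was built on `hdrhistogram`. For context, a minimal sketch of the record-and-report cycle that the deleted `print_metrics` relied on, with illustrative durations:

```rust
use hdrhistogram::Histogram;
use std::time::Duration;

fn main() {
    // Three significant figures, as in the removed `CallsiteEntry::from_level`.
    let mut histogram = Histogram::<u64>::new(3).unwrap();

    // Record a handful of span durations, in nanoseconds (illustrative values).
    for nanos in [1_200_000_u64, 1_500_000, 2_000_000, 3_400_000] {
        histogram.record(nanos).unwrap();
    }

    println!(
        "mean = {:.1?}, stdev = {:.1?}, samples = {}",
        Duration::from_nanos(histogram.mean().round() as u64),
        Duration::from_nanos(histogram.stdev().round() as u64),
        histogram.len(),
    );
}
```
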
assert_eq!(bucket.count_since_last_iteration(), count); - assert_eq!(bucket.value_iterated_to(), value); - assert_eq!(bucket.quantile_iterated_to(), quantile); - } - } - } - - #[test] - fn test_layer() { - let _guard = tracing_subscriber::registry() - .with(MetricsLayer) - .set_default(); - - let key = { - let span = tracing::trace_span!("test_layer"); - span.in_scope(|| { - thread::sleep(Duration::from_millis(1)); - }); - - span.metadata().expect("span is disabled") - }; - - let entry = { - let mut metrics = METRICS.write().unwrap(); - metrics.remove(&CallsiteKey(key)) - }; - - let entry = entry.expect("callsite does not exist in metrics storage"); - - let entry = entry.into_inner().unwrap(); - let histograms = entry.into_histograms(key.name()); - - for (_, histogram) in histograms { - assert_eq!(histogram.len(), 1); - } - } -} diff --git a/crates/biome_cli/src/reporter/github.rs b/crates/biome_cli/src/reporter/github.rs index 172f4d99d921..9f441a8e943d 100644 --- a/crates/biome_cli/src/reporter/github.rs +++ b/crates/biome_cli/src/reporter/github.rs @@ -16,7 +16,7 @@ impl Reporter for GithubReporter { } pub(crate) struct GithubReporterVisitor<'a>(pub(crate) &'a mut dyn Console); -impl<'a> ReporterVisitor for GithubReporterVisitor<'a> { +impl ReporterVisitor for GithubReporterVisitor<'_> { fn report_summary( &mut self, _execution: &Execution, diff --git a/crates/biome_cli/src/reporter/gitlab.rs b/crates/biome_cli/src/reporter/gitlab.rs index c025b0decda9..b9bacff678ed 100644 --- a/crates/biome_cli/src/reporter/gitlab.rs +++ b/crates/biome_cli/src/reporter/gitlab.rs @@ -3,13 +3,14 @@ use biome_console::fmt::{Display, Formatter}; use biome_console::{markup, Console, ConsoleExt}; use biome_diagnostics::display::SourceFile; use biome_diagnostics::{Error, PrintDescription, Resource, Severity}; +use camino::{Utf8Path, Utf8PathBuf}; use path_absolutize::Absolutize; use serde::Serialize; use std::sync::RwLock; use std::{ collections::HashSet, hash::{DefaultHasher, Hash, Hasher}, - path::{Path, PathBuf}, + path::Path, }; pub struct GitLabReporter { @@ -26,7 +27,7 @@ impl Reporter for GitLabReporter { pub(crate) struct GitLabReporterVisitor<'a> { console: &'a mut dyn Console, - repository_root: Option, + repository_root: Option, } #[derive(Default)] @@ -49,7 +50,7 @@ impl GitLabHasher { } impl<'a> GitLabReporterVisitor<'a> { - pub fn new(console: &'a mut dyn Console, repository_root: Option) -> Self { + pub fn new(console: &'a mut dyn Console, repository_root: Option) -> Self { Self { console, repository_root, @@ -57,7 +58,7 @@ impl<'a> GitLabReporterVisitor<'a> { } } -impl<'a> ReporterVisitor for GitLabReporterVisitor<'a> { +impl ReporterVisitor for GitLabReporterVisitor<'_> { fn report_summary(&mut self, _: &Execution, _: TraversalSummary) -> std::io::Result<()> { Ok(()) } @@ -77,11 +78,11 @@ impl<'a> ReporterVisitor for GitLabReporterVisitor<'a> { struct GitLabDiagnostics<'a>( DiagnosticsPayload, &'a RwLock, - Option<&'a Path>, + Option<&'a Utf8Path>, ); -impl<'a> GitLabDiagnostics<'a> { - fn attempt_to_relativize(&self, subject: &str) -> Option { +impl GitLabDiagnostics<'_> { + fn attempt_to_relativize(&self, subject: &str) -> Option { let Ok(resolved) = Path::new(subject).absolutize() else { return None; }; @@ -90,7 +91,7 @@ impl<'a> GitLabDiagnostics<'a> { return None; }; - Some(relativized.to_path_buf()) + Some(Utf8PathBuf::from_path_buf(relativized.to_path_buf()).expect("To be UTF-8 path")) } fn compute_initial_fingerprint(&self, diagnostic: &Error, path: &str) -> u64 { @@ -116,7 +117,7 @@ 
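
Several call sites in this diff convert `std::path` values into `camino` UTF-8 paths via `Utf8PathBuf::from_path_buf`, as in `attempt_to_relativize` above. A small standalone sketch of that fallible conversion; the file name is illustrative:

```rust
use camino::Utf8PathBuf;
use std::path::PathBuf;

fn main() {
    let std_path = PathBuf::from("reports/gl-code-quality-report.json");

    // `from_path_buf` hands the original `PathBuf` back as the error when the
    // path is not valid UTF-8, so callers can decide how to recover.
    match Utf8PathBuf::from_path_buf(std_path) {
        Ok(utf8) => println!("UTF-8 path: {utf8}"),
        Err(original) => eprintln!("not UTF-8, keeping {}", original.display()),
    }
}
```
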
impl<'a> GitLabDiagnostics<'a> { } } -impl<'a> Display for GitLabDiagnostics<'a> { +impl Display for GitLabDiagnostics<'_> { fn fmt(&self, fmt: &mut Formatter) -> std::io::Result<()> { let mut hasher = self.1.write().unwrap(); let gitlab_diagnostics: Vec<_> = self @@ -139,8 +140,8 @@ impl<'a> Display for GitLabDiagnostics<'a> { .unwrap_or_default(); let path_buf = self.attempt_to_relativize(absolute_path); let path = match path_buf { - Some(buf) => buf.to_str().unwrap_or(absolute_path).to_owned(), - None => absolute_path.to_owned(), + Some(buf) => buf.as_str().to_string(), + None => absolute_path.to_string(), }; let initial_fingerprint = self.compute_initial_fingerprint(biome_diagnostic, &path); diff --git a/crates/biome_cli/src/reporter/junit.rs b/crates/biome_cli/src/reporter/junit.rs index 3e9a2bf03e6b..03782e1a8a16 100644 --- a/crates/biome_cli/src/reporter/junit.rs +++ b/crates/biome_cli/src/reporter/junit.rs @@ -24,7 +24,7 @@ struct JunitDiagnostic<'a> { diagnostic: &'a Error, } -impl<'a> Display for JunitDiagnostic<'a> { +impl Display for JunitDiagnostic<'_> { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { self.diagnostic.description(f) } @@ -39,7 +39,7 @@ impl<'a> JunitReporterVisitor<'a> { } } -impl<'a> ReporterVisitor for JunitReporterVisitor<'a> { +impl ReporterVisitor for JunitReporterVisitor<'_> { fn report_summary( &mut self, _execution: &Execution, diff --git a/crates/biome_cli/src/reporter/summary.rs b/crates/biome_cli/src/reporter/summary.rs index 90aefff0f65d..f8058bcf780e 100644 --- a/crates/biome_cli/src/reporter/summary.rs +++ b/crates/biome_cli/src/reporter/summary.rs @@ -27,7 +27,7 @@ impl Reporter for SummaryReporter { pub(crate) struct SummaryReporterVisitor<'a>(pub(crate) &'a mut dyn Console); -impl<'a> ReporterVisitor for SummaryReporterVisitor<'a> { +impl ReporterVisitor for SummaryReporterVisitor<'_> { fn report_summary( &mut self, execution: &Execution, @@ -42,7 +42,7 @@ impl<'a> ReporterVisitor for SummaryReporterVisitor<'a> { if !execution.is_ci() && summary.diagnostics_not_printed > 0 { self.0.log(markup! { - "The number of diagnostics exceeds the number allowed by Biome.\n" + "The number of diagnostics exceeds the limit allowed. Use ""--max-diagnostics"" to increase it.\n" "Diagnostics not shown: "{summary.diagnostics_not_printed}"." 
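
A recurring cleanup in these reporter impls is dropping a named lifetime parameter that is only forwarded, in favor of the anonymous `'_` lifetime. A tiny sketch of the equivalence, with illustrative types:

```rust
struct Visitor<'a>(&'a mut String);

trait Report {
    fn report(&mut self, line: &str);
}

// Before: `impl<'a> Report for Visitor<'a> { ... }` declares a lifetime only
// to repeat it. After: the anonymous lifetime expresses the same bound.
impl Report for Visitor<'_> {
    fn report(&mut self, line: &str) {
        self.0.push_str(line);
        self.0.push('\n');
    }
}

fn main() {
    let mut buffer = String::new();
    Visitor(&mut buffer).report("summary: ok");
    print!("{buffer}");
}
```
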
}) } @@ -72,6 +72,7 @@ impl<'a> ReporterVisitor for SummaryReporterVisitor<'a> { let category = diagnostic.category(); let severity = &diagnostic.severity(); + if diagnostic.severity() >= diagnostics_payload.diagnostic_level { if diagnostic.tags().is_verbose() { if diagnostics_payload.verbose { @@ -113,8 +114,8 @@ impl<'a> ReporterVisitor for SummaryReporterVisitor<'a> { if execution.is_check() || execution.is_ci() { if let Some(category) = category { - if category.name() == "organizeImports" { - files_to_diagnostics.insert_organize_imports(location); + if category.name() == "assist" { + files_to_diagnostics.insert_assist(location); } } } @@ -130,7 +131,7 @@ impl<'a> ReporterVisitor for SummaryReporterVisitor<'a> { #[derive(Debug, Default)] struct FileToDiagnostics { formats: BTreeSet, - organize_imports: BTreeSet, + assists: BTreeSet, lints: LintsByCategory, parse: BTreeSet, } @@ -145,8 +146,8 @@ impl FileToDiagnostics { self.formats.insert(location.into()); } - fn insert_organize_imports(&mut self, location: &str) { - self.organize_imports.insert(location.into()); + fn insert_assist(&mut self, location: &str) { + self.assists.insert(location.into()); } fn insert_parse(&mut self, location: &str) { @@ -172,10 +173,10 @@ struct SummaryListDiagnostic<'a> { #[derive(Debug, Diagnostic)] #[diagnostic( severity = Information, - category = "reporter/analyzer", - message = "Some analyzer rules were triggered" + category = "reporter/linter", + message = "Some lint rules were triggered" )] -struct SummaryTableDiagnostic<'a> { +struct LintSummaryDiagnostic<'a> { #[advice] tables: &'a LintsByCategory, } @@ -183,7 +184,7 @@ struct SummaryTableDiagnostic<'a> { #[derive(Debug)] struct SummaryListAdvice<'a>(&'a BTreeSet); -impl<'a> Advices for SummaryListAdvice<'a> { +impl Advices for SummaryListAdvice<'_> { fn record(&self, visitor: &mut dyn Visit) -> io::Result<()> { let list: Vec<_> = self.0.iter().map(|s| s as &dyn Display).collect(); visitor.record_list(&list) @@ -224,7 +225,7 @@ impl Display for FileToDiagnostics { })?; } - if !self.organize_imports.is_empty() { + if !self.assists.is_empty() { let diagnostic = SummaryListDiagnostic { message: MessageAndDescription::from( markup! { @@ -232,8 +233,8 @@ impl Display for FileToDiagnostics { } .to_owned(), ), - list: SummaryListAdvice(&self.organize_imports), - category: category!("reporter/organizeImports"), + list: SummaryListAdvice(&self.assists), + category: category!("reporter/assist"), }; fmt.write_markup(markup! { {PrintDiagnostic::simple(&diagnostic)} @@ -241,7 +242,7 @@ impl Display for FileToDiagnostics { } if !self.lints.0.is_empty() { - let diagnostic = SummaryTableDiagnostic { + let diagnostic = LintSummaryDiagnostic { tables: &self.lints, }; fmt.write_markup(markup! 
{ @@ -267,7 +268,7 @@ impl LintsByCategory { } } -impl<'a> Advices for &'a LintsByCategory { +impl Advices for &LintsByCategory { fn record(&self, visitor: &mut dyn Visit) -> io::Result<()> { let headers = &[ markup!("Rule Name").to_owned(), diff --git a/crates/biome_cli/src/reporter/terminal.rs b/crates/biome_cli/src/reporter/terminal.rs index 42653361d2a0..15ef2f5dc942 100644 --- a/crates/biome_cli/src/reporter/terminal.rs +++ b/crates/biome_cli/src/reporter/terminal.rs @@ -53,7 +53,7 @@ struct FixedPathsDiagnostic { pub(crate) struct ConsoleReporterVisitor<'a>(pub(crate) &'a mut dyn Console); -impl<'a> ReporterVisitor for ConsoleReporterVisitor<'a> { +impl ReporterVisitor for ConsoleReporterVisitor<'_> { fn report_summary( &mut self, execution: &Execution, @@ -68,7 +68,7 @@ impl<'a> ReporterVisitor for ConsoleReporterVisitor<'a> { if !execution.is_ci() && summary.diagnostics_not_printed > 0 { self.0.log(markup! { - "The number of diagnostics exceeds the number allowed by Biome.\n" + "The number of diagnostics exceeds the limit allowed. Use ""--max-diagnostics"" to increase it.\n" "Diagnostics not shown: "{summary.diagnostics_not_printed}"." }) } @@ -83,10 +83,7 @@ impl<'a> ReporterVisitor for ConsoleReporterVisitor<'a> { fn report_handled_paths(&mut self, evaluated_paths: BTreeSet) -> io::Result<()> { let evaluated_paths_diagnostic = EvaluatedPathsDiagnostic { advice: ListAdvice { - list: evaluated_paths - .iter() - .map(|p| p.display().to_string()) - .collect(), + list: evaluated_paths.iter().map(|p| p.to_string()).collect(), }, }; @@ -95,7 +92,7 @@ impl<'a> ReporterVisitor for ConsoleReporterVisitor<'a> { list: evaluated_paths .iter() .filter(|p| p.was_written()) - .map(|p| p.display().to_string()) + .map(|p| p.to_string()) .collect(), }, }; @@ -151,7 +148,7 @@ impl fmt::Display for Files { struct SummaryDetail<'a>(pub(crate) &'a TraversalMode, usize); -impl<'a> fmt::Display for SummaryDetail<'a> { +impl fmt::Display for SummaryDetail<'_> { fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> { if let TraversalMode::Search { .. 
} = self.0 { return Ok(()); @@ -170,7 +167,7 @@ impl<'a> fmt::Display for SummaryDetail<'a> { } struct SummaryTotal<'a>(&'a TraversalMode, usize, &'a Duration); -impl<'a> fmt::Display for SummaryTotal<'a> { +impl fmt::Display for SummaryTotal<'_> { fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> { let files = Files(self.1); match self.0 { @@ -214,7 +211,7 @@ pub(crate) struct ConsoleTraversalSummary<'a>( pub(crate) &'a TraversalMode, pub(crate) &'a TraversalSummary, ); -impl<'a> fmt::Display for ConsoleTraversalSummary<'a> { +impl fmt::Display for ConsoleTraversalSummary<'_> { fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> { let summary = SummaryTotal(self.0, self.1.changed + self.1.unchanged, &self.1.duration); let detail = SummaryDetail(self.0, self.1.changed); diff --git a/crates/biome_cli/src/service/unix.rs b/crates/biome_cli/src/service/unix.rs index 4638872f5cb2..4d604c41b7e8 100644 --- a/crates/biome_cli/src/service/unix.rs +++ b/crates/biome_cli/src/service/unix.rs @@ -1,12 +1,11 @@ +use biome_lsp::{ServerConnection, ServerFactory}; +use camino::Utf8PathBuf; use std::{ convert::Infallible, env, fs, io::{self, ErrorKind}, - path::PathBuf, time::Duration, }; - -use biome_lsp::{ServerConnection, ServerFactory}; use tokio::{ io::Interest, net::{ @@ -20,22 +19,21 @@ use tracing::{debug, info, Instrument}; /// Returns the filesystem path of the global socket used to communicate with /// the server daemon -fn get_socket_name() -> PathBuf { +fn get_socket_name() -> Utf8PathBuf { biome_fs::ensure_cache_dir().join(format!("biome-socket-{}", biome_configuration::VERSION)) } -pub(crate) fn enumerate_pipes() -> io::Result> { +pub(crate) fn enumerate_pipes() -> io::Result> { fs::read_dir(biome_fs::ensure_cache_dir()).map(|iter| { iter.filter_map(|entry| { - let entry = entry.ok()?.path(); + let entry = Utf8PathBuf::from_path_buf(entry.ok()?.path()).ok()?; let file_name = entry.file_name()?; - let file_name = file_name.to_str()?; let version = file_name.strip_prefix("biome-socket")?; if version.is_empty() { - Some(String::new()) + Some((String::new(), entry)) } else { - Some(version.strip_prefix('-')?.to_string()) + Some((version.strip_prefix('-')?.to_string(), entry)) } }) }) @@ -44,7 +42,7 @@ pub(crate) fn enumerate_pipes() -> io::Result> { /// Try to connect to the global socket and wait for the connection to become ready async fn try_connect() -> io::Result { let socket_name = get_socket_name(); - info!("Trying to connect to socket {}", socket_name.display()); + info!("Trying to connect to socket {}", socket_name.as_str()); let stream = UnixStream::connect(socket_name).await?; stream .ready(Interest::READABLE | Interest::WRITABLE) @@ -55,8 +53,8 @@ async fn try_connect() -> io::Result { /// Spawn the daemon server process in the background fn spawn_daemon( stop_on_disconnect: bool, - config_path: Option, - log_path: Option, + config_path: Option, + log_path: Option, log_file_name_prefix: Option, ) -> io::Result { let binary = env::current_exe()?; @@ -69,10 +67,10 @@ fn spawn_daemon( cmd.arg("--stop-on-disconnect"); } if let Some(config_path) = config_path { - cmd.arg(format!("--config-path={}", config_path.display())); + cmd.arg(format!("--config-path={}", config_path)); } if let Some(log_path) = log_path { - cmd.arg(format!("--log-path={}", log_path.display())); + cmd.arg(format!("--log-path={}", log_path)); } if let Some(log_file_name_prefix) = log_file_name_prefix { @@ -126,8 +124,8 @@ pub(crate) async fn open_socket() -> io::Result, - log_path: Option, + config_path: Option, + 
log_path: Option, log_file_name_prefix: Option, ) -> io::Result { let mut current_child: Option = None; @@ -195,7 +193,7 @@ pub(crate) async fn ensure_daemon( /// print the global socket name in the standard output pub(crate) async fn print_socket() -> io::Result<()> { ensure_daemon(true, None, None, None).await?; - println!("{}", get_socket_name().display()); + println!("{}", get_socket_name().as_str()); Ok(()) } @@ -203,15 +201,15 @@ pub(crate) async fn print_socket() -> io::Result<()> { /// provided [ServerFactory] pub(crate) async fn run_daemon( factory: ServerFactory, - config_path: Option, + config_path: Option, ) -> io::Result { let path = get_socket_name(); - info!("Trying to connect to socket {}", path.display()); + info!("Trying to connect to socket {}", path.as_str()); // Try to remove the socket file if it already exists if path.exists() { - info!("Remove socket folder {}", path.display()); + info!("Remove socket folder {}", path.as_str()); fs::remove_file(&path)?; } diff --git a/crates/biome_cli/src/service/windows.rs b/crates/biome_cli/src/service/windows.rs index 82d0acf7a911..4d278db9b475 100644 --- a/crates/biome_cli/src/service/windows.rs +++ b/crates/biome_cli/src/service/windows.rs @@ -1,3 +1,5 @@ +use biome_lsp::{ServerConnection, ServerFactory}; +use camino::Utf8PathBuf; use std::{ convert::Infallible, env, @@ -5,15 +7,12 @@ use std::{ io::{self, ErrorKind}, mem::swap, os::windows::process::CommandExt, - path::PathBuf, pin::Pin, process::Command, sync::Arc, task::{Context, Poll}, time::Duration, }; - -use biome_lsp::{ServerConnection, ServerFactory}; use tokio::{ io::{AsyncRead, AsyncWrite, ReadBuf}, net::windows::named_pipe::{ClientOptions, NamedPipeClient, NamedPipeServer, ServerOptions}, @@ -27,18 +26,17 @@ fn get_pipe_name() -> String { format!(r"\\.\pipe\biome-service-{}", biome_configuration::VERSION) } -pub(crate) fn enumerate_pipes() -> io::Result> { +pub(crate) fn enumerate_pipes() -> io::Result> { read_dir(r"\\.\pipe").map(|iter| { iter.filter_map(|entry| { - let entry = entry.ok()?.path(); + let entry = Utf8PathBuf::from_path_buf(entry.ok()?.path()).ok()?; let file_name = entry.file_name()?; - let file_name = file_name.to_str()?; let version = file_name.strip_prefix("rome-service")?; if version.is_empty() { - Some(String::new()) + Some((String::new(), entry)) } else { - Some(version.strip_prefix('-')?.to_string()) + Some((version.strip_prefix('-')?.to_string(), entry)) } }) }) @@ -70,8 +68,8 @@ const CREATE_NEW_PROCESS_GROUP: u32 = 0x00000200; /// Spawn the daemon server process in the background fn spawn_daemon( stop_on_disconnect: bool, - config_path: Option, - log_path: Option, + config_path: Option, + log_path: Option, log_file_name_prefix: Option, ) -> io::Result<()> { let binary = env::current_exe()?; @@ -84,10 +82,10 @@ fn spawn_daemon( } if let Some(config_path) = config_path { - cmd.arg(format!("--config-path={}", config_path.display())); + cmd.arg(format!("--config-path={}", config_path.as_str())); } if let Some(log_path) = log_path { - cmd.arg(format!("--log-path={}", log_path.display())); + cmd.arg(format!("--log-path={}", log_path.as_str())); } if let Some(log_file_name_prefix) = log_file_name_prefix { cmd.arg(format!("--log-prefix-name={}", log_file_name_prefix)); @@ -185,8 +183,8 @@ impl AsyncWrite for ClientWriteHalf { /// to be started pub(crate) async fn ensure_daemon( stop_on_disconnect: bool, - config_path: Option, - log_path: Option, + config_path: Option, + log_path: Option, log_file_name_prefix: Option, ) -> io::Result { let mut 
did_spawn = false; @@ -223,7 +221,7 @@ pub(crate) async fn print_socket() -> io::Result<()> { /// provided [ServerFactory] pub(crate) async fn run_daemon( factory: ServerFactory, - config_path: Option, + config_path: Option, ) -> io::Result { let mut prev_server = ServerOptions::new() .first_pipe_instance(true) diff --git a/crates/biome_cli/tests/cases/assists.rs b/crates/biome_cli/tests/cases/assist.rs similarity index 75% rename from crates/biome_cli/tests/cases/assists.rs rename to crates/biome_cli/tests/cases/assist.rs index 3b611902310c..fe150cf6b45f 100644 --- a/crates/biome_cli/tests/cases/assists.rs +++ b/crates/biome_cli/tests/cases/assist.rs @@ -2,41 +2,40 @@ use crate::run_cli; use crate::snap_test::{assert_cli_snapshot, assert_file_contents, SnapshotPayload}; use biome_console::BufferConsole; use biome_fs::MemoryFileSystem; -use biome_service::DynRef; use bpaf::Args; -use std::path::Path; +use camino::Utf8Path; #[test] fn assist_emit_diagnostic() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let config = Path::new("biome.json"); + let config = Utf8Path::new("biome.json"); fs.insert( config.into(), - r#"{ - "assists": { + r#"{ + "assist": { "enabled": true, "actions": { "source": { "useSortedKeys": "on" } } - }, + }, "formatter": { "enabled": false } }"# .as_bytes(), ); - let file = Path::new("file.json"); + let file = Utf8Path::new("file.json"); fs.insert( file.into(), r#"{ "zod": true, "lorem": "ipsum", "foo": "bar" }"#.as_bytes(), ); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from([("check"), file.as_os_str().to_str().unwrap()].as_slice()), + Args::from(["check", file.as_str()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); @@ -55,32 +54,32 @@ fn assist_writes() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let config = Path::new("biome.json"); + let config = Utf8Path::new("biome.json"); fs.insert( config.into(), - r#"{ - "assists": { + r#"{ + "assist": { "enabled": true, "actions": { "source": { "useSortedKeys": "on" } } - }, + }, "formatter": { "enabled": false } }"# .as_bytes(), ); - let file = Path::new("file.json"); + let file = Utf8Path::new("file.json"); fs.insert( file.into(), r#"{ "zod": true, "lorem": "ipsum", "foo": "bar" }"#.as_bytes(), ); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from([("check"), "--write", file.as_os_str().to_str().unwrap()].as_slice()), + Args::from(["check", "--write", file.as_str()].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); diff --git a/crates/biome_cli/tests/cases/biome_json_support.rs b/crates/biome_cli/tests/cases/biome_json_support.rs index a2c9eb79cb06..b1215dc6cd51 100644 --- a/crates/biome_cli/tests/cases/biome_json_support.rs +++ b/crates/biome_cli/tests/cases/biome_json_support.rs @@ -3,9 +3,8 @@ use crate::snap_test::{assert_cli_snapshot, assert_file_contents, SnapshotPayloa use crate::{run_cli, UNFORMATTED}; use biome_console::BufferConsole; use biome_fs::{FileSystemExt, MemoryFileSystem}; -use biome_service::DynRef; use bpaf::Args; -use std::path::{Path, PathBuf}; +use camino::{Utf8Path, Utf8PathBuf}; const CUSTOM_CONFIGURATION_BEFORE: &str = r#"function f() { return { a, b } @@ -24,26 +23,26 @@ fn formatter_biome_json() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let file_path = Path::new("biome.json"); + let 
file_path = Utf8Path::new("biome.json"); fs.insert(file_path.into(), CONFIG_FORMAT.as_bytes()); - let file_path = Path::new("file.js"); + let file_path = Utf8Path::new("file.js"); fs.insert(file_path.into(), CUSTOM_CONFIGURATION_BEFORE.as_bytes()); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, Args::from( [ - ("format"), - ("--line-width"), - ("10"), - ("--indent-style"), - ("space"), - ("--indent-size"), - ("8"), - ("--write"), - file_path.as_os_str().to_str().unwrap(), + "format", + "--line-width", + "10", + "--indent-style", + "space", + "--indent-width", + "8", + "--write", + file_path.as_str(), ] .as_slice(), ), @@ -66,10 +65,10 @@ fn linter_biome_json() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let file_path = Path::new("fix.js"); + let file_path = Utf8Path::new("fix.js"); fs.insert(file_path.into(), "debugger;\n".as_bytes()); - let config_path = Path::new("biome.json"); + let config_path = Utf8Path::new("biome.json"); fs.insert( config_path.into(), r#"{ @@ -85,17 +84,10 @@ fn linter_biome_json() { .as_bytes(), ); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from( - [ - ("lint"), - ("--apply"), - file_path.as_os_str().to_str().unwrap(), - ] - .as_slice(), - ), + Args::from(["lint", "--write", file_path.as_str()].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); @@ -122,10 +114,10 @@ fn check_biome_json() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let file_path = Path::new("fix.js"); + let file_path = Utf8Path::new("fix.js"); fs.insert(file_path.into(), "debugger".as_bytes()); - let config_path = Path::new("biome.json"); + let config_path = Utf8Path::new("biome.json"); fs.insert( config_path.into(), r#"{ @@ -141,17 +133,10 @@ fn check_biome_json() { .as_bytes(), ); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from( - [ - ("check"), - ("--apply"), - file_path.as_os_str().to_str().unwrap(), - ] - .as_slice(), - ), + Args::from(["check", "--write", file_path.as_str()].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); @@ -179,7 +164,7 @@ fn ci_biome_json() { let mut console = BufferConsole::default(); fs.insert( - PathBuf::from("biome.json"), + Utf8PathBuf::from("biome.json"), r#"{ "formatter": { "enabled": false @@ -189,14 +174,14 @@ fn ci_biome_json() { .as_bytes(), ); - let input_file = Path::new("file.js"); + let input_file = Utf8Path::new("file.js"); fs.insert(input_file.into(), " statement( ) ".as_bytes()); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from([("ci"), input_file.as_os_str().to_str().unwrap()].as_slice()), + Args::from(["ci", input_file.as_str()].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); @@ -218,30 +203,26 @@ fn biome_json_is_not_ignored() { let mut console = BufferConsole::default(); fs.insert( - PathBuf::from("biome.json"), + Utf8PathBuf::from("biome.json"), r#"{ - "files": { "ignore": ["*.json"] }, - "formatter": { - "enabled": false - } -} -"# + "files": { "includes": ["**", "!*.json"] }, + "formatter": { + "enabled": false + } + } + "# .as_bytes(), ); - let input_file = Path::new("file.js"); + let input_file = Utf8Path::new("file.js"); fs.insert(input_file.into(), " statement( ) ".as_bytes()); - let input_file = Path::new("file.json"); + let 
input_file = Utf8Path::new("file.json"); fs.insert(input_file.into(), " statement( ) ".as_bytes()); - let result = run_cli( - DynRef::Borrowed(&mut fs), - &mut console, - Args::from([("ci"), "./"].as_slice()), - ); + let (fs, result) = run_cli(fs, &mut console, Args::from(["ci", "./"].as_slice())); assert!(result.is_ok(), "run_cli returned {result:?}"); @@ -259,7 +240,7 @@ fn always_disable_trailing_commas_biome_json() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let file_path = Path::new("biome.json"); + let file_path = Utf8Path::new("biome.json"); let config = r#"{ "formatter": { "indentStyle": "space", @@ -274,8 +255,8 @@ fn always_disable_trailing_commas_biome_json() { "#; fs.insert(file_path.into(), config); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, Args::from(["check", "--write", "."].as_slice()), ); diff --git a/crates/biome_cli/tests/cases/config_extends.rs b/crates/biome_cli/tests/cases/config_extends.rs index e784e5177b20..09266feea2d8 100644 --- a/crates/biome_cli/tests/cases/config_extends.rs +++ b/crates/biome_cli/tests/cases/config_extends.rs @@ -3,35 +3,34 @@ use crate::snap_test::{assert_cli_snapshot, SnapshotPayload}; use biome_console::BufferConsole; use biome_formatter::LineWidth; use biome_fs::MemoryFileSystem; -use biome_service::DynRef; use bpaf::Args; -use std::path::Path; +use camino::Utf8Path; #[test] fn extends_config_ok_formatter_no_linter() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let rome_json = Path::new("biome.json"); + let rome_json = Utf8Path::new("biome.json"); fs.insert( rome_json.into(), r#"{ "extends": ["format.json", "linter.json"] }"#, ); - let format = Path::new("format.json"); + let format = Utf8Path::new("format.json"); fs.insert( format.into(), r#"{ "javascript": { "formatter": { "quoteStyle": "single" } } }"#, ); - let lint = Path::new("linter.json"); + let lint = Utf8Path::new("linter.json"); fs.insert(lint.into(), r#"{ "linter": { "enabled": false } }"#); - let test_file = Path::new("test.js"); + let test_file = Utf8Path::new("test.js"); fs.insert(test_file.into(), r#"debugger; console.log("string"); "#); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from([("check"), test_file.as_os_str().to_str().unwrap()].as_slice()), + Args::from(["check", test_file.as_str()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); @@ -50,20 +49,19 @@ fn extends_config_ok_linter_not_formatter() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let rome_json = Path::new("biome.json"); + let rome_json = Utf8Path::new("biome.json"); fs.insert( rome_json.into(), r#"{ "extends": ["format.json", "linter.json"] }"#, ); - let format = Path::new("format.json"); + let format = Utf8Path::new("format.json"); fs.insert(format.into(), r#"{ "formatter": { "enabled": true } }"#); - let lint = Path::new("linter.json"); + let lint = Utf8Path::new("linter.json"); fs.insert( lint.into(), r#"{ "linter": { "rules": { - "all": false, "suspicious": { "noDebugger": "warn" } @@ -73,13 +71,13 @@ fn extends_config_ok_linter_not_formatter() { "#, ); - let test_file = Path::new("test.js"); + let test_file = Utf8Path::new("test.js"); fs.insert(test_file.into(), r#"debugger; console.log("string"); "#); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - 
Args::from([("check"), test_file.as_os_str().to_str().unwrap()].as_slice()), + Args::from(["check", test_file.as_str()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); @@ -98,26 +96,26 @@ fn extends_should_raise_an_error_for_unresolved_configuration() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let rome_json = Path::new("biome.json"); + let rome_json = Utf8Path::new("biome.json"); fs.insert( rome_json.into(), r#"{ "extends": ["formatTYPO.json", "linter.json"] }"#, ); - let format = Path::new("format.json"); + let format = Utf8Path::new("format.json"); fs.insert( format.into(), r#"{ "javascript": { "formatter": { "quoteStyle": "single" } } }"#, ); - let lint = Path::new("linter.json"); + let lint = Utf8Path::new("linter.json"); fs.insert(lint.into(), r#"{ "linter": { "enabled": false } }"#); - let test_file = Path::new("test.js"); + let test_file = Utf8Path::new("test.js"); fs.insert(test_file.into(), r#"debugger; console.log("string"); "#); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from([("check"), test_file.as_os_str().to_str().unwrap()].as_slice()), + Args::from(["check", test_file.as_str()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); @@ -136,33 +134,26 @@ fn extends_should_raise_an_error_for_unresolved_configuration_and_show_verbose() let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let rome_json = Path::new("biome.json"); + let rome_json = Utf8Path::new("biome.json"); fs.insert( rome_json.into(), r#"{ "extends": ["formatTYPO.json", "linter.json"] }"#, ); - let format = Path::new("format.json"); + let format = Utf8Path::new("format.json"); fs.insert( format.into(), r#"{ "javascript": { "formatter": { "quoteStyle": "single" } } }"#, ); - let lint = Path::new("linter.json"); + let lint = Utf8Path::new("linter.json"); fs.insert(lint.into(), r#"{ "linter": { "enabled": false } }"#); - let test_file = Path::new("test.js"); + let test_file = Utf8Path::new("test.js"); fs.insert(test_file.into(), r#"debugger; console.log("string"); "#); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from( - [ - ("check"), - "--verbose", - test_file.as_os_str().to_str().unwrap(), - ] - .as_slice(), - ), + Args::from(["check", "--verbose", test_file.as_str()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); @@ -181,33 +172,26 @@ fn extends_resolves_when_using_config_path() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let rome_json = Path::new("config/biome.json"); + let rome_json = Utf8Path::new("config/biome.json"); fs.insert( rome_json.into(), r#"{ "extends": ["format.json", "linter.json"] }"#, ); - let format = Path::new("config/format.json"); + let format = Utf8Path::new("config/format.json"); fs.insert( format.into(), r#"{ "javascript": { "formatter": { "quoteStyle": "single" } } }"#, ); - let lint = Path::new("config/linter.json"); + let lint = Utf8Path::new("config/linter.json"); fs.insert(lint.into(), r#"{ "linter": { "enabled": true } }"#); - let test_file = Path::new("test.js"); + let test_file = Utf8Path::new("test.js"); fs.insert(test_file.into(), r#"debugger; console.log("string"); "#); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from( - [ - ("check"), - "--config-path=config/", - 
test_file.as_os_str().to_str().unwrap(), - ] - .as_slice(), - ), + Args::from(["check", "--config-path=config/", test_file.as_str()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); @@ -226,35 +210,28 @@ fn applies_extended_values_in_current_config() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let format = Path::new("format.json"); + let format = Utf8Path::new("format.json"); fs.insert( format.into(), r#"{ "javascript": { "formatter": { "quoteStyle": "single" } } }"#, ); - let rome_json = Path::new("biome.json"); + let rome_json = Utf8Path::new("biome.json"); fs.insert( rome_json.into(), r#"{ "extends": ["format.json"], "formatter": { "lineWidth": 20 } }"#, ); - let test_file = Path::new("test.js"); + let test_file = Utf8Path::new("test.js"); fs.insert( test_file.into(), r#"debugger; const a = ["lorem", "ipsum"]; "#, ); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from( - [ - ("format"), - "--write", - test_file.as_os_str().to_str().unwrap(), - ] - .as_slice(), - ), + Args::from(["format", "--write", test_file.as_str()].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); @@ -273,32 +250,25 @@ fn respects_unaffected_values_from_extended_config() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let format = Path::new("format.json"); + let format = Utf8Path::new("format.json"); fs.insert(format.into(), r#"{ "formatter": { "lineWidth": 20 } }"#); - let rome_json = Path::new("biome.json"); + let rome_json = Utf8Path::new("biome.json"); fs.insert( rome_json.into(), r#"{ "extends": ["format.json"], "formatter": { "indentStyle": "space", "indentWidth": 2 } }"#, ); - let test_file = Path::new("test.js"); + let test_file = Utf8Path::new("test.js"); fs.insert( test_file.into(), r#"debugger; const a = ["lorem", "ipsum"]; "#, ); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from( - [ - ("format"), - "--write", - test_file.as_os_str().to_str().unwrap(), - ] - .as_slice(), - ), + Args::from(["format", "--write", test_file.as_str()].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); @@ -317,10 +287,10 @@ fn allows_reverting_fields_in_extended_config_to_default() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let format = Path::new("format.json"); + let format = Utf8Path::new("format.json"); fs.insert(format.into(), r#"{ "formatter": { "lineWidth": 20 } }"#); - let rome_json = Path::new("biome.json"); + let rome_json = Utf8Path::new("biome.json"); fs.insert( rome_json.into(), format!( @@ -329,23 +299,16 @@ fn allows_reverting_fields_in_extended_config_to_default() { ), ); - let test_file = Path::new("test.js"); + let test_file = Utf8Path::new("test.js"); fs.insert( test_file.into(), r#"debugger; const a = ["lorem", "ipsum"]; "#, ); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from( - [ - ("format"), - "--write", - test_file.as_os_str().to_str().unwrap(), - ] - .as_slice(), - ), + Args::from(["format", "--write", test_file.as_str()].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); @@ -364,36 +327,36 @@ fn extends_config_merge_overrides() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let shared = Path::new("shared.json"); + let shared = 
Utf8Path::new("shared.json"); fs.insert( shared.into(), r#"{ "overrides": [{ - "include": ["**/*.js"], + "includes": ["**/*.js"], "linter": { "rules": { "suspicious": { "noDebugger": "off" } } } }] }"#, ); - let biome_json = Path::new("biome.json"); + let biome_json = Utf8Path::new("biome.json"); fs.insert( biome_json.into(), r#"{ "extends": ["shared.json"], "overrides": [{ - "include": ["**/*.js"], + "includes": ["**/*.js"], "linter": { "rules": { "correctness": { "noUnusedVariables": "error" } } } }] }"#, ); - let test_file = Path::new("test.js"); + let test_file = Utf8Path::new("test.js"); fs.insert(test_file.into(), "debugger; const a = 0;"); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from(["lint", test_file.as_os_str().to_str().unwrap()].as_slice()), + Args::from(["lint", test_file.as_str()].as_slice()), ); assert_cli_snapshot(SnapshotPayload::new( diff --git a/crates/biome_cli/tests/cases/config_path.rs b/crates/biome_cli/tests/cases/config_path.rs index a29b20c53568..bfb0b38c7799 100644 --- a/crates/biome_cli/tests/cases/config_path.rs +++ b/crates/biome_cli/tests/cases/config_path.rs @@ -2,23 +2,22 @@ use crate::run_cli; use crate::snap_test::{assert_cli_snapshot, SnapshotPayload}; use biome_console::BufferConsole; use biome_fs::MemoryFileSystem; -use biome_service::DynRef; use bpaf::Args; -use std::path::Path; +use camino::Utf8Path; #[test] fn set_config_path_to_directory() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let file_path = Path::new("src/index.js"); + let file_path = Utf8Path::new("src/index.js"); fs.insert(file_path.into(), "a['b'] = 42;".as_bytes()); - let config_path = Path::new("config/biome.jsonc"); + let config_path = Utf8Path::new("config/biome.jsonc"); fs.insert( config_path.into(), r#"{ - "organizeImports": { + "assist": { "enabled": true }, "linter": { @@ -36,10 +35,10 @@ fn set_config_path_to_directory() { .as_bytes(), ); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from([("check"), ("--config-path=config"), ("src")].as_slice()), + Args::from(["check", "--config-path=config", "src"].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); @@ -58,14 +57,14 @@ fn set_config_path_to_file() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let file_path = Path::new("src/index.js"); + let file_path = Utf8Path::new("src/index.js"); fs.insert(file_path.into(), "a['b'] = 42;".as_bytes()); - let config_path = Path::new("config/a.jsonc"); + let config_path = Utf8Path::new("config/a.jsonc"); fs.insert( config_path.into(), r#"{ - "organizeImports": { + "assist": { "enabled": true }, "linter": { @@ -83,10 +82,10 @@ fn set_config_path_to_file() { .as_bytes(), ); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from([("check"), ("--config-path=config/a.jsonc"), ("src")].as_slice()), + Args::from(["check", "--config-path=config/a.jsonc", "src"].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); @@ -99,3 +98,56 @@ fn set_config_path_to_file() { result, )); } + +#[test] +fn raises_an_error_when_the_config_file_is_not_json() { + let mut fs = MemoryFileSystem::default(); + let mut console = BufferConsole::default(); + + let config_path = Utf8Path::new("biome.yml"); + fs.insert(config_path.into(), r#"blah: foo"#.as_bytes()); + + let (fs, result) = run_cli( + fs, + 
&mut console, + Args::from(["check", "--config-path=biome.yml", "src"].as_slice()), + ); + + assert!(result.is_err(), "run_cli returned {result:?}"); + + assert_cli_snapshot(SnapshotPayload::new( + module_path!(), + "raises_an_error_when_the_config_file_is_not_json", + fs, + console, + result, + )); +} + +#[test] +fn raises_an_error_for_no_configuration_file_found() { + let mut fs = MemoryFileSystem::default(); + let mut console = BufferConsole::default(); + + let file = Utf8Path::new("file.js"); + fs.insert( + file.into(), + r#"function name() { return "lorem" }"#.as_bytes(), + ); + + let (fs, result) = run_cli( + fs, + &mut console, + Args::from(["check", "--config-path=config", file.as_str()].as_slice()), + ); + + assert!(result.is_err(), "run_cli returned {result:?}"); + + assert_cli_snapshot(SnapshotPayload::new( + module_path!(), + "raises_an_error_for_no_configuration_file_found", + fs, + console, + result, + )); +} diff --git a/crates/biome_cli/tests/cases/cts_files.rs b/crates/biome_cli/tests/cases/cts_files.rs index 0dd3a86318b4..e2b6bffd5d0c 100644 --- a/crates/biome_cli/tests/cases/cts_files.rs +++ b/crates/biome_cli/tests/cases/cts_files.rs @@ -2,25 +2,24 @@ use crate::run_cli; use crate::snap_test::{assert_cli_snapshot, SnapshotPayload}; use biome_console::BufferConsole; use biome_fs::MemoryFileSystem; -use biome_service::DynRef; use bpaf::Args; -use std::path::Path; +use camino::Utf8Path; #[test] fn should_allow_using_export_statements() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let file_path = Path::new("a.cts"); + let file_path = Utf8Path::new("a.cts"); fs.insert( file_path.into(), r#"export default { cjs: true };"#.as_bytes(), ); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from([("lint"), file_path.as_os_str().to_str().unwrap()].as_slice()), + Args::from(["lint", file_path.as_str()].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); diff --git a/crates/biome_cli/tests/cases/diagnostics.rs b/crates/biome_cli/tests/cases/diagnostics.rs index 4339ad643e0f..f298a9dc10c8 100644 --- a/crates/biome_cli/tests/cases/diagnostics.rs +++ b/crates/biome_cli/tests/cases/diagnostics.rs @@ -2,9 +2,8 @@ use crate::snap_test::{assert_cli_snapshot, SnapshotPayload}; use crate::{run_cli, UNFORMATTED}; use biome_console::{BufferConsole, LogLevel}; use biome_fs::MemoryFileSystem; -use biome_service::DynRef; use bpaf::Args; -use std::path::{Path, PathBuf}; +use camino::{Utf8Path, Utf8PathBuf}; const TEST_CONTENTS: &str = "debugger;"; @@ -12,12 +11,12 @@ const TEST_CONTENTS: &str = "debugger;"; fn logs_the_appropriate_messages_according_to_set_diagnostics_level() { let mut console = BufferConsole::default(); let mut fs = MemoryFileSystem::default(); - let file_path = Path::new("biome.json"); + let file_path = Utf8Path::new("biome.json"); fs.insert( file_path.into(), r#"{ "files": { - "include": ["test.js"] + "includes": ["test.js"] }, "linter": { "rules": { @@ -32,20 +31,13 @@ fn logs_the_appropriate_messages_according_to_set_diagnostics_level() { .as_bytes(), ); - let test = Path::new("test.js"); + let test = Utf8Path::new("test.js"); fs.insert(test.into(), TEST_CONTENTS.as_bytes()); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from( - [ - ("lint"), - ("--diagnostic-level=error"), - test.as_os_str().to_str().unwrap(), - ] - .as_slice(), - ), + Args::from(["lint", "--diagnostic-level=error", 
test.as_str()].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); @@ -76,23 +68,23 @@ fn max_diagnostics_no_verbose() { let mut console = BufferConsole::default(); for i in 0..10 { - let file_path = PathBuf::from(format!("src/folder_{i}/package-lock.json")); + let file_path = Utf8PathBuf::from(format!("src/folder_{i}/package-lock.json")); fs.insert(file_path, "{}".as_bytes()); } - let file_path = PathBuf::from("src/file.js".to_string()); + let file_path = Utf8PathBuf::from("src/file.js".to_string()); fs.insert(file_path, UNFORMATTED.as_bytes()); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (mut fs, result) = run_cli( + fs, &mut console, - Args::from([("ci"), ("--max-diagnostics"), ("10"), ("src")].as_slice()), + Args::from(["ci", "--max-diagnostics", "10", "src"].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); for i in 0..10 { - let file_path = PathBuf::from(format!("src/folder_{i}/package-lock.json")); - fs.remove(Path::new(&file_path)); + let file_path = Utf8PathBuf::from(format!("src/folder_{i}/package-lock.json")); + fs.remove(Utf8Path::new(&file_path)); } assert_cli_snapshot(SnapshotPayload::new( @@ -110,23 +102,23 @@ fn max_diagnostics_verbose() { let mut console = BufferConsole::default(); for i in 0..8 { - let file_path = PathBuf::from(format!("src/folder_{i}/package-lock.json")); + let file_path = Utf8PathBuf::from(format!("src/folder_{i}/package-lock.json")); fs.insert(file_path, "{}".as_bytes()); } - let file_path = PathBuf::from("src/file.js".to_string()); + let file_path = Utf8PathBuf::from("src/file.js".to_string()); fs.insert(file_path, UNFORMATTED.as_bytes()); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (mut fs, result) = run_cli( + fs, &mut console, - Args::from([("ci"), ("--max-diagnostics=10"), "--verbose", ("src")].as_slice()), + Args::from(["ci", "--max-diagnostics=10", "--verbose", "src"].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); for i in 0..8 { - let file_path = PathBuf::from(format!("src/folder_{i}/package-lock.json")); - fs.remove(Path::new(&file_path)); + let file_path = Utf8PathBuf::from(format!("src/folder_{i}/package-lock.json")); + fs.remove(Utf8Path::new(&file_path)); } assert_cli_snapshot(SnapshotPayload::new( @@ -143,14 +135,14 @@ fn diagnostic_level() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let file_path = Path::new("biome.json"); + let file_path = Utf8Path::new("biome.json"); fs.insert( file_path.into(), r#"{ "formatter": { "enabled": true }, - "organizeImports": { + "assist": { "enabled": true }, "linter": { @@ -160,7 +152,7 @@ fn diagnostic_level() { "#, ); - let file_path = PathBuf::from("src/index.js".to_string()); + let file_path = Utf8PathBuf::from("src/index.js".to_string()); fs.insert( file_path, r#"import { graphql, useFragment, useMutation } from "react-relay"; @@ -168,10 +160,10 @@ import { FC, memo, useCallback } from "react"; "#, ); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from([("check"), ("--diagnostic-level=error"), ("src")].as_slice()), + Args::from(["check", "--diagnostic-level=error", "src"].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); @@ -183,7 +175,7 @@ import { FC, memo, useCallback } from "react"; .filter(|m| m.level == LogLevel::Error) .any(|m| { let content = format!("{:?}", m.content); - content.contains("organizeImports") + content.contains("assist") })); 
assert_cli_snapshot(SnapshotPayload::new( @@ -201,34 +193,26 @@ fn max_diagnostics_are_lifted() { let mut console = BufferConsole::default(); for i in 0..u8::MAX { - let file_path = PathBuf::from(format!("src/file_{i}.js")); + let file_path = Utf8PathBuf::from(format!("src/file_{i}.js")); fs.insert(file_path, UNFORMATTED.as_bytes()); } - let file_path = PathBuf::from("file.js".to_string()); + let file_path = Utf8PathBuf::from("file.js".to_string()); fs.insert( file_path.clone(), "debugger;".repeat(u8::MAX as usize * 2).as_bytes(), ); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (mut fs, result) = run_cli( + fs, &mut console, - Args::from( - [ - ("ci"), - ("--max-diagnostics"), - ("none"), - file_path.as_os_str().to_str().unwrap(), - ] - .as_slice(), - ), + Args::from(["ci", "--max-diagnostics", "none", file_path.as_str()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); for i in 0..u8::MAX { - let file_path = PathBuf::from(format!("src/file_{i}.js")); + let file_path = Utf8PathBuf::from(format!("src/file_{i}.js")); fs.remove(&file_path); } diff --git a/crates/biome_cli/tests/cases/editorconfig.rs b/crates/biome_cli/tests/cases/editorconfig.rs index b4f273c00ce1..15e457728ee5 100644 --- a/crates/biome_cli/tests/cases/editorconfig.rs +++ b/crates/biome_cli/tests/cases/editorconfig.rs @@ -1,39 +1,41 @@ use crate::run_cli; -use crate::snap_test::{assert_cli_snapshot, assert_file_contents, SnapshotPayload}; +use crate::snap_test::{assert_cli_snapshot, SnapshotPayload}; use biome_console::BufferConsole; use biome_fs::MemoryFileSystem; -use biome_service::DynRef; use bpaf::Args; -use std::path::Path; +use camino::Utf8Path; #[test] fn should_use_editorconfig() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let editorconfig = Path::new(".editorconfig"); + let editorconfig = Utf8Path::new(".editorconfig"); fs.insert( editorconfig.into(), r#" [*] -max_line_length = 300 +indent_style = space +indent_size = 8 "#, ); - let test_file = Path::new("test.js"); - let contents = r#"console.log("really long string that should cause a break if the line width remains at the default 80 characters"); + let test_file = Utf8Path::new("test.js"); + let contents = r#"function setName(name) { + currentName = name; +} "#; fs.insert(test_file.into(), contents); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, Args::from( [ - ("format"), - ("--write"), - ("--use-editorconfig=true"), - test_file.as_os_str().to_str().unwrap(), + "format", + "--write", + "--use-editorconfig=true", + test_file.as_str(), ] .as_slice(), ), @@ -41,7 +43,6 @@ max_line_length = 300 assert!(result.is_ok(), "run_cli returned {result:?}"); - assert_file_contents(&fs, test_file, contents); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "should_use_editorconfig", @@ -56,16 +57,17 @@ fn should_use_editorconfig_enabled_from_biome_conf() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let editorconfig = Path::new(".editorconfig"); + let editorconfig = Utf8Path::new(".editorconfig"); fs.insert( editorconfig.into(), r#" [*] -max_line_length = 300 +indent_style = space +indent_size = 8 "#, ); - let biomeconfig = Path::new("biome.json"); + let biomeconfig = Utf8Path::new("biome.json"); fs.insert( biomeconfig.into(), r#"{ @@ -76,27 +78,21 @@ max_line_length = 300 "#, ); - let test_file = Path::new("test.js"); - let contents = r#"console.log("really long string that should 
cause a break if the line width remains at the default 80 characters"); + let test_file = Utf8Path::new("test.js"); + let contents = r#"function setName(name) { + currentName = name; +} "#; fs.insert(test_file.into(), contents); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from( - [ - ("format"), - ("--write"), - test_file.as_os_str().to_str().unwrap(), - ] - .as_slice(), - ), + Args::from(["format", "--write", test_file.as_str()].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); - assert_file_contents(&fs, test_file, contents); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "should_use_editorconfig_enabled_from_biome_conf", @@ -111,36 +107,31 @@ fn should_use_editorconfig_check() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let editorconfig = Path::new(".editorconfig"); + let editorconfig = Utf8Path::new(".editorconfig"); fs.insert( editorconfig.into(), r#" [*] -max_line_length = 300 +indent_style = space +indent_size = 8 "#, ); - let test_file = Path::new("test.js"); - let contents = r#"console.log("really long string that should cause a break if the line width remains at the default 80 characters"); + let test_file = Utf8Path::new("test.js"); + let contents = r#"function setName(name) { + currentName = name; +} "#; fs.insert(test_file.into(), contents); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from( - [ - ("check"), - ("--use-editorconfig=true"), - test_file.as_os_str().to_str().unwrap(), - ] - .as_slice(), - ), + Args::from(["check", "--use-editorconfig=true", test_file.as_str()].as_slice()), ); - assert!(result.is_ok(), "run_cli returned {result:?}"); + assert!(result.is_err(), "run_cli returned {result:?}"); - assert_file_contents(&fs, test_file, contents); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "should_use_editorconfig_check", @@ -155,16 +146,17 @@ fn should_use_editorconfig_check_enabled_from_biome_conf() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let editorconfig = Path::new(".editorconfig"); + let editorconfig = Utf8Path::new(".editorconfig"); fs.insert( editorconfig.into(), r#" [*] -max_line_length = 300 +indent_style = space +indent_size = 8 "#, ); - let biomeconfig = Path::new("biome.json"); + let biomeconfig = Utf8Path::new("biome.json"); fs.insert( biomeconfig.into(), r#"{ @@ -175,20 +167,21 @@ max_line_length = 300 "#, ); - let test_file = Path::new("test.js"); - let contents = r#"console.log("really long string that should cause a break if the line width remains at the default 80 characters"); + let test_file = Utf8Path::new("test.js"); + let contents = r#"function setName(name) { + currentName = name; +} "#; fs.insert(test_file.into(), contents); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from([("check"), test_file.as_os_str().to_str().unwrap()].as_slice()), + Args::from(["check", test_file.as_str()].as_slice()), ); - assert!(result.is_ok(), "run_cli returned {result:?}"); + assert!(result.is_err(), "run_cli returned {result:?}"); - assert_file_contents(&fs, test_file, contents); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "should_use_editorconfig_check_enabled_from_biome_conf", @@ -203,18 +196,17 @@ fn should_have_biome_override_editorconfig() { let mut fs = MemoryFileSystem::default(); let mut console = 
BufferConsole::default(); - let editorconfig = Path::new(".editorconfig"); + let editorconfig = Utf8Path::new(".editorconfig"); fs.insert( editorconfig.into(), r#" [*] -max_line_length = 100 indent_style = tab "#, ); - let biomeconfig = Path::new("biome.json"); + let biome_config_path = Utf8Path::new("biome.json"); fs.insert( - biomeconfig.into(), + biome_config_path.into(), r#" { "formatter": { @@ -224,22 +216,22 @@ indent_style = tab "#, ); - let test_file = Path::new("test.js"); + let test_file = Utf8Path::new("test.js"); let contents = r#"console.log( "really long string that should break if the line width is <=90, but not at 100", ); "#; fs.insert(test_file.into(), contents); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, Args::from( [ - ("format"), - ("--write"), - ("--use-editorconfig=true"), - test_file.as_os_str().to_str().unwrap(), + "format", + "--write", + "--use-editorconfig=true", + test_file.as_str(), ] .as_slice(), ), @@ -247,7 +239,6 @@ indent_style = tab assert!(result.is_ok(), "run_cli returned {result:?}"); - assert_file_contents(&fs, test_file, contents); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "should_have_biome_override_editorconfig", @@ -262,34 +253,40 @@ fn should_have_cli_override_editorconfig() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let editorconfig = Path::new(".editorconfig"); + let editorconfig = Utf8Path::new(".editorconfig"); fs.insert( editorconfig.into(), r#" [*] -max_line_length = 90 +indent_style = space +indent_size = 8 "#, ); - let test_file = Path::new("test.js"); - fs.insert(test_file.into(), r#"console.log("really long string that should break if the line width is <=90, but not at 100"); -"#); + let test_file = Utf8Path::new("test.js"); + fs.insert( + test_file.into(), + r#"function setName(name) { + currentName = name; +} +"#, + ); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, Args::from( [ - ("check"), - ("--line-width=100"), - ("--use-editorconfig=true"), - test_file.as_os_str().to_str().unwrap(), + "check", + "--indent-width=4", + "--use-editorconfig=true", + test_file.as_str(), ] .as_slice(), ), ); - assert!(result.is_ok(), "run_cli returned {result:?}"); + assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), @@ -305,7 +302,7 @@ fn should_apply_path_overrides() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let editorconfig = Path::new(".editorconfig"); + let editorconfig = Utf8Path::new(".editorconfig"); fs.insert( editorconfig.into(), r#" @@ -317,7 +314,7 @@ indent_style = space "#, ); - let test_file = Path::new("tabs.js"); + let test_file = Utf8Path::new("tabs.js"); fs.insert( test_file.into(), r#" @@ -326,7 +323,7 @@ indent_style = space } "#, ); - let test_file2 = Path::new("foo/spaces.js"); + let test_file2 = Utf8Path::new("foo/spaces.js"); fs.insert( test_file.into(), r#" @@ -336,15 +333,15 @@ indent_style = space "#, ); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, Args::from( [ - ("check"), - ("--use-editorconfig=true"), - test_file.as_os_str().to_str().unwrap(), - test_file2.as_os_str().to_str().unwrap(), + "check", + "--use-editorconfig=true", + test_file.as_str(), + test_file2.as_str(), ] .as_slice(), ), @@ -366,36 +363,31 @@ fn should_use_editorconfig_ci() { let mut fs = 
MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let editorconfig = Path::new(".editorconfig"); + let editorconfig = Utf8Path::new(".editorconfig"); fs.insert( editorconfig.into(), r#" [*] -max_line_length = 300 +indent_style = space +indent_size = 8 "#, ); - let test_file = Path::new("test.js"); - let contents = r#"console.log("really long string that should cause a break if the line width remains at the default 80 characters"); + let test_file = Utf8Path::new("test.js"); + let contents = r#"function setName(name) { + currentName = name; +} "#; fs.insert(test_file.into(), contents); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from( - [ - ("ci"), - ("--use-editorconfig=true"), - test_file.as_os_str().to_str().unwrap(), - ] - .as_slice(), - ), + Args::from(["ci", "--use-editorconfig=true", test_file.as_str()].as_slice()), ); - assert!(result.is_ok(), "run_cli returned {result:?}"); + assert!(result.is_err(), "run_cli returned {result:?}"); - assert_file_contents(&fs, test_file, contents); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "should_use_editorconfig_ci", @@ -410,16 +402,17 @@ fn should_use_editorconfig_ci_enabled_from_biome_conf() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let editorconfig = Path::new(".editorconfig"); + let editorconfig = Utf8Path::new(".editorconfig"); fs.insert( editorconfig.into(), r#" [*] -max_line_length = 300 +indent_style = space +indent_size = 8 "#, ); - let biomeconfig = Path::new("biome.json"); + let biomeconfig = Utf8Path::new("biome.json"); fs.insert( biomeconfig.into(), r#"{ @@ -430,20 +423,19 @@ max_line_length = 300 "#, ); - let test_file = Path::new("test.js"); + let test_file = Utf8Path::new("test.js"); let contents = r#"console.log("really long string that should cause a break if the line width remains at the default 80 characters"); "#; fs.insert(test_file.into(), contents); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from([("ci"), test_file.as_os_str().to_str().unwrap()].as_slice()), + Args::from(["ci", test_file.as_str()].as_slice()), ); - assert!(result.is_ok(), "run_cli returned {result:?}"); + assert!(result.is_err(), "run_cli returned {result:?}"); - assert_file_contents(&fs, test_file, contents); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "should_use_editorconfig_ci_enabled_from_biome_conf", @@ -458,7 +450,7 @@ fn should_emit_diagnostics() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let editorconfig = Path::new(".editorconfig"); + let editorconfig = Utf8Path::new(".editorconfig"); fs.insert( editorconfig.into(), r#" @@ -467,20 +459,20 @@ insert_final_newline = false "#, ); - let test_file = Path::new("test.js"); + let test_file = Utf8Path::new("test.js"); let contents = r#"console.log("foo"); "#; fs.insert(test_file.into(), contents); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, Args::from( [ - ("format"), - ("--write"), - ("--use-editorconfig=true"), - test_file.as_os_str().to_str().unwrap(), + "format", + "--write", + "--use-editorconfig=true", + test_file.as_str(), ] .as_slice(), ), @@ -488,7 +480,6 @@ insert_final_newline = false assert!(result.is_ok(), "run_cli returned {result:?}"); - assert_file_contents(&fs, test_file, contents); assert_cli_snapshot(SnapshotPayload::new( module_path!(), 
"should_emit_diagnostics", diff --git a/crates/biome_cli/tests/cases/graphql.rs b/crates/biome_cli/tests/cases/graphql.rs index 5a43e961a739..270a687ff49a 100644 --- a/crates/biome_cli/tests/cases/graphql.rs +++ b/crates/biome_cli/tests/cases/graphql.rs @@ -2,9 +2,8 @@ use crate::run_cli; use crate::snap_test::{assert_cli_snapshot, assert_file_contents, SnapshotPayload}; use biome_console::BufferConsole; use biome_fs::MemoryFileSystem; -use biome_service::DynRef; use bpaf::Args; -use std::path::Path; +use camino::Utf8Path; const UNFORMATTED: &str = r#"type Query { me: User @@ -26,13 +25,13 @@ fn format_graphql_files() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let file_path = Path::new("file.graphql"); + let file_path = Utf8Path::new("file.graphql"); fs.insert(file_path.into(), UNFORMATTED.as_bytes()); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from([("format"), file_path.as_os_str().to_str().unwrap()].as_slice()), + Args::from(["format", file_path.as_str()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); @@ -53,20 +52,13 @@ fn format_and_write_graphql_files() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let file_path = Path::new("file.graphql"); + let file_path = Utf8Path::new("file.graphql"); fs.insert(file_path.into(), UNFORMATTED.as_bytes()); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from( - [ - ("format"), - "--write", - file_path.as_os_str().to_str().unwrap(), - ] - .as_slice(), - ), + Args::from(["format", "--write", file_path.as_str()].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); @@ -87,17 +79,17 @@ fn lint_single_rule() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let file_path = Path::new("file.graphql"); + let file_path = Utf8Path::new("file.graphql"); fs.insert(file_path.into(), MISSING_REASON.as_bytes()); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, Args::from( [ - ("lint"), + "lint", "--only=nursery/useDeprecatedReason", - file_path.as_os_str().to_str().unwrap(), + file_path.as_str(), ] .as_slice(), ), diff --git a/crates/biome_cli/tests/cases/handle_astro_files.rs b/crates/biome_cli/tests/cases/handle_astro_files.rs index 714f86ebd0bf..26c9f9e54c74 100644 --- a/crates/biome_cli/tests/cases/handle_astro_files.rs +++ b/crates/biome_cli/tests/cases/handle_astro_files.rs @@ -1,12 +1,9 @@ use crate::run_cli; -use crate::snap_test::{ - assert_cli_snapshot, assert_file_contents, markup_to_string, SnapshotPayload, -}; +use crate::snap_test::{assert_cli_snapshot, markup_to_string, SnapshotPayload}; use biome_console::{markup, BufferConsole}; use biome_fs::MemoryFileSystem; -use biome_service::DynRef; use bpaf::Args; -use std::path::Path; +use camino::Utf8Path; const ASTRO_FILE_UNFORMATTED: &str = r#"--- import { something } from "file.astro"; @@ -57,12 +54,6 @@ if (foo) { ---
"#; -const ASTRO_FILE_IMPORTS_AFTER: &str = r#"--- -import { Code } from "astro:components"; -import { getLocale } from "astro:i18n"; ---- -
"#; - const ASTRO_CARRIAGE_RETURN_LINE_FEED_FILE_UNFORMATTED: &str = "---\r\n const a = \"b\";\r\n---\r\n
"; @@ -75,23 +66,6 @@ var foo: string = ""; ---
"#; -const ASTRO_FILE_CHECK_APPLY_AFTER: &str = r#"--- -import { something } from "file.astro"; -import { a } from "mod"; -debugger; -statement(); -var foo = ""; ---- -
"#; - -const ASTRO_FILE_CHECK_APPLY_UNSAFE_AFTER: &str = r#"--- -import { something } from "file.astro"; -import { a } from "mod"; -statement(); -const foo = ""; ---- -
"#; - const ASTRO_FILE_ASTRO_GLOBAL_OBJECT: &str = r#"--- const { some } = Astro.props --- @@ -102,19 +76,17 @@ fn format_astro_files() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let astro_file_path = Path::new("file.astro"); + let astro_file_path = Utf8Path::new("file.astro"); fs.insert(astro_file_path.into(), ASTRO_FILE_UNFORMATTED.as_bytes()); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from([("format"), astro_file_path.as_os_str().to_str().unwrap()].as_slice()), + Args::from(["format", astro_file_path.as_str()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); - assert_file_contents(&fs, astro_file_path, ASTRO_FILE_UNFORMATTED); - assert_cli_snapshot(SnapshotPayload::new( module_path!(), "format_astro_files", @@ -129,26 +101,17 @@ fn format_astro_files_write() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let astro_file_path = Path::new("file.astro"); + let astro_file_path = Utf8Path::new("file.astro"); fs.insert(astro_file_path.into(), ASTRO_FILE_UNFORMATTED.as_bytes()); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from( - [ - "format", - "--write", - astro_file_path.as_os_str().to_str().unwrap(), - ] - .as_slice(), - ), + Args::from(["format", "--write", astro_file_path.as_str()].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); - assert_file_contents(&fs, astro_file_path, ASTRO_FILE_FORMATTED); - assert_cli_snapshot(SnapshotPayload::new( module_path!(), "format_astro_files_write", @@ -163,26 +126,17 @@ fn format_empty_astro_files_write() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let astro_file_path = Path::new("file.astro"); + let astro_file_path = Utf8Path::new("file.astro"); fs.insert(astro_file_path.into(), "
".as_bytes()); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from( - [ - "format", - "--write", - astro_file_path.as_os_str().to_str().unwrap(), - ] - .as_slice(), - ), + Args::from(["format", "--write", astro_file_path.as_str()].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); - assert_file_contents(&fs, astro_file_path, "
"); - assert_cli_snapshot(SnapshotPayload::new( module_path!(), "format_empty_astro_files_write", @@ -197,26 +151,20 @@ fn format_astro_carriage_return_line_feed_files() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let astro_file_path = Path::new("file.astro"); + let astro_file_path = Utf8Path::new("file.astro"); fs.insert( astro_file_path.into(), ASTRO_CARRIAGE_RETURN_LINE_FEED_FILE_UNFORMATTED.as_bytes(), ); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from([("format"), astro_file_path.as_os_str().to_str().unwrap()].as_slice()), + Args::from(["format", astro_file_path.as_str()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); - assert_file_contents( - &fs, - astro_file_path, - ASTRO_CARRIAGE_RETURN_LINE_FEED_FILE_UNFORMATTED, - ); - assert_cli_snapshot(SnapshotPayload::new( module_path!(), "format_astro_carriage_return_line_feed_files", @@ -231,16 +179,16 @@ fn lint_astro_files() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let astro_file_path = Path::new("file.astro"); + let astro_file_path = Utf8Path::new("file.astro"); fs.insert( astro_file_path.into(), ASTRO_FILE_DEBUGGER_BEFORE.as_bytes(), ); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from([("lint"), astro_file_path.as_os_str().to_str().unwrap()].as_slice()), + Args::from(["lint", astro_file_path.as_str()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); @@ -259,29 +207,20 @@ fn lint_and_fix_astro_files() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let astro_file_path = Path::new("file.astro"); + let astro_file_path = Utf8Path::new("file.astro"); fs.insert( astro_file_path.into(), ASTRO_FILE_DEBUGGER_BEFORE.as_bytes(), ); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from( - [ - ("lint"), - "--apply-unsafe", - astro_file_path.as_os_str().to_str().unwrap(), - ] - .as_slice(), - ), + Args::from(["lint", "--write", "--unsafe", astro_file_path.as_str()].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); - assert_file_contents(&fs, astro_file_path, ASTRO_FILE_DEBUGGER_AFTER); - assert_cli_snapshot(SnapshotPayload::new( module_path!(), "lint_and_fix_astro_files", @@ -296,18 +235,18 @@ fn sorts_imports_check() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let astro_file_path = Path::new("file.astro"); + let astro_file_path = Utf8Path::new("file.astro"); fs.insert(astro_file_path.into(), ASTRO_FILE_IMPORTS_BEFORE.as_bytes()); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, Args::from( [ - ("check"), + "check", "--formatter-enabled=false", "--linter-enabled=false", - astro_file_path.as_os_str().to_str().unwrap(), + astro_file_path.as_str(), ] .as_slice(), ), @@ -315,8 +254,6 @@ fn sorts_imports_check() { assert!(result.is_err(), "run_cli returned {result:?}"); - assert_file_contents(&fs, astro_file_path, ASTRO_FILE_IMPORTS_BEFORE); - assert_cli_snapshot(SnapshotPayload::new( module_path!(), "sorts_imports_check", @@ -331,19 +268,19 @@ fn sorts_imports_write() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let astro_file_path = Path::new("file.astro"); + let astro_file_path = Utf8Path::new("file.astro"); 
fs.insert(astro_file_path.into(), ASTRO_FILE_IMPORTS_BEFORE.as_bytes()); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, Args::from( [ - ("check"), + "check", "--formatter-enabled=false", "--linter-enabled=false", - "--apply", - astro_file_path.as_os_str().to_str().unwrap(), + "--write", + astro_file_path.as_str(), ] .as_slice(), ), @@ -351,8 +288,6 @@ fn sorts_imports_write() { assert!(result.is_ok(), "run_cli returned {result:?}"); - assert_file_contents(&fs, astro_file_path, ASTRO_FILE_IMPORTS_AFTER); - assert_cli_snapshot(SnapshotPayload::new( module_path!(), "sorts_imports_write", @@ -367,13 +302,13 @@ fn does_not_throw_parse_error_for_return() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let astro_file_path = Path::new("file.astro"); + let astro_file_path = Utf8Path::new("file.astro"); fs.insert(astro_file_path.into(), ASTRO_RETURN.as_bytes()); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from([("lint"), astro_file_path.as_os_str().to_str().unwrap()].as_slice()), + Args::from(["lint", astro_file_path.as_str()].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); @@ -389,13 +324,13 @@ fn does_not_throw_parse_error_for_return() { #[test] fn format_stdin_successfully() { - let mut fs = MemoryFileSystem::default(); + let fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); console.in_buffer.push(ASTRO_FILE_UNFORMATTED.to_string()); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, Args::from(["format", "--stdin-file-path", "file.astro"].as_slice()), ); @@ -424,13 +359,13 @@ fn format_stdin_successfully() { #[test] fn format_stdin_write_successfully() { - let mut fs = MemoryFileSystem::default(); + let fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); console.in_buffer.push(ASTRO_FILE_UNFORMATTED.to_string()); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, Args::from(["format", "--write", "--stdin-file-path", "file.astro"].as_slice()), ); @@ -459,15 +394,15 @@ fn format_stdin_write_successfully() { #[test] fn lint_stdin_successfully() { - let mut fs = MemoryFileSystem::default(); + let fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); console .in_buffer .push(ASTRO_FILE_USELESS_RENAME_BEFORE.to_string()); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, Args::from(["lint", "--stdin-file-path", "file.astro"].as_slice()), ); @@ -496,15 +431,15 @@ fn lint_stdin_successfully() { #[test] fn lint_stdin_write_successfully() { - let mut fs = MemoryFileSystem::default(); + let fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); console .in_buffer .push(ASTRO_FILE_USELESS_RENAME_BEFORE.to_string()); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, Args::from(["lint", "--write", "--stdin-file-path", "file.astro"].as_slice()), ); @@ -533,15 +468,15 @@ fn lint_stdin_write_successfully() { #[test] fn lint_stdin_write_unsafe_successfully() { - let mut fs = MemoryFileSystem::default(); + let fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); console .in_buffer .push(ASTRO_FILE_DEBUGGER_BEFORE.to_string()); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = 
run_cli( + fs, &mut console, Args::from( [ @@ -579,13 +514,13 @@ fn lint_stdin_write_unsafe_successfully() { #[test] fn check_stdin_successfully() { - let mut fs = MemoryFileSystem::default(); + let fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); console.in_buffer.push(ASTRO_FILE_CHECK_BEFORE.to_string()); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, Args::from(["check", "--stdin-file-path", "file.astro"].as_slice()), ); @@ -614,30 +549,19 @@ fn check_stdin_successfully() { #[test] fn check_stdin_write_successfully() { - let mut fs = MemoryFileSystem::default(); + let fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); console.in_buffer.push(ASTRO_FILE_CHECK_BEFORE.to_string()); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, Args::from(["check", "--write", "--stdin-file-path", "file.astro"].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); - let message = console - .out_buffer - .first() - .expect("Console should have written a message"); - - let content = markup_to_string(markup! { - {message.content} - }); - - assert_eq!(content, ASTRO_FILE_CHECK_APPLY_AFTER); - assert_cli_snapshot(SnapshotPayload::new( module_path!(), "check_stdin_write_successfully", @@ -649,13 +573,13 @@ fn check_stdin_write_successfully() { #[test] fn check_stdin_write_unsafe_successfully() { - let mut fs = MemoryFileSystem::default(); + let fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); console.in_buffer.push(ASTRO_FILE_CHECK_BEFORE.to_string()); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, Args::from( [ @@ -671,17 +595,6 @@ fn check_stdin_write_unsafe_successfully() { assert!(result.is_ok(), "run_cli returned {result:?}"); - let message = console - .out_buffer - .first() - .expect("Console should have written a message"); - - let content = markup_to_string(markup! 
{ - {message.content} - }); - - assert_eq!(content, ASTRO_FILE_CHECK_APPLY_UNSAFE_AFTER); - assert_cli_snapshot(SnapshotPayload::new( module_path!(), "check_stdin_write_unsafe_successfully", @@ -696,22 +609,20 @@ fn astro_global_object() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let astro_file_path = Path::new("file.astro"); + let astro_file_path = Utf8Path::new("file.astro"); fs.insert( astro_file_path.into(), ASTRO_FILE_ASTRO_GLOBAL_OBJECT.as_bytes(), ); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from([("lint"), astro_file_path.as_os_str().to_str().unwrap()].as_slice()), + Args::from(["lint", astro_file_path.as_str()].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); - assert_file_contents(&fs, astro_file_path, ASTRO_FILE_ASTRO_GLOBAL_OBJECT); - assert_cli_snapshot(SnapshotPayload::new( module_path!(), "astro_global", diff --git a/crates/biome_cli/tests/cases/handle_css_files.rs b/crates/biome_cli/tests/cases/handle_css_files.rs index ff0025fc8c57..981d4bf2fc69 100644 --- a/crates/biome_cli/tests/cases/handle_css_files.rs +++ b/crates/biome_cli/tests/cases/handle_css_files.rs @@ -2,9 +2,8 @@ use crate::run_cli; use crate::snap_test::{assert_cli_snapshot, SnapshotPayload}; use biome_console::BufferConsole; use biome_fs::MemoryFileSystem; -use biome_service::DynRef; use bpaf::Args; -use std::path::Path; +use camino::Utf8Path; #[test] fn should_not_format_files_by_default() { @@ -12,13 +11,13 @@ fn should_not_format_files_by_default() { let mut console = BufferConsole::default(); let css_file_content = r#"html {}"#; - let css_file = Path::new("input.css"); + let css_file = Utf8Path::new("input.css"); fs.insert(css_file.into(), css_file_content.as_bytes()); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from([("format"), css_file.as_os_str().to_str().unwrap()].as_slice()), + Args::from(["format", css_file.as_str()].as_slice()), ); // no files processed error @@ -39,20 +38,13 @@ fn should_format_files_by_when_opt_in() { let mut console = BufferConsole::default(); let css_file_content = r#"html {}"#; - let css_file = Path::new("input.css"); + let css_file = Utf8Path::new("input.css"); fs.insert(css_file.into(), css_file_content.as_bytes()); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from( - [ - "format", - "--css-formatter-enabled=true", - css_file.as_os_str().to_str().unwrap(), - ] - .as_slice(), - ), + Args::from(["format", "--css-formatter-enabled=true", css_file.as_str()].as_slice()), ); // not formatted error @@ -73,18 +65,18 @@ fn should_format_write_files_by_when_opt_in() { let mut console = BufferConsole::default(); let css_file_content = r#"html {}"#; - let css_file = Path::new("input.css"); + let css_file = Utf8Path::new("input.css"); fs.insert(css_file.into(), css_file_content.as_bytes()); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, Args::from( [ "format", "--write", "--css-formatter-enabled=true", - css_file.as_os_str().to_str().unwrap(), + css_file.as_str(), ] .as_slice(), ), @@ -100,84 +92,3 @@ fn should_format_write_files_by_when_opt_in() { result, )); } - -#[test] -fn should_not_lint_files_by_default() { - let mut fs = MemoryFileSystem::default(); - let mut console = BufferConsole::default(); - - let file_path = Path::new("biome.json"); - fs.insert( - 
file_path.into(), - r#"{ - "linter": { "rules": { "all": true } } -} -"# - .as_bytes(), - ); - - let css_file_content = r#"html {}"#; - let css_file = Path::new("input.css"); - fs.insert(css_file.into(), css_file_content.as_bytes()); - - let result = run_cli( - DynRef::Borrowed(&mut fs), - &mut console, - Args::from(["lint", css_file.as_os_str().to_str().unwrap()].as_slice()), - ); - - // no files processed error - assert!(result.is_err(), "run_cli returned {result:?}"); - - assert_cli_snapshot(SnapshotPayload::new( - module_path!(), - "should_not_lint_files_by_default", - fs, - console, - result, - )); -} - -#[test] -fn should_lint_files_by_when_enabled() { - let mut fs = MemoryFileSystem::default(); - let mut console = BufferConsole::default(); - - let file_path = Path::new("biome.json"); - fs.insert( - file_path.into(), - r#"{ - "linter": { "rules": { "all": true } } -} -"# - .as_bytes(), - ); - - let css_file_content = r#"html {}"#; - let css_file = Path::new("input.css"); - fs.insert(css_file.into(), css_file_content.as_bytes()); - - let result = run_cli( - DynRef::Borrowed(&mut fs), - &mut console, - Args::from( - [ - "lint", - "--css-linter-enabled=true", - css_file.as_os_str().to_str().unwrap(), - ] - .as_slice(), - ), - ); - - // diagnostic - assert!(result.is_err(), "run_cli returned {result:?}"); - - assert_cli_snapshot(SnapshotPayload::new( - module_path!(), - "should_lint_files_by_when_enabled", - fs, - console, - result, - )); -} diff --git a/crates/biome_cli/tests/cases/handle_svelte_files.rs b/crates/biome_cli/tests/cases/handle_svelte_files.rs index 3ba2d52fb78f..bc05e84e38c5 100644 --- a/crates/biome_cli/tests/cases/handle_svelte_files.rs +++ b/crates/biome_cli/tests/cases/handle_svelte_files.rs @@ -1,12 +1,9 @@ use crate::run_cli; -use crate::snap_test::{ - assert_cli_snapshot, assert_file_contents, markup_to_string, SnapshotPayload, -}; -use biome_console::{markup, BufferConsole}; +use crate::snap_test::{assert_cli_snapshot, assert_file_contents, SnapshotPayload}; +use biome_console::BufferConsole; use biome_fs::MemoryFileSystem; -use biome_service::DynRef; use bpaf::Args; -use std::path::Path; +use camino::Utf8Path; const SVELTE_FILE_IMPORTS_BEFORE: &str = r#"
"#; -const SVELTE_TS_FILE_LINT_APPLY_AFTER: &str = r#" -
"#; - -const SVELTE_TS_FILE_LINT_APPLY_UNSAFE_AFTER: &str = r#" -
"#; - const SVELTE_TS_FILE_CHECK_BEFORE: &str = r#"
"#; -const SVELTE_TS_FILE_CHECK_APPLY_AFTER: &str = r#" -
"#; - -const SVELTE_TS_FILE_CHECK_APPLY_UNSAFE_AFTER: &str = r#" -
"#; - #[test] fn sorts_imports_check() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let svelte_file_path = Path::new("file.svelte"); + let svelte_file_path = Utf8Path::new("file.svelte"); fs.insert( svelte_file_path.into(), SVELTE_FILE_IMPORTS_BEFORE.as_bytes(), ); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, Args::from( [ - ("check"), + "check", "--formatter-enabled=false", "--linter-enabled=false", - svelte_file_path.as_os_str().to_str().unwrap(), + svelte_file_path.as_str(), ] .as_slice(), ), @@ -119,22 +89,22 @@ fn sorts_imports_write() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let svelte_file_path = Path::new("file.svelte"); + let svelte_file_path = Utf8Path::new("file.svelte"); fs.insert( svelte_file_path.into(), SVELTE_FILE_IMPORTS_BEFORE.as_bytes(), ); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, Args::from( [ - ("check"), + "check", "--formatter-enabled=false", "--linter-enabled=false", - "--apply", - svelte_file_path.as_os_str().to_str().unwrap(), + "--write", + svelte_file_path.as_str(), ] .as_slice(), ), @@ -158,16 +128,16 @@ fn format_svelte_ts_context_module_files() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let svelte_file_path = Path::new("file.svelte"); + let svelte_file_path = Utf8Path::new("file.svelte"); fs.insert( svelte_file_path.into(), SVELTE_TS_CONTEXT_MODULE_FILE_UNFORMATTED.as_bytes(), ); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from([("format"), svelte_file_path.as_os_str().to_str().unwrap()].as_slice()), + Args::from(["format", svelte_file_path.as_str()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); @@ -192,23 +162,16 @@ fn format_svelte_ts_context_module_files_write() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let svelte_file_path = Path::new("file.svelte"); + let svelte_file_path = Utf8Path::new("file.svelte"); fs.insert( svelte_file_path.into(), SVELTE_TS_CONTEXT_MODULE_FILE_UNFORMATTED.as_bytes(), ); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from( - [ - "format", - "--write", - svelte_file_path.as_os_str().to_str().unwrap(), - ] - .as_slice(), - ), + Args::from(["format", "--write", svelte_file_path.as_str()].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); @@ -233,16 +196,16 @@ fn format_svelte_carriage_return_line_feed_files() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); - let svelte_file_path = Path::new("file.svelte"); + let svelte_file_path = Utf8Path::new("file.svelte"); fs.insert( svelte_file_path.into(), SVELTE_CARRIAGE_RETURN_LINE_FEED_FILE_UNFORMATTED.as_bytes(), ); - let result = run_cli( - DynRef::Borrowed(&mut fs), + let (fs, result) = run_cli( + fs, &mut console, - Args::from([("format"), svelte_file_path.as_os_str().to_str().unwrap()].as_slice()), + Args::from(["format", svelte_file_path.as_str()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); @@ -264,32 +227,21 @@ fn format_svelte_carriage_return_line_feed_files() { #[test] fn format_stdin_successfully() { - let mut fs = MemoryFileSystem::default(); + let fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); console 
         .in_buffer
         .push(SVELTE_TS_CONTEXT_MODULE_FILE_UNFORMATTED.to_string());
 
-    let result = run_cli(
-        DynRef::Borrowed(&mut fs),
+    let (fs, result) = run_cli(
+        fs,
         &mut console,
         Args::from(["format", "--stdin-file-path", "file.svelte"].as_slice()),
     );
 
     assert!(result.is_ok(), "run_cli returned {result:?}");
 
-    let message = console
-        .out_buffer
-        .first()
-        .expect("Console should have written a message");
-
-    let content = markup_to_string(markup! {
-        {message.content}
-    });
-
-    assert_eq!(content, SVELTE_TS_CONTEXT_MODULE_FILE_FORMATTED);
-
     assert_cli_snapshot(SnapshotPayload::new(
         module_path!(),
         "format_stdin_successfully",
@@ -301,32 +253,21 @@ fn format_stdin_successfully() {
 
 #[test]
 fn format_stdin_write_successfully() {
-    let mut fs = MemoryFileSystem::default();
+    let fs = MemoryFileSystem::default();
     let mut console = BufferConsole::default();
 
     console
         .in_buffer
         .push(SVELTE_TS_CONTEXT_MODULE_FILE_UNFORMATTED.to_string());
 
-    let result = run_cli(
-        DynRef::Borrowed(&mut fs),
+    let (fs, result) = run_cli(
+        fs,
         &mut console,
         Args::from(["format", "--write", "--stdin-file-path", "file.svelte"].as_slice()),
     );
 
     assert!(result.is_ok(), "run_cli returned {result:?}");
 
-    let message = console
-        .out_buffer
-        .first()
-        .expect("Console should have written a message");
-
-    let content = markup_to_string(markup! {
-        {message.content}
-    });
-
-    assert_eq!(content, SVELTE_TS_CONTEXT_MODULE_FILE_FORMATTED);
-
     assert_cli_snapshot(SnapshotPayload::new(
         module_path!(),
         "format_stdin_write_successfully",
@@ -338,32 +279,21 @@ fn format_stdin_write_successfully() {
 
 #[test]
 fn lint_stdin_successfully() {
-    let mut fs = MemoryFileSystem::default();
+    let fs = MemoryFileSystem::default();
     let mut console = BufferConsole::default();
 
     console
         .in_buffer
         .push(SVELTE_TS_FILE_LINT_BEFORE.to_string());
 
-    let result = run_cli(
-        DynRef::Borrowed(&mut fs),
+    let (fs, result) = run_cli(
+        fs,
         &mut console,
         Args::from(["lint", "--stdin-file-path", "file.svelte"].as_slice()),
    );
 
     assert!(result.is_err(), "run_cli returned {result:?}");
 
-    let message = console
-        .out_buffer
-        .first()
-        .expect("Console should have written a message");
-
-    let content = markup_to_string(markup! {
-        {message.content}
-    });
-
-    assert_eq!(content, SVELTE_TS_FILE_LINT_BEFORE);
-
     assert_cli_snapshot(SnapshotPayload::new(
         module_path!(),
         "lint_stdin_successfully",
@@ -375,32 +305,21 @@ fn lint_stdin_successfully() {
 
 #[test]
 fn lint_stdin_write_successfully() {
-    let mut fs = MemoryFileSystem::default();
+    let fs = MemoryFileSystem::default();
     let mut console = BufferConsole::default();
 
     console
         .in_buffer
         .push(SVELTE_TS_FILE_LINT_BEFORE.to_string());
 
-    let result = run_cli(
-        DynRef::Borrowed(&mut fs),
+    let (fs, result) = run_cli(
+        fs,
         &mut console,
         Args::from(["lint", "--write", "--stdin-file-path", "file.svelte"].as_slice()),
     );
 
     assert!(result.is_ok(), "run_cli returned {result:?}");
 
-    let message = console
-        .out_buffer
-        .first()
-        .expect("Console should have written a message");
-
-    let content = markup_to_string(markup! {
-        {message.content}
-    });
-
-    assert_eq!(content, SVELTE_TS_FILE_LINT_APPLY_AFTER);
-
     assert_cli_snapshot(SnapshotPayload::new(
         module_path!(),
         "lint_stdin_write_successfully",
@@ -412,15 +331,15 @@ fn lint_stdin_write_successfully() {
 
 #[test]
 fn lint_stdin_write_unsafe_successfully() {
-    let mut fs = MemoryFileSystem::default();
+    let fs = MemoryFileSystem::default();
     let mut console = BufferConsole::default();
 
     console
         .in_buffer
         .push(SVELTE_TS_FILE_LINT_BEFORE.to_string());
 
-    let result = run_cli(
-        DynRef::Borrowed(&mut fs),
+    let (fs, result) = run_cli(
+        fs,
         &mut console,
         Args::from(
             [
@@ -436,17 +355,6 @@ fn lint_stdin_write_unsafe_successfully() {
 
     assert!(result.is_ok(), "run_cli returned {result:?}");
 
-    let message = console
-        .out_buffer
-        .first()
-        .expect("Console should have written a message");
-
-    let content = markup_to_string(markup! {
-        {message.content}
-    });
-
-    assert_eq!(content, SVELTE_TS_FILE_LINT_APPLY_UNSAFE_AFTER);
-
     assert_cli_snapshot(SnapshotPayload::new(
         module_path!(),
         "lint_stdin_write_unsafe_successfully",
@@ -458,32 +366,21 @@ fn lint_stdin_write_unsafe_successfully() {
 
 #[test]
 fn check_stdin_successfully() {
-    let mut fs = MemoryFileSystem::default();
+    let fs = MemoryFileSystem::default();
     let mut console = BufferConsole::default();
 
     console
         .in_buffer
         .push(SVELTE_TS_FILE_CHECK_BEFORE.to_string());
 
-    let result = run_cli(
-        DynRef::Borrowed(&mut fs),
+    let (fs, result) = run_cli(
+        fs,
         &mut console,
         Args::from(["check", "--stdin-file-path", "file.svelte"].as_slice()),
     );
 
     assert!(result.is_err(), "run_cli returned {result:?}");
 
-    let message = console
-        .out_buffer
-        .first()
-        .expect("Console should have written a message");
-
-    let content = markup_to_string(markup! {
-        {message.content}
-    });
-
-    assert_eq!(content, SVELTE_TS_FILE_CHECK_BEFORE);
-
     assert_cli_snapshot(SnapshotPayload::new(
         module_path!(),
         "check_stdin_successfully",
@@ -495,32 +392,21 @@ fn check_stdin_successfully() {
 
 #[test]
 fn check_stdin_write_successfully() {
-    let mut fs = MemoryFileSystem::default();
+    let fs = MemoryFileSystem::default();
     let mut console = BufferConsole::default();
 
     console
         .in_buffer
         .push(SVELTE_TS_FILE_CHECK_BEFORE.to_string());
 
-    let result = run_cli(
-        DynRef::Borrowed(&mut fs),
+    let (fs, result) = run_cli(
+        fs,
         &mut console,
         Args::from(["check", "--write", "--stdin-file-path", "file.svelte"].as_slice()),
     );
 
     assert!(result.is_ok(), "run_cli returned {result:?}");
 
-    let message = console
-        .out_buffer
-        .first()
-        .expect("Console should have written a message");
-
-    let content = markup_to_string(markup! {
-        {message.content}
-    });
-
-    assert_eq!(content, SVELTE_TS_FILE_CHECK_APPLY_AFTER);
-
     assert_cli_snapshot(SnapshotPayload::new(
         module_path!(),
         "check_stdin_write_successfully",
@@ -532,15 +418,15 @@ fn check_stdin_write_successfully() {
 
 #[test]
 fn check_stdin_write_unsafe_successfully() {
-    let mut fs = MemoryFileSystem::default();
+    let fs = MemoryFileSystem::default();
     let mut console = BufferConsole::default();
 
     console
         .in_buffer
         .push(SVELTE_TS_FILE_CHECK_BEFORE.to_string());
 
-    let result = run_cli(
-        DynRef::Borrowed(&mut fs),
+    let (fs, result) = run_cli(
+        fs,
         &mut console,
         Args::from(
             [
@@ -556,17 +442,6 @@ fn check_stdin_write_unsafe_successfully() {
 
     assert!(result.is_ok(), "run_cli returned {result:?}");
 
-    let message = console
-        .out_buffer
-        .first()
-        .expect("Console should have written a message");
-
-    let content = markup_to_string(markup! {
-        {message.content}
-    });
-
-    assert_eq!(content, SVELTE_TS_FILE_CHECK_APPLY_UNSAFE_AFTER);
-
     assert_cli_snapshot(SnapshotPayload::new(
         module_path!(),
         "check_stdin_write_unsafe_successfully",
diff --git a/crates/biome_cli/tests/cases/handle_vue_files.rs b/crates/biome_cli/tests/cases/handle_vue_files.rs
index da261f14b810..9a16660834b1 100644
--- a/crates/biome_cli/tests/cases/handle_vue_files.rs
+++ b/crates/biome_cli/tests/cases/handle_vue_files.rs
@@ -1,12 +1,9 @@
 use crate::run_cli;
-use crate::snap_test::{
-    assert_cli_snapshot, assert_file_contents, markup_to_string, SnapshotPayload,
-};
-use biome_console::{markup, BufferConsole};
+use crate::snap_test::{assert_cli_snapshot, assert_file_contents, SnapshotPayload};
+use biome_console::BufferConsole;
 use biome_fs::MemoryFileSystem;
-use biome_service::DynRef;
 use bpaf::Args;
-use std::path::Path;
+use camino::Utf8Path;
 
 const VUE_IMPLICIT_JS_FILE_UNFORMATTED: &str = r#"
 "#;
-const VUE_TS_FILE_SAFE_LINTED: &str = r#"
-"#;
-
-const VUE_TS_FILE_UNSAFE_LINTED: &str = r#"
-"#;
-
 const VUE_FILE_IMPORTS_BEFORE: &str = r#"
 "#;
-const VUE_TS_FILE_CHECK_APPLY_AFTER: &str = r#"
-"#;
-
-const VUE_TS_FILE_CHECK_APPLY_UNSAFE_AFTER: &str = r#"
-"#;
-
 const VUE_TS_FILE_SETUP_GLOBALS: &str = r#"
```
diff --git a/crates/biome_cli/tests/snapshots/main_cases_handle_svelte_files/check_stdin_write_unsafe_successfully.snap b/crates/biome_cli/tests/snapshots/main_cases_handle_svelte_files/check_stdin_write_unsafe_successfully.snap
index 0a98c5fcd685..7cf98f1404e3 100644
--- a/crates/biome_cli/tests/snapshots/main_cases_handle_svelte_files/check_stdin_write_unsafe_successfully.snap
+++ b/crates/biome_cli/tests/snapshots/main_cases_handle_svelte_files/check_stdin_write_unsafe_successfully.snap
@@ -1,6 +1,7 @@
 ---
 source: crates/biome_cli/tests/snap_test.rs
 expression: content
+snapshot_kind: text
 ---
 
 # Input messages
@@ -22,7 +23,7 @@ var foo: string = "";
 import Button from "./components/Button.svelte";
 import { Form } from "./components/Form.svelte";
 statement();
-const foo = "";
+var foo: string = "";
```
diff --git a/crates/biome_cli/tests/snapshots/main_cases_handle_svelte_files/lint_stdin_write_successfully.snap b/crates/biome_cli/tests/snapshots/main_cases_handle_svelte_files/lint_stdin_write_successfully.snap
index 2d0ed2d990fc..84c51eb6a9c1 100644
--- a/crates/biome_cli/tests/snapshots/main_cases_handle_svelte_files/lint_stdin_write_successfully.snap
+++ b/crates/biome_cli/tests/snapshots/main_cases_handle_svelte_files/lint_stdin_write_successfully.snap
@@ -1,6 +1,7 @@
 ---
 source: crates/biome_cli/tests/snap_test.rs
 expression: content
+snapshot_kind: text
 ---
 
 # Input messages
@@ -15,7 +16,7 @@ var foo: string = "";
 ```block
```
diff --git a/crates/biome_cli/tests/snapshots/main_cases_handle_svelte_files/lint_stdin_write_unsafe_successfully.snap b/crates/biome_cli/tests/snapshots/main_cases_handle_svelte_files/lint_stdin_write_unsafe_successfully.snap
index 0dc49a8449e6..84c51eb6a9c1 100644
--- a/crates/biome_cli/tests/snapshots/main_cases_handle_svelte_files/lint_stdin_write_unsafe_successfully.snap
+++ b/crates/biome_cli/tests/snapshots/main_cases_handle_svelte_files/lint_stdin_write_unsafe_successfully.snap
@@ -1,6 +1,7 @@
 ---
 source: crates/biome_cli/tests/snap_test.rs
 expression: content
+snapshot_kind: text
 ---
 
 # Input messages
@@ -15,7 +16,7 @@ var foo: string = "";
 ```block
```
diff --git a/crates/biome_cli/tests/snapshots/main_cases_handle_svelte_files/sorts_imports_check.snap b/crates/biome_cli/tests/snapshots/main_cases_handle_svelte_files/sorts_imports_check.snap
index 271aac845eee..7c3d565332ff 100644
--- a/crates/biome_cli/tests/snapshots/main_cases_handle_svelte_files/sorts_imports_check.snap
+++ b/crates/biome_cli/tests/snapshots/main_cases_handle_svelte_files/sorts_imports_check.snap
@@ -1,6 +1,7 @@
 ---
 source: crates/biome_cli/tests/snap_test.rs
 expression: content
+snapshot_kind: text
 ---
 
 ## `file.svelte`
@@ -26,9 +27,9 @@ check ━━━━━━━━━━━━━━━━━━━━━━━━
 # Emitted Messages
 
 ```block
-file.svelte organizeImports ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+file.svelte assist ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
 
-  × Import statements could be sorted:
+  × Not all actions were applied:
 
     1 1 │ -
-```
-
-# Emitted Messages
-
-```block
-Formatted 1 file(s) in