Version 3.6.0-181.0.dev
Merge f391b0d into dev
Dart CI committed Aug 26, 2024
2 parents 94c21ec + f391b0d commit 8163e2e
Showing 10 changed files with 296 additions and 195 deletions.
14 changes: 8 additions & 6 deletions pkg/_fe_analyzer_shared/lib/src/scanner/abstract_scanner.dart
@@ -142,10 +142,11 @@ abstract class AbstractScanner implements Scanner {
 
   final bool inRecoveryOption;
   int recoveryCount = 0;
+  final bool allowLazyStrings;
 
   AbstractScanner(ScannerConfiguration? config, this.includeComments,
       this.languageVersionChanged,
-      {int? numberOfBytesHint})
+      {int? numberOfBytesHint, this.allowLazyStrings = true})
       : lineStarts = new LineStarts(numberOfBytesHint),
         inRecoveryOption = false {
     this.tail = this.tokens;
@@ -159,7 +160,8 @@
       : lineStarts = [],
         includeComments = false,
         languageVersionChanged = null,
-        inRecoveryOption = true {
+        inRecoveryOption = true,
+        allowLazyStrings = true {
     this.tail = this.tokens;
     this.errorTail = this.tokens;
     this._enableExtensionMethods = copyFrom._enableExtensionMethods;
@@ -265,7 +267,8 @@
    */
   void appendSubstringToken(TokenType type, int start, bool asciiOnly,
       [int extraOffset = 0]) {
-    appendToken(createSubstringToken(type, start, asciiOnly, extraOffset));
+    appendToken(createSubstringToken(
+        type, start, asciiOnly, extraOffset, allowLazyStrings));
   }
 
   /**
@@ -277,9 +280,8 @@
    * Note that [extraOffset] can only be used if the covered character(s) are
    * known to be ASCII.
    */
-  analyzer.StringToken createSubstringToken(
-      TokenType type, int start, bool asciiOnly,
-      [int extraOffset = 0]);
+  analyzer.StringToken createSubstringToken(TokenType type, int start,
+      bool asciiOnly, int extraOffset, bool allowLazy);
 
   /**
    * Appends a substring from the scan offset [start] to the current
6 changes: 4 additions & 2 deletions pkg/_fe_analyzer_shared/lib/src/scanner/scanner.dart
@@ -71,14 +71,16 @@ class ScannerResult
 ScannerResult scan(Uint8List bytes,
     {ScannerConfiguration? configuration,
     bool includeComments = false,
-    LanguageVersionChanged? languageVersionChanged}) {
+    LanguageVersionChanged? languageVersionChanged,
+    bool allowLazyStrings = true}) {
   if (bytes.last != 0) {
     throw new ArgumentError("[bytes]: the last byte must be 0.");
   }
   Scanner scanner = new Utf8BytesScanner(bytes,
       configuration: configuration,
      includeComments: includeComments,
-      languageVersionChanged: languageVersionChanged);
+      languageVersionChanged: languageVersionChanged,
+      allowLazyStrings: allowLazyStrings);
   return _tokenizeAndRecover(scanner, bytes: bytes);
 }
 
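For context, a minimal usage sketch of the new parameter on scan(...). Only the signature, the default of allowLazyStrings, and the trailing-zero-byte requirement come from the diff above; the import paths and the token-walking loop are assumptions about the surrounding API, not part of this commit.

import 'dart:convert' show utf8;
import 'dart:typed_data';

import 'package:_fe_analyzer_shared/src/scanner/scanner.dart';
import 'package:_fe_analyzer_shared/src/scanner/token.dart' show Token;

void main() {
  // The scanner requires the input to end with a 0 byte (see the check in
  // scan above).
  Uint8List bytes = Uint8List.fromList([...utf8.encode('main() {}'), 0]);

  // allowLazyStrings defaults to true; passing false asks the scanner to
  // materialize every substring token eagerly instead of keeping lazy
  // references into the original buffer.
  ScannerResult result = scan(bytes, allowLazyStrings: false);

  for (Token token = result.tokens; !token.isEof; token = token.next!) {
    print(token.lexeme);
  }
}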
9 changes: 5 additions & 4 deletions pkg/_fe_analyzer_shared/lib/src/scanner/string_scanner.dart
@@ -86,12 +86,13 @@ class StringScanner extends AbstractScanner {
   void handleUnicode(int startScanOffset) {}
 
   @override
-  analyzer.StringToken createSubstringToken(
-      TokenType type, int start, bool asciiOnly,
-      [int extraOffset = 0]) {
+  analyzer.StringToken createSubstringToken(TokenType type, int start,
+      bool asciiOnly, int extraOffset, bool allowLazy) {
     return new StringTokenImpl.fromSubstring(
         type, string, start, scanOffset + extraOffset, tokenStart,
-        canonicalize: true, precedingComments: comments);
+        canonicalize: true,
+        precedingComments: comments,
+        allowLazyFoo: allowLazy);
   }
 
   @override
10 changes: 6 additions & 4 deletions pkg/_fe_analyzer_shared/lib/src/scanner/token_impl.dart
@@ -52,10 +52,12 @@ class StringTokenImpl extends SimpleToken implements StringToken {
    */
   StringTokenImpl.fromSubstring(
       TokenType type, String data, int start, int end, int charOffset,
-      {bool canonicalize = false, CommentToken? precedingComments})
+      {bool canonicalize = false,
+      CommentToken? precedingComments,
+      bool allowLazyFoo = true})
       : super(type, charOffset, precedingComments) {
     int length = end - start;
-    if (length <= LAZY_THRESHOLD) {
+    if (!allowLazyFoo || length <= LAZY_THRESHOLD) {
       valueOrLazySubstring = canonicalize
           ? canonicalizeSubString(data, start, end)
           : data.substring(start, end);
@@ -71,10 +73,10 @@
    */
   StringTokenImpl.fromUtf8Bytes(TokenType type, Uint8List data, int start,
       int end, bool asciiOnly, int charOffset,
-      {CommentToken? precedingComments})
+      {CommentToken? precedingComments, bool allowLazyFoo = true})
       : super(type, charOffset, precedingComments) {
     int length = end - start;
-    if (length <= LAZY_THRESHOLD) {
+    if (!allowLazyFoo || length <= LAZY_THRESHOLD) {
       valueOrLazySubstring =
           canonicalizeUtf8SubString(data, start, end, asciiOnly);
     } else {
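The guard in both constructors is what the new flag controls: when allowLazyFoo is false (or the lexeme is short), the substring is copied out of the source right away rather than stored as a lazy reference. A small sketch of using the constructor directly; the sample string, offsets, and import paths are illustrative assumptions, and only the constructor signature and named parameters are taken from the diff above.

import 'package:_fe_analyzer_shared/src/scanner/token.dart' show TokenType;
import 'package:_fe_analyzer_shared/src/scanner/token_impl.dart';

void main() {
  String source = 'var greeting = "a reasonably long string literal";';
  int start = source.indexOf('"');
  int end = source.lastIndexOf('"') + 1;

  // With allowLazyFoo: false the lexeme is copied (and canonicalized) out of
  // [source] immediately; with the default (true) a lexeme longer than
  // LAZY_THRESHOLD would instead be kept as a lazy substring into [source].
  StringTokenImpl token = new StringTokenImpl.fromSubstring(
      TokenType.STRING, source, start, end, start,
      canonicalize: true, allowLazyFoo: false);

  print(token.lexeme);
}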
13 changes: 7 additions & 6 deletions pkg/_fe_analyzer_shared/lib/src/scanner/utf8_bytes_scanner.dart
@@ -97,9 +97,11 @@ class Utf8BytesScanner extends AbstractScanner {
   Utf8BytesScanner(this.bytes,
       {ScannerConfiguration? configuration,
       bool includeComments = false,
-      LanguageVersionChanged? languageVersionChanged})
+      LanguageVersionChanged? languageVersionChanged,
+      bool allowLazyStrings = true})
       : super(configuration, includeComments, languageVersionChanged,
-            numberOfBytesHint: bytes.length) {
+            numberOfBytesHint: bytes.length,
+            allowLazyStrings: allowLazyStrings) {
     assert(bytes.last == 0);
     // Skip a leading BOM.
     if (containsBomAt(/* offset = */ 0)) {
@@ -241,12 +243,11 @@
   }
 
   @override
-  analyzer.StringToken createSubstringToken(
-      TokenType type, int start, bool asciiOnly,
-      [int extraOffset = 0]) {
+  analyzer.StringToken createSubstringToken(TokenType type, int start,
+      bool asciiOnly, int extraOffset, bool allowLazy) {
     return new StringTokenImpl.fromUtf8Bytes(
         type, bytes, start, byteOffset + extraOffset, asciiOnly, tokenStart,
-        precedingComments: comments);
+        precedingComments: comments, allowLazyFoo: allowLazy);
   }
 
   @override
14 changes: 8 additions & 6 deletions pkg/front_end/lib/src/source/source_loader.dart
@@ -969,7 +969,8 @@ severity: $severity
   Set<SourceCompilationUnit> _unavailableDartLibraries = {};
 
   Future<Token> tokenize(SourceCompilationUnit compilationUnit,
-      {bool suppressLexicalErrors = false}) async {
+      {bool suppressLexicalErrors = false,
+      bool allowLazyStrings = true}) async {
     target.benchmarker
         // Coverage-ignore(suite): Not run.
         ?.beginSubdivide(BenchmarkSubdivides.tokenize);
@@ -1060,7 +1061,7 @@ severity: $severity
           enableExtensionMethods:
               compilationUnit.libraryFeatures.extensionMethods.isEnabled,
           enableNonNullable: true);
-    });
+    }, allowLazyStrings: allowLazyStrings);
     Token token = result.tokens;
     if (!suppressLexicalErrors) {
       List<int> source = getSource(bytes);
@@ -1274,7 +1275,8 @@ severity: $severity
     // second time, and the first time was in [buildOutline] above. So this
     // time we suppress lexical errors.
     SourceCompilationUnit compilationUnit = library.compilationUnit;
-    Token tokens = await tokenize(compilationUnit, suppressLexicalErrors: true);
+    Token tokens = await tokenize(compilationUnit,
+        suppressLexicalErrors: true, allowLazyStrings: false);
 
     if (target.benchmarker != null) {
       // When benchmarking we do extra parsing on it's own to get a timing of
@@ -1310,8 +1312,8 @@ severity: $severity
         allowPatterns: library.libraryFeatures.patterns.isEnabled);
     parser.parseUnit(tokens);
     for (SourceCompilationUnit compilationUnit in library.parts) {
-      Token tokens =
-          await tokenize(compilationUnit, suppressLexicalErrors: true);
+      Token tokens = await tokenize(compilationUnit,
+          suppressLexicalErrors: true, allowLazyStrings: false);
       DietListener listener =
           createDietListener(library, compilationUnit.offsetMap);
       DietParser parser = new DietParser(listener,
@@ -1328,7 +1330,7 @@ severity: $severity
       FunctionNode parameters,
       VariableDeclaration? extensionThis) async {
     Token token = await tokenize(libraryBuilder.compilationUnit,
-        suppressLexicalErrors: false);
+        suppressLexicalErrors: false, allowLazyStrings: false);
     DietListener dietListener = createDietListener(
         libraryBuilder,
         // Expression compilation doesn't build an outline, and thus doesn't
2 changes: 1 addition & 1 deletion pkg/front_end/lib/src/util/textual_outline.dart
@@ -450,7 +450,7 @@ String? textualOutline(
         ..originalPosition = originalPosition.value++);
     // Coverage-ignore-block(suite): Not run.
     infoForTesting?.languageVersionTokens.add(languageVersionToken);
-  });
+  }, allowLazyStrings: false);
   Token firstToken = scanner.tokenize();
   TextualOutlineListener listener = new TextualOutlineListener();
   ClassMemberParser classMemberParser =
6 changes: 4 additions & 2 deletions pkg/front_end/test/comments_on_certain_arguments_tool.dart
@@ -401,9 +401,11 @@ class TestSourceLoader extends SourceLoader {
 
   @override
   Future<Token> tokenize(SourceCompilationUnit sourceCompilationUnit,
-      {bool suppressLexicalErrors = false}) async {
+      {bool suppressLexicalErrors = false,
+      bool allowLazyStrings = true}) async {
     Token result = await super.tokenize(sourceCompilationUnit,
-        suppressLexicalErrors: suppressLexicalErrors);
+        suppressLexicalErrors: suppressLexicalErrors,
+        allowLazyStrings: allowLazyStrings);
     cache[sourceCompilationUnit.fileUri] = result;
     return result;
   }
(The remaining 2 of the 10 changed files are not shown here.)
