diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala
index c54bd5b08b6b..009a4e44a91e 100644
--- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala
+++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala
@@ -48,6 +48,9 @@ class ScalaSettings extends Settings.SettingGroup {
val silentWarnings: Setting[Boolean] = BooleanSetting("-nowarn", "Silence all warnings.") withAbbreviation "--no-warnings"
val fromTasty: Setting[Boolean] = BooleanSetting("-from-tasty", "Compile classes from tasty in classpath. The arguments are used as class names.") withAbbreviation "--from-tasty"
+ val newSyntax: Setting[Boolean] = BooleanSetting("-new-syntax", "Require `then` and `do` in control expressions")
+ val oldSyntax: Setting[Boolean] = BooleanSetting("-old-syntax", "Require `(...)` around conditions")
+
/** Decompiler settings */
val printTasty: Setting[Boolean] = BooleanSetting("-print-tasty", "Prints the raw tasty.") withAbbreviation "--print-tasty"
val printLines: Setting[Boolean] = BooleanSetting("-print-lines", "Show source code line numbers.") withAbbreviation "--print-lines"
diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala
index 7dd5ccf429e0..b41f44c47c58 100644
--- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala
@@ -22,7 +22,7 @@ import Constants._
import Symbols.defn
import ScriptParsers._
import Decorators._
-import scala.internal.Chars.isIdentifierStart
+import scala.internal.Chars
import scala.annotation.{tailrec, switch}
import rewrites.Rewrites.patch
@@ -351,6 +351,31 @@ object Parsers {
accept(SEMI)
}
+ def rewriteNotice(additionalOption: String = "") = {
+ val optionStr = if (additionalOption.isEmpty) "" else " " ++ additionalOption
+ i"\nThis construct can be rewritten automatically under$optionStr -rewrite."
+ }
+
+ def syntaxVersionError(option: String, span: Span) = {
+ syntaxError(em"""This construct is not allowed under $option.${rewriteNotice(option)}""", span)
+ }
+
+ def rewriteToNewSyntax(span: Span = Span(in.offset)): Boolean = {
+ if (in.newSyntax) {
+ if (in.rewrite) return true
+ syntaxVersionError("-new-syntax", span)
+ }
+ false
+ }
+
+ def rewriteToOldSyntax(span: Span = Span(in.offset)): Boolean = {
+ if (in.oldSyntax) {
+ if (in.rewrite) return true
+ syntaxVersionError("-old-syntax", span)
+ }
+ false
+ }
+
def errorTermTree: Literal = atSpan(in.offset) { Literal(Constant(null)) }
private[this] var inFunReturnType = false
@@ -525,6 +550,131 @@ object Parsers {
def commaSeparated[T](part: () => T): List[T] = tokenSeparated(COMMA, part)
+ def inSepRegion[T](opening: Token, closing: Token)(op: => T): T = {
+ in.adjustSepRegions(opening)
+ try op finally in.adjustSepRegions(closing)
+ }
+
+/* -------- REWRITES ----------------------------------------------------------- */
+
+ /** A list of pending patches, to be issued if we can rewrite all enclosing braces to
+ * indentation regions.
+ */
+ var pendingPatches: List[() => Unit] = Nil
+
+ def testChar(idx: Int, p: Char => Boolean): Boolean = {
+ val txt = source.content
+ idx < txt.length && p(txt(idx))
+ }
+
+ def testChar(idx: Int, c: Char): Boolean = {
+ val txt = source.content
+ idx < txt.length && txt(idx) == c
+ }
+
+ def testChars(from: Int, str: String): Boolean =
+ str.isEmpty ||
+ testChar(from, str.head) && testChars(from + 1, str.tail)
+
+ def skipBlanks(idx: Int, step: Int = 1): Int =
+ if (testChar(idx, c => c == ' ' || c == '\t' || c == Chars.CR)) skipBlanks(idx + step, step)
+ else idx
+
+ def skipLineCommentsRightOf(idx: Int, column: Int): Int = {
+ val j = skipBlanks(idx)
+ if (testChar(j, '/') && testChar(j + 1, '/') && source.column(j) > column)
+ skipLineCommentsRightOf(source.nextLine(j), column)
+ else idx
+ }
+
+ /** The region to eliminate when replacing a closing `)` or `}` that starts
+ * a new line
+ */
+ def closingElimRegion(): (Offset, Offset) = {
+ val skipped = skipBlanks(in.lastOffset)
+ if (testChar(skipped, Chars.LF)) // if `}` is on a line by itself
+ (source.startOfLine(in.lastOffset), skipped + 1) // skip the whole line
+ else // else
+ (in.lastOffset - 1, skipped) // move the following text up to where the `}` was
+ }
+
+ /** Drop (...) or { ... }, replacing the closing element with `endStr` */
+ def dropParensOrBraces(start: Offset, endStr: String): Unit = {
+ patch(source, Span(start, start + 1),
+ if (testChar(start - 1, Chars.isIdentifierPart)) " " else "")
+ val closingStartsLine = testChar(skipBlanks(in.lastOffset - 2, -1), Chars.LF)
+ val preFill = if (closingStartsLine || endStr.isEmpty) "" else " "
+ val postFill = if (in.lastOffset == in.offset) " " else ""
+ val (startClosing, endClosing) =
+ if (closingStartsLine && endStr.isEmpty) closingElimRegion()
+ else (in.lastOffset - 1, in.lastOffset)
+ patch(source, Span(startClosing, endClosing), s"$preFill$endStr$postFill")
+ }
+
+ /** Drop current token, which is assumed to be `then` or `do`. */
+ def dropTerminator(): Unit = {
+ var startOffset = in.offset
+ var endOffset = in.lastCharOffset
+ if (in.isAfterLineEnd()) {
+ if (testChar(endOffset, ' ')) endOffset += 1
+ }
+ else {
+ if (testChar(startOffset - 1, ' ')) startOffset -= 1
+ }
+ patch(source, Span(startOffset, endOffset), "")
+ }
+
+ /** rewrite code with (...) around the source code of `t` */
+ def revertToParens(t: Tree): Unit =
+ if (t.span.exists) {
+ patch(source, t.span.startPos, "(")
+ patch(source, t.span.endPos, ")")
+ dropTerminator()
+ }
+
+ /** In the tokens following the current one, does `query` precede any of the tokens that
+ * - must start a statement, or
+ * - separate two statements, or
+   *   - continue a statement (e.g. `else`, `catch`)?
+ */
+ def followedByToken(query: Token): Boolean = {
+ val lookahead = in.lookaheadScanner
+ var braces = 0
+ while (true) {
+ val token = lookahead.token
+ if (braces == 0) {
+ if (token == query) return true
+ if (stopScanTokens.contains(token) || lookahead.token == RBRACE) return false
+ }
+ else if (token == EOF)
+ return false
+ else if (lookahead.token == RBRACE)
+ braces -= 1
+ if (lookahead.token == LBRACE) braces += 1
+ lookahead.nextToken()
+ }
+ false
+ }
+
+  /** Are the generators of a for-expression enclosed in (...)? */
+ def parensEncloseGenerators: Boolean = {
+ val lookahead = in.lookaheadScanner
+ var parens = 1
+ lookahead.nextToken()
+ while (parens != 0 && lookahead.token != EOF) {
+ val token = lookahead.token
+ if (token == LPAREN) parens += 1
+ else if (token == RPAREN) parens -= 1
+ lookahead.nextToken()
+ }
+ if (lookahead.token == LARROW)
+ false // it's a pattern
+ else if (lookahead.token != IDENTIFIER && lookahead.token != BACKQUOTED_IDENT)
+ true // it's not a pattern since token cannot be an infix operator
+ else
+ followedByToken(LARROW) // `<-` comes before possible statement starts
+ }
+
/* --------- OPERAND/OPERATOR STACK --------------------------------------- */
var opStack: List[OpInfo] = Nil
@@ -758,7 +908,7 @@ object Parsers {
}
else atSpan(negOffset) {
if (in.token == QUOTEID) {
- if ((staged & StageKind.Spliced) != 0 && isIdentifierStart(in.name(0))) {
+ if ((staged & StageKind.Spliced) != 0 && Chars.isIdentifierStart(in.name(0))) {
val t = atSpan(in.offset + 1) {
val tok = in.toToken(in.name)
tok match {
@@ -844,7 +994,7 @@ object Parsers {
def newLineOptWhenFollowedBy(token: Int): Unit = {
// note: next is defined here because current == NEWLINE
- if (in.token == NEWLINE && in.next.token == token) newLineOpt()
+ if (in.token == NEWLINE && in.next.token == token) in.nextToken()
}
def newLineOptWhenFollowing(p: Int => Boolean): Unit = {
@@ -1235,11 +1385,22 @@ object Parsers {
def condExpr(altToken: Token): Tree = {
if (in.token == LPAREN) {
- val t = atSpan(in.offset) { Parens(inParens(exprInParens())) }
- if (in.token == altToken) in.nextToken()
+ var t: Tree = atSpan(in.offset) { Parens(inParens(exprInParens())) }
+ if (in.token != altToken && followedByToken(altToken))
+ t = inSepRegion(LPAREN, RPAREN) {
+ newLineOpt()
+ expr1Rest(postfixExprRest(simpleExprRest(t)), Location.ElseWhere)
+ }
+ if (in.token == altToken) {
+ if (rewriteToOldSyntax()) revertToParens(t)
+ in.nextToken()
+ }
+ else if (rewriteToNewSyntax(t.span))
+ dropParensOrBraces(t.span.start, s"${tokenString(altToken)}")
t
} else {
- val t = expr()
+ val t = inSepRegion(LPAREN, RPAREN)(expr())
+ if (rewriteToOldSyntax(t.span.startPos)) revertToParens(t)
accept(altToken)
t
}
@@ -1333,7 +1494,7 @@ object Parsers {
in.errorOrMigrationWarning(
i"""`do
while ' is no longer supported,
|use `while ({ ; }) ()' instead.
- |The statement can be rewritten automatically under -language:Scala2 -migration -rewrite.
+ |${rewriteNotice("-language:Scala2")}
""")
val start = in.skipToken()
atSpan(start) {
@@ -1342,7 +1503,7 @@ object Parsers {
val whileStart = in.offset
accept(WHILE)
val cond = expr()
- if (ctx.settings.migration.value) {
+ if (in.isScala2Mode) {
patch(source, Span(start, start + 2), "while ({")
patch(source, Span(whileStart, whileStart + 5), ";")
cond match {
@@ -1576,8 +1737,10 @@ object Parsers {
* | InfixExpr id [nl] InfixExpr
* | InfixExpr ‘given’ (InfixExpr | ParArgumentExprs)
*/
- def postfixExpr(): Tree =
- infixOps(prefixExpr(), canStartExpressionTokens, prefixExpr, maybePostfix = true)
+ def postfixExpr(): Tree = postfixExprRest(prefixExpr())
+
+ def postfixExprRest(t: Tree): Tree =
+ infixOps(t, canStartExpressionTokens, prefixExpr, maybePostfix = true)
/** PrefixExpr ::= [`-' | `+' | `~' | `!'] SimpleExpr
*/
@@ -1799,8 +1962,13 @@ object Parsers {
def enumerators(): List[Tree] = generator() :: enumeratorsRest()
def enumeratorsRest(): List[Tree] =
- if (isStatSep) { in.nextToken(); enumerator() :: enumeratorsRest() }
- else if (in.token == IF) guard() :: enumeratorsRest()
+ if (isStatSep) {
+ in.nextToken()
+ if (in.token == DO || in.token == YIELD || in.token == RBRACE) Nil
+ else enumerator() :: enumeratorsRest()
+ }
+ else if (in.token == IF)
+ guard() :: enumeratorsRest()
else Nil
/** Enumerator ::= Generator
@@ -1838,13 +2006,16 @@ object Parsers {
*/
def forExpr(): Tree = atSpan(in.skipToken()) {
var wrappedEnums = true
+ val start = in.offset
+ val forEnd = in.lastOffset
+ val leading = in.token
val enums =
- if (in.token == LBRACE) inBraces(enumerators())
- else if (in.token == LPAREN) {
- val lparenOffset = in.skipToken()
- openParens.change(LPAREN, 1)
+ if (leading == LBRACE || leading == LPAREN && parensEncloseGenerators) {
+ in.nextToken()
+ openParens.change(leading, 1)
val res =
- if (in.token == CASE) enumerators()
+ if (leading == LBRACE || in.token == CASE)
+ enumerators()
else {
val pats = patternsOpt()
val pat =
@@ -1852,23 +2023,55 @@ object Parsers {
wrappedEnums = false
accept(RPAREN)
openParens.change(LPAREN, -1)
- atSpan(lparenOffset) { makeTupleOrParens(pats) } // note: alternatives `|' need to be weeded out by typer.
+ atSpan(start) { makeTupleOrParens(pats) } // note: alternatives `|' need to be weeded out by typer.
}
else pats.head
generatorRest(pat, casePat = false) :: enumeratorsRest()
}
if (wrappedEnums) {
- accept(RPAREN)
- openParens.change(LPAREN, -1)
+ val closingOnNewLine = in.isAfterLineEnd()
+ accept(leading + 1)
+ openParens.change(leading, -1)
+ def hasMultiLineEnum =
+ res.exists { t =>
+ val pos = t.sourcePos
+ pos.startLine < pos.endLine
+ }
+ if (rewriteToNewSyntax(Span(start)) && (leading == LBRACE || !hasMultiLineEnum)) {
+ // Don't rewrite if that could change meaning of newlines
+ newLinesOpt()
+ dropParensOrBraces(start, if (in.token == YIELD || in.token == DO) "" else "do")
+ }
}
res
- } else {
+ }
+ else {
wrappedEnums = false
- enumerators()
+
+ /*if (in.token == INDENT) inBracesOrIndented(enumerators()) else*/
+ val ts = inSepRegion(LBRACE, RBRACE)(enumerators())
+ if (rewriteToOldSyntax(Span(start)) && ts.nonEmpty) {
+ if (ts.length > 1 && ts.head.sourcePos.startLine != ts.last.sourcePos.startLine) {
+ patch(source, Span(forEnd), " {")
+ patch(source, Span(in.offset), "} ")
+ }
+ else {
+ patch(source, ts.head.span.startPos, "(")
+ patch(source, ts.last.span.endPos, ")")
+ }
+ }
+ ts
}
newLinesOpt()
- if (in.token == YIELD) { in.nextToken(); ForYield(enums, expr()) }
- else if (in.token == DO) { in.nextToken(); ForDo(enums, expr()) }
+ if (in.token == YIELD) {
+ in.nextToken()
+ ForYield(enums, expr())
+ }
+ else if (in.token == DO) {
+ if (rewriteToOldSyntax()) dropTerminator()
+ in.nextToken()
+ ForDo(enums, expr())
+ }
else {
if (!wrappedEnums) syntaxErrorOrIncomplete(YieldOrDoExpectedInForComprehension())
ForDo(enums, expr())
@@ -2675,7 +2878,7 @@ object Parsers {
}
/** ConstrExpr ::= SelfInvocation
- * | ConstrBlock
+ * | `{' SelfInvocation {semi BlockStat} `}'
*/
def constrExpr(): Tree =
if (in.token == LBRACE) constrBlock()
diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala
index 6181c5ac7c44..b5a8c7686597 100644
--- a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala
@@ -222,6 +222,10 @@ object Scanners {
/** A switch whether operators at the start of lines can be infix operators */
private var allowLeadingInfixOperators = true
+ val rewrite = ctx.settings.rewrite.value.isDefined
+ val oldSyntax = ctx.settings.oldSyntax.value
+ val newSyntax = ctx.settings.newSyntax.value
+
/** All doc comments kept by their end position in a `Map` */
private[this] var docstringMap: SortedMap[Int, Comment] = SortedMap.empty
@@ -236,7 +240,7 @@ object Scanners {
def nextPos: Int = (lookahead.getc(): @switch) match {
case ' ' | '\t' => nextPos
case CR | LF | FF =>
- // if we encounter line delimitng whitespace we don't count it, since
+ // if we encounter line delimiting whitespace we don't count it, since
// it seems not to affect positions in source
nextPos - 1
case _ => lookahead.charOffset - 1
@@ -420,7 +424,7 @@ object Scanners {
insertNL(NEWLINES)
else if (!isLeadingInfixOperator)
insertNL(NEWLINE)
- else if (isScala2Mode)
+ else if (isScala2Mode || oldSyntax)
ctx.warning(em"""Line starts with an operator;
|it is now treated as a continuation of the expression on the previous line,
|not as a separate statement.""",
diff --git a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala
index 0e7d9754fcde..4f458cf9d58c 100644
--- a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala
@@ -251,6 +251,12 @@ object Tokens extends TokensCommon {
final val canEndStatTokens: TokenSet = atomicExprTokens | BitSet(
TYPE, RPAREN, RBRACE, RBRACKET)
+ /** Tokens that stop a lookahead scan search for a `<-`, `then`, or `do`.
+ * Used for disambiguating between old and new syntax.
+ */
+ final val stopScanTokens: BitSet = mustStartStatTokens |
+ BitSet(IF, ELSE, WHILE, DO, FOR, YIELD, NEW, TRY, CATCH, FINALLY, THROW, RETURN, MATCH, SEMI, EOF)
+
final val numericLitTokens: TokenSet = BitSet(INTLIT, LONGLIT, FLOATLIT, DOUBLELIT)
final val scala3keywords = BitSet(ENUM, ERASED, GIVEN, IMPLIED)
diff --git a/docs/docs/reference/changed-features/operators.md b/docs/docs/reference/changed-features/operators.md
index 0a618e0426a3..e8900740d76d 100644
--- a/docs/docs/reference/changed-features/operators.md
+++ b/docs/docs/reference/changed-features/operators.md
@@ -3,7 +3,8 @@ layout: doc-page
title: Rules for Operators
---
-There are two annotations that regulate operators: `infix` and `alpha`.
+The rules for infix operators have changed. There are two annotations that regulate operators: `infix` and `alpha`.
+Furthermore, a syntax change allows infix operators to be written on the left in a multi-line expression.
## The @alpha Annotation
@@ -127,3 +128,53 @@ The purpose of the `@infix` annotation is to achieve consistency across a code b
5. To smooth migration to Scala 3.0, alphanumeric operations will only be deprecated from Scala 3.1 onwards,
or if the `-strict` option is given in Dotty/Scala 3.
+
+## Syntax Change
+
+Infix operators can now appear at the start of lines in a multi-line expression. Examples:
+```scala
+val str = "hello"
+ ++ " world"
+ ++ "!"
+
+def condition =
+ x > 0
+ || xs.exists(_ > 0)
+ || xs.isEmpty
+```
+Previously, these expressions would have been rejected, since the compiler's semicolon inference
+would have treated the continuations `++ " world"` or `|| xs.isEmpty` as separate statements.
+
+To make this syntax work, the rules are modified to not infer semicolons in front of leading infix operators.
+A _leading infix operator_ is
+ - a symbolic identifier such as `+`, or `approx_==`, or an identifier in backticks,
+ - that starts a new line,
+ - that precedes a token on the same line that can start an expression,
+ - and that is immediately followed by at least one space character `' '`.
+
+Example:
+
+```scala
+ freezing
+ | boiling
+```
+This is recognized as a single infix operation. Compare with:
+```scala
+ freezing
+ !boiling
+```
+This is seen as two statements, `freezing` and `!boiling`. The difference is that only the operator in the first example
+is followed by a space.
+
+Another example:
+```scala
+ println("hello")
+ ???
+ ??? match { case 0 => 1 }
+```
+This code is recognized as three different statements. `???` is syntactically a symbolic identifier, but
+neither of its occurrences is followed by a space and a token that can start an expression.
+
+
+
+
diff --git a/docs/sidebar.yml b/docs/sidebar.yml
index 9da7fa35f921..bc007227b073 100644
--- a/docs/sidebar.yml
+++ b/docs/sidebar.yml
@@ -101,6 +101,8 @@ sidebar:
url: docs/reference/other-new-features/tupled-function.html
- title: threadUnsafe Annotation
url: docs/reference/other-new-features/threadUnsafe-annotation.html
+ - title: New Control Syntax
+ url: docs/reference/other-new-features/control-syntax.html
- title: Other Changed Features
subsection:
- title: Structural Types
@@ -143,6 +145,8 @@ sidebar:
url: docs/reference/dropped-features/existential-types.html
- title: Type Projection
url: docs/reference/dropped-features/type-projection.html
+ - title: Do-While
+ url: docs/reference/dropped-features/do-while.html
- title: Procedure Syntax
url: docs/reference/dropped-features/procedure-syntax.html
- title: Package Objects