From d6c8601767cd1676a7a652f85762bf34b05d4e6f Mon Sep 17 00:00:00 2001 From: DieMyst Date: Mon, 13 Nov 2023 16:05:03 +0700 Subject: [PATCH 01/30] add token info --- .../scala/aqua/lsp/LocationsInterpreter.scala | 20 +++++++++++-------- .../src/main/scala/aqua/lsp/LspContext.scala | 4 ++-- .../abilities/AbilitiesInterpreter.scala | 1 - .../locations/DummyLocationsInterpreter.scala | 2 +- .../rules/locations/LocationsAlgebra.scala | 3 ++- .../rules/names/NamesInterpreter.scala | 11 +++++----- 6 files changed, 22 insertions(+), 19 deletions(-) diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala b/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala index 342b202b5..4c129e8c0 100644 --- a/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala +++ b/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala @@ -2,7 +2,8 @@ package aqua.lsp import aqua.parser.lexer.Token import aqua.semantics.rules.StackInterpreter -import aqua.semantics.rules.locations.{LocationsAlgebra, LocationsState} +import aqua.semantics.rules.locations.{LocationsAlgebra, LocationsState, TokenInfo} +import aqua.types.{BottomType, Type} import cats.data.State import monocle.Lens @@ -21,8 +22,9 @@ class LocationsInterpreter[S[_], X](using import stack.* - override def addToken(name: String, token: Token[S]): State[X, Unit] = modify { st => - st.copy(tokens = st.tokens.updated(name, token)) + override def addToken(name: String, tokenInfo: TokenInfo[S]): State[X, Unit] = modify { + st => + st.copy(tokens = st.tokens.updated(name, tokenInfo)) } private def combineFieldName(name: String, field: String): String = name + "." + field @@ -33,7 +35,9 @@ class LocationsInterpreter[S[_], X](using fields: List[(String, Token[S])] ): State[X, Unit] = modify { st => st.copy(tokens = - st.tokens ++ ((name, token) +: fields.map(kv => (combineFieldName(name, kv._1), kv._2))).toMap + st.tokens ++ ((name, TokenInfo(token, BottomType)) +: fields.map(kv => + (combineFieldName(name, kv._1), TokenInfo(kv._2, BottomType)) + )).toMap ) } @@ -55,8 +59,8 @@ class LocationsInterpreter[S[_], X](using override def pointLocation(name: String, token: Token[S]): State[X, Unit] = { modify { st => val newLoc: Option[Token[S]] = st.stack.collectFirst { - case frame if frame.tokens.contains(name) => frame.tokens(name) - } orElse st.tokens.get(name) + case frame if frame.tokens.contains(name) => frame.tokens(name).token + } orElse st.tokens.get(name).map(_.token) st.copy(locations = st.locations ++ newLoc.map(token -> _).toList) } } @@ -66,8 +70,8 @@ class LocationsInterpreter[S[_], X](using val newLocs = locations.flatMap { case (name, token) => (st.stack.collectFirst { - case frame if frame.tokens.contains(name) => frame.tokens(name) - } orElse st.tokens.get(name)).map(token -> _) + case frame if frame.tokens.contains(name) => frame.tokens(name).token + } orElse st.tokens.get(name).map(_.token)).map(token -> _) } st.copy(locations = st.locations ++ newLocs) diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala b/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala index aa2ddb555..612ce8b45 100644 --- a/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala +++ b/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala @@ -4,8 +4,8 @@ import aqua.parser.lexer.{LiteralToken, NamedTypeToken, Token} import 
aqua.raw.{RawContext, RawPart} import aqua.semantics.{SemanticError, SemanticWarning} import aqua.semantics.header.Picker +import aqua.semantics.rules.locations.TokenInfo import aqua.types.{ArrowType, Type} - import cats.syntax.monoid.* import cats.{Monoid, Semigroup} @@ -15,7 +15,7 @@ case class LspContext[S[_]]( abDefinitions: Map[String, NamedTypeToken[S]] = Map.empty[String, NamedTypeToken[S]], rootArrows: Map[String, ArrowType] = Map.empty[String, ArrowType], constants: Map[String, Type] = Map.empty[String, Type], - tokens: Map[String, Token[S]] = Map.empty[String, Token[S]], + tokens: Map[String, TokenInfo[S]] = Map.empty[String, TokenInfo[S]], locations: List[(Token[S], Token[S])] = Nil, importTokens: List[LiteralToken[S]] = Nil, errors: List[SemanticError[S]] = Nil, diff --git a/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala index e5c89c664..7cfcbb129 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala @@ -56,7 +56,6 @@ class AbilitiesInterpreter[S[_], X](using case false => for { _ <- modify(_.defineService(name, defaultId)) - // TODO: Is it used? _ <- locations.addTokenWithFields( name.value, name, diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/DummyLocationsInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/DummyLocationsInterpreter.scala index 43a876b1c..448d81190 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/locations/DummyLocationsInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/locations/DummyLocationsInterpreter.scala @@ -9,7 +9,7 @@ import cats.data.{NonEmptyList, NonEmptyMap, State} class DummyLocationsInterpreter[S[_], X] extends LocationsAlgebra[S, State[X, *]] { - def addToken(name: String, token: Token[S]): State[X, Unit] = State.pure(()) + def addToken(name: String, tokenInfo: TokenInfo[S]): State[X, Unit] = State.pure(()) def addTokenWithFields( name: String, diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala index 3a41fbdbc..c17d81458 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala @@ -1,8 +1,9 @@ package aqua.semantics.rules.locations import aqua.parser.lexer.Token +import aqua.types.Type trait LocationsAlgebra[S[_], Alg[_]] { - def addToken(name: String, token: Token[S]): Alg[Unit] + def addToken(name: String, tokenInfo: TokenInfo[S]): Alg[Unit] def addTokenWithFields(name: String, token: Token[S], fields: List[(String, Token[S])]): Alg[Unit] def pointTokenWithFieldLocation(typeName: String, typeToken: Token[S], fieldName: String, token: Token[S]): Alg[Unit] diff --git a/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala index b55b53a75..2a75a6ee1 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala @@ -3,10 +3,9 @@ package aqua.semantics.rules.names import aqua.parser.lexer.{Name, Token} import aqua.semantics.Levenshtein import aqua.semantics.rules.StackInterpreter -import 
aqua.semantics.rules.locations.LocationsAlgebra +import aqua.semantics.rules.locations.{LocationsAlgebra, TokenInfo} import aqua.semantics.rules.report.ReportAlgebra import aqua.types.{ArrowType, StreamType, Type} - import cats.data.{OptionT, State} import cats.syntax.all.* import cats.syntax.applicative.* @@ -104,13 +103,13 @@ class NamesInterpreter[S[_], X](using case None => mapStackHeadM(report.error(name, "Cannot define a variable in the root scope").as(false))( fr => (fr.addName(name, `type`) -> true).pure - ) <* locations.addToken(name.value, name) + ) <* locations.addToken(name.value, TokenInfo(name, `type`)) } override def derive(name: Name[S], `type`: Type, derivedFrom: Set[String]): State[X, Boolean] = define(name, `type`).flatTap(defined => mapStackHead_(_.derived(name, derivedFrom)).whenA(defined) - ) <* locations.addToken(name.value, name) + ) <* locations.addToken(name.value, TokenInfo(name, `type`)) override def getDerivedFrom(fromNames: List[Set[String]]): State[X, List[Set[String]]] = mapStackHead(Nil)(frame => @@ -129,7 +128,7 @@ class NamesInterpreter[S[_], X](using constants = st.constants.updated(name.value, `type`) ) ).as(true) - }.flatTap(_ => locations.addToken(name.value, name)) + }.flatTap(_ => locations.addToken(name.value, TokenInfo(name, `type`))) override def defineArrow(name: Name[S], arrowType: ArrowType, isRoot: Boolean): SX[Boolean] = readName(name.value).flatMap { @@ -154,7 +153,7 @@ class NamesInterpreter[S[_], X](using .error(name, "Cannot define a variable in the root scope") .as(false) )(fr => (fr.addArrow(name, arrowType) -> true).pure) - }.flatTap(_ => locations.addToken(name.value, name)) + }.flatTap(_ => locations.addToken(name.value, TokenInfo[S](name, arrowType))) override def streamsDefinedWithinScope(): SX[Map[String, StreamType]] = mapStackHead(Map.empty) { frame => From 59408a201305fa1eed628b9bae2fa3e4681e3f61 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 13 Nov 2023 12:17:48 +0300 Subject: [PATCH 02/30] chore(deps): update dependency co.fs2:fs2-io to v3.9.3 (#969) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 40fd61bc4..07f1a8ba3 100644 --- a/build.sbt +++ b/build.sbt @@ -8,7 +8,7 @@ val catsParseV = "0.3.10" val monocleV = "3.1.0" val scalaTestV = "3.2.17" val sourcecodeV = "0.3.0" -val fs2V = "3.9.2" +val fs2V = "3.9.3" val catsEffectV = "3.6-1f95fd7" val declineV = "2.3.0" val circeVersion = "0.14.2" From a7e4338ca1ae5cf37d20926255b0a1e17daa3315 Mon Sep 17 00:00:00 2001 From: InversionSpaces Date: Tue, 14 Nov 2023 12:02:58 +0100 Subject: [PATCH 03/30] feat(compiler)!: Make `nil` option bottom [LNG-279] (#968) * Make nil option of bottom * Fix tests * Make literals of data type * Add unit tests * Remove print --- integration-tests/aqua/examples/stream.aqua | 21 ++++-- integration-tests/src/examples/streamCall.ts | 10 +-- .../raw/ApplyPropertiesRawInliner.scala | 14 ++-- .../main/scala/aqua/raw/value/ValueRaw.scala | 4 +- .../main/scala/aqua/model/ValueModel.scala | 4 +- .../main/scala/aqua/parser/lexer/Token.scala | 2 +- .../scala/aqua/semantics/SemanticsSpec.scala | 70 +++++++++++++++++-- .../aqua/semantics/ValuesAlgebraSpec.scala | 29 +++++++- 8 files changed, 122 insertions(+), 32 deletions(-) diff --git a/integration-tests/aqua/examples/stream.aqua b/integration-tests/aqua/examples/stream.aqua index 020df4a46..3f1dad084 100644 --- 
a/integration-tests/aqua/examples/stream.aqua +++ b/integration-tests/aqua/examples/stream.aqua @@ -1,6 +1,15 @@ +aqua Stream + import "@fluencelabs/aqua-lib/builtin.aqua" import "println.aqua" +export Stringer +export checkStreams, returnStreamFromFunc +export stringEmpty, returnEmptyLiteral +export returnNilLength, stringNone +export streamFunctor, streamAssignment +export streamIntFunctor, streamJoin + service Stringer("stringer-id"): returnString: string -> string @@ -20,16 +29,18 @@ func returnStreamFromFunc() -> *u32: nums <- getStream() <- nums -func stringNil() -> *string: +func stringEmpty() -> *string: valueNil: *string <- valueNil -func returnNil() -> *string: - relayNil <- stringNil() +func returnEmpty() -> *string: + relayNil <- stringEmpty() <- relayNil -func returnNilLiteral() -> *string: - <- nil +func returnEmptyLiteral() -> *string: + empty: *string + -- TODO: return *[] here after LNG-280 + <- empty func returnNilLength() -> u32: arr = nil diff --git a/integration-tests/src/examples/streamCall.ts b/integration-tests/src/examples/streamCall.ts index 3604330e5..1a3815987 100644 --- a/integration-tests/src/examples/streamCall.ts +++ b/integration-tests/src/examples/streamCall.ts @@ -1,14 +1,14 @@ import { - checkStreams, registerStringer, + checkStreams, returnNilLength, - returnNilLiteral, + returnEmptyLiteral, returnStreamFromFunc, streamAssignment, streamFunctor, streamIntFunctor, streamJoin, - stringNil, + stringEmpty, stringNone, } from "../compiled/examples/stream.js"; @@ -23,7 +23,7 @@ export async function streamCall() { } export async function returnNilCall() { - return stringNil(); + return stringEmpty(); } export async function returnNoneCall() { @@ -47,7 +47,7 @@ export async function streamAssignmentCall() { } export async function nilLiteralCall() { - return await returnNilLiteral(); + return await returnEmptyLiteral(); } export async function nilLengthCall() { diff --git a/model/inline/src/main/scala/aqua/model/inline/raw/ApplyPropertiesRawInliner.scala b/model/inline/src/main/scala/aqua/model/inline/raw/ApplyPropertiesRawInliner.scala index f683fe013..43df1e0fd 100644 --- a/model/inline/src/main/scala/aqua/model/inline/raw/ApplyPropertiesRawInliner.scala +++ b/model/inline/src/main/scala/aqua/model/inline/raw/ApplyPropertiesRawInliner.scala @@ -17,8 +17,8 @@ import cats.syntax.applicative.* import cats.syntax.bifunctor.* import cats.syntax.foldable.* import cats.syntax.monoid.* -import cats.syntax.traverse.* import cats.syntax.option.* +import cats.syntax.traverse.* import scribe.Logging object ApplyPropertiesRawInliner extends RawInliner[ApplyPropertyRaw] with Logging { @@ -33,19 +33,15 @@ object ApplyPropertiesRawInliner extends RawInliner[ApplyPropertyRaw] with Loggi apName <- Mangler[S].findAndForbidName("literal_ap") resultName <- Mangler[S].findAndForbidName(s"literal_props") } yield { - val cleanedType = literal.`type` match { - // literals cannot be streams, use it as an array to use properties - case StreamType(el) => ArrayType(el) - case tt => tt - } - val apVar = VarModel(apName, cleanedType, properties) + val typ = literal.`type` + val apVar = VarModel(apName, typ, properties) val tree = inl |+| Inline.tree( SeqModel.wrap( - FlattenModel(literal.copy(`type` = cleanedType), apVar.name).leaf, + FlattenModel(literal.copy(`type` = typ), apVar.name).leaf, FlattenModel(apVar, resultName).leaf ) ) - VarModel(resultName, properties.lastOption.map(_.`type`).getOrElse(cleanedType)) -> tree + VarModel(resultName, 
properties.lastOption.map(_.`type`).getOrElse(typ)) -> tree } } diff --git a/model/raw/src/main/scala/aqua/raw/value/ValueRaw.scala b/model/raw/src/main/scala/aqua/raw/value/ValueRaw.scala index cf200f502..285e0d605 100644 --- a/model/raw/src/main/scala/aqua/raw/value/ValueRaw.scala +++ b/model/raw/src/main/scala/aqua/raw/value/ValueRaw.scala @@ -35,7 +35,7 @@ object ValueRaw { val ParticleTtl: LiteralRaw = LiteralRaw("%ttl%", ScalarType.u32) val ParticleTimestamp: LiteralRaw = LiteralRaw("%timestamp%", ScalarType.u64) - val Nil: LiteralRaw = LiteralRaw("[]", StreamType(BottomType)) + val Nil: LiteralRaw = LiteralRaw("[]", OptionType(BottomType)) /** * Type of error value @@ -125,7 +125,7 @@ case class VarRaw(name: String, baseType: Type) extends ValueRaw { override def varNames: Set[String] = Set(name) } -case class LiteralRaw(value: String, baseType: Type) extends ValueRaw { +case class LiteralRaw(value: String, baseType: DataType) extends ValueRaw { override def mapValues(f: ValueRaw => ValueRaw): ValueRaw = this override def toString: String = s"{$value: ${baseType}}" diff --git a/model/src/main/scala/aqua/model/ValueModel.scala b/model/src/main/scala/aqua/model/ValueModel.scala index 74b0106c6..79a601ed1 100644 --- a/model/src/main/scala/aqua/model/ValueModel.scala +++ b/model/src/main/scala/aqua/model/ValueModel.scala @@ -6,8 +6,8 @@ import aqua.types.* import cats.Eq import cats.data.{Chain, NonEmptyMap} -import cats.syntax.option.* import cats.syntax.apply.* +import cats.syntax.option.* import scribe.Logging sealed trait ValueModel { @@ -75,7 +75,7 @@ object ValueModel { } } -case class LiteralModel(value: String, `type`: Type) extends ValueModel { +case class LiteralModel(value: String, `type`: DataType) extends ValueModel { override def toString: String = s"{$value: ${`type`}}" diff --git a/parser/src/main/scala/aqua/parser/lexer/Token.scala b/parser/src/main/scala/aqua/parser/lexer/Token.scala index 8c44347a1..82f1f841e 100644 --- a/parser/src/main/scala/aqua/parser/lexer/Token.scala +++ b/parser/src/main/scala/aqua/parser/lexer/Token.scala @@ -4,8 +4,8 @@ import aqua.parser.lift.Span.S import cats.data.NonEmptyList import cats.parse.{Accumulator0, Parser as P, Parser0 as P0} -import cats.{~>, Comonad, Functor} import cats.syntax.functor.* +import cats.{Comonad, Functor, ~>} trait Token[F[_]] { def as[T](v: T): F[T] diff --git a/semantics/src/test/scala/aqua/semantics/SemanticsSpec.scala b/semantics/src/test/scala/aqua/semantics/SemanticsSpec.scala index 8b92e3c2f..6d7b01339 100644 --- a/semantics/src/test/scala/aqua/semantics/SemanticsSpec.scala +++ b/semantics/src/test/scala/aqua/semantics/SemanticsSpec.scala @@ -16,6 +16,7 @@ import cats.data.Validated import cats.data.{Chain, EitherNec, NonEmptyChain} import cats.free.Cofree import cats.syntax.foldable.* +import cats.syntax.option.* import cats.syntax.show.* import cats.syntax.traverse.* import cats.~> @@ -49,11 +50,13 @@ class SemanticsSpec extends AnyFlatSpec with Matchers with Inside { inside(semantics.process(ast, init).value.run)(test) } - def insideBody(script: String)(test: RawTag.Tree => Any): Unit = + def insideBody(script: String, func: Option[String] = None)(test: RawTag.Tree => Any): Unit = insideResult(script) { case (_, Right(ctx)) => - inside(ctx.funcs.headOption) { case Some((_, func)) => - test(func.arrow.body) - } + inside( + func.fold( + ctx.funcs.headOption.map { case (_, raw) => raw } + )(ctx.funcs.get) + ) { case Some(func) => test(func.arrow.body) } } def insideSemErrors(script: String)(test: 
NonEmptyChain[SemanticError[Span.S]] => Any): Unit = @@ -877,7 +880,6 @@ class SemanticsSpec extends AnyFlatSpec with Matchers with Inside { |""".stripMargin insideBody(script) { body => - println(body.show) matchSubtree(body) { case (CallArrowRawTag(_, ca: CallArrowRaw), _) => inside(ca.arguments) { case (c: CollectionRaw) :: Nil => c.values.exists { @@ -892,4 +894,62 @@ class SemanticsSpec extends AnyFlatSpec with Matchers with Inside { test("[]", "") test("?", "?") } + + it should "allow `nil` in place of an array or an option" in { + def test(p: String) = { + val script = s""" + |func length(col: ${p}string) -> u32: + | <- col.length + | + |func return() -> ${p}string: + | <- nil + | + |func test() -> u32: + | l <- length(nil) + | n <- return() + | <- l + n.length + |""".stripMargin + + insideBody(script, "test".some) { body => + matchSubtree(body) { + case (CallArrowRawTag(_, ca: CallArrowRaw), _) if ca.name == "length" => + ca.arguments.length shouldEqual 1 + } + matchSubtree(body) { + case (CallArrowRawTag(_, ca: CallArrowRaw), _) if ca.name == "return" => + ca.arguments.length shouldEqual 0 + } + } + } + + test("[]") + test("?") + } + + it should "forbid `nil` in place of a stream" in { + val scriptAccept = s""" + |func length(col: *string) -> u32: + | <- col.length + | + |func test() -> u32: + | <- length(nil) + |""".stripMargin + + val scriptReturn = s""" + |func return() -> *string: + | <- nil + | + |func test() -> u32: + | n <- return() + | <- n.length + |""".stripMargin + + insideSemErrors(scriptAccept) { errors => + atLeast(1, errors.toChain.toList) shouldBe a[RulesViolated[Span.S]] + } + + insideSemErrors(scriptReturn) { errors => + atLeast(1, errors.toChain.toList) shouldBe a[RulesViolated[Span.S]] + } + } } diff --git a/semantics/src/test/scala/aqua/semantics/ValuesAlgebraSpec.scala b/semantics/src/test/scala/aqua/semantics/ValuesAlgebraSpec.scala index 107d6bd1b..eed6f6ffa 100644 --- a/semantics/src/test/scala/aqua/semantics/ValuesAlgebraSpec.scala +++ b/semantics/src/test/scala/aqua/semantics/ValuesAlgebraSpec.scala @@ -1,6 +1,7 @@ package aqua.semantics import aqua.parser.lexer.* +import aqua.raw.ConstantRaw import aqua.raw.RawContext import aqua.raw.value.* import aqua.semantics.rules.ValuesAlgebra @@ -14,7 +15,7 @@ import aqua.semantics.rules.types.{TypesAlgebra, TypesInterpreter} import aqua.types.* import cats.Id -import cats.data.{NonEmptyList, NonEmptyMap, State} +import cats.data.{Chain, NonEmptyList, NonEmptyMap, State} import monocle.syntax.all.* import org.scalatest.Inside import org.scalatest.flatspec.AnyFlatSpec @@ -66,9 +67,15 @@ class ValuesAlgebraSpec extends AnyFlatSpec with Matchers with Inside { b <- list } yield (a, b) - def genState(vars: Map[String, Type] = Map.empty) = + def genState(vars: Map[String, Type] = Map.empty) = { + val init = RawContext.blank.copy( + parts = Chain + .fromSeq(ConstantRaw.defaultConstants()) + .map(const => RawContext.blank -> const) + ) + CompilerState - .init[Id](RawContext.blank) + .init[Id](init) .focus(_.names) .modify( _.focus(_.stack).modify( @@ -78,6 +85,7 @@ class ValuesAlgebraSpec extends AnyFlatSpec with Matchers with Inside { ) :: _ ) ) + } def valueOfType(t: Type)( varName: String, @@ -572,4 +580,19 @@ class ValuesAlgebraSpec extends AnyFlatSpec with Matchers with Inside { atLeast(1, st.errors.toList) shouldBe a[RulesViolated[Id]] } } + + it should "consider `nil` of type `?⊥`" in { + val nil = variable("nil") + + val alg = algebra() + + val (st, res) = alg + .valueToRaw(nil) + .run(genState()) + .value + + 
inside(res) { case Some(value) => + value.`type` shouldBe OptionType(BottomType) + } + } } From d941f47d2297dbf9c3aef8dbb07f2fd432764461 Mon Sep 17 00:00:00 2001 From: InversionSpaces Date: Fri, 17 Nov 2023 11:43:13 +0100 Subject: [PATCH 04/30] feat(compiler): Generate empty calls to `responseHandlerSrv` [LNG-286] (#979) * Add noEmptyResponse * Fix tests --- api/api-npm/index.d.ts | 2 + api/api-npm/index.js | 2 + .../src/main/scala/api/types/InputTypes.scala | 9 ++-- .../main/scala/aqua/api/AquaAPIConfig.scala | 6 ++- .../aqua/compiler/AquaCompilerSpec.scala | 21 ++++----- .../aqua/model/transform/Transform.scala | 11 ++--- .../model/transform/TransformConfig.scala | 18 +++++++- .../model/transform/pre/ResultsHandler.scala | 11 +++-- .../aqua/model/transform/ModelBuilder.scala | 43 +++++++++++-------- .../aqua/model/transform/TransformSpec.scala | 8 ++-- 10 files changed, 84 insertions(+), 47 deletions(-) diff --git a/api/api-npm/index.d.ts b/api/api-npm/index.d.ts index 6c82b719e..d24940a9b 100644 --- a/api/api-npm/index.d.ts +++ b/api/api-npm/index.d.ts @@ -37,6 +37,8 @@ type CommonArgs = { targetType?: "ts" | "js" | "air" | undefined; /** Compile aqua in tracing mode (for debugging purposes). Default: false */ tracing?: boolean | undefined; + /** Do not generate response call if there are no returned values */ + noEmptyResponse?: boolean | undefined; }; type CodeString = { diff --git a/api/api-npm/index.js b/api/api-npm/index.js index b8f07788e..bc7a7bcf6 100644 --- a/api/api-npm/index.js +++ b/api/api-npm/index.js @@ -7,6 +7,7 @@ function getConfig({ noXor = false, targetType = "air", tracing = false, + noEmptyResponse = false, }) { return new AquaConfig( logLevel, @@ -19,6 +20,7 @@ function getConfig({ air: "air", }[targetType], tracing, + noEmptyResponse, ); } diff --git a/api/api/.js/src/main/scala/api/types/InputTypes.scala b/api/api/.js/src/main/scala/api/types/InputTypes.scala index f0ad685aa..98ade0ee1 100644 --- a/api/api/.js/src/main/scala/api/types/InputTypes.scala +++ b/api/api/.js/src/main/scala/api/types/InputTypes.scala @@ -4,9 +4,9 @@ import aqua.api.AquaAPIConfig import aqua.api.TargetType.* import aqua.js.{FunctionDefJs, ServiceDefJs} import aqua.model.transform.TransformConfig + import cats.data.Validated.{invalidNec, validNec} import cats.data.{Chain, NonEmptyChain, Validated, ValidatedNec} - import scala.scalajs.js import scala.scalajs.js.JSConverters.* import scala.scalajs.js.annotation.{JSExport, JSExportTopLevel} @@ -47,7 +47,9 @@ class AquaConfig( @JSExport val targetType: js.UndefOr[String], @JSExport - val tracing: js.UndefOr[Boolean] + val tracing: js.UndefOr[Boolean], + @JSExport + val noEmptyResponse: js.UndefOr[Boolean] ) object AquaConfig { @@ -69,7 +71,8 @@ object AquaConfig { constants = cjs.constants.map(_.toList).getOrElse(Nil), noXor = cjs.noXor.getOrElse(false), noRelay = cjs.noRelay.getOrElse(false), - tracing = cjs.tracing.getOrElse(false) + tracing = cjs.tracing.getOrElse(false), + noEmptyResponse = cjs.noEmptyResponse.getOrElse(false) ) } } diff --git a/api/api/src/main/scala/aqua/api/AquaAPIConfig.scala b/api/api/src/main/scala/aqua/api/AquaAPIConfig.scala index 18c949d29..ce0735a8f 100644 --- a/api/api/src/main/scala/aqua/api/AquaAPIConfig.scala +++ b/api/api/src/main/scala/aqua/api/AquaAPIConfig.scala @@ -10,12 +10,14 @@ case class AquaAPIConfig( constants: List[String] = Nil, noXor: Boolean = false, // TODO: Remove noRelay: Boolean = false, - tracing: Boolean = false + tracing: Boolean = false, + noEmptyResponse: Boolean = false ) { def 
getTransformConfig: TransformConfig = { val config = TransformConfig( - tracing = Option.when(tracing)(TransformConfig.TracingConfig.default) + tracing = Option.when(tracing)(TransformConfig.TracingConfig.default), + noEmptyResponse = noEmptyResponse ) if (noRelay) config.copy(relayVarName = None) diff --git a/compiler/src/test/scala/aqua/compiler/AquaCompilerSpec.scala b/compiler/src/test/scala/aqua/compiler/AquaCompilerSpec.scala index 0f68680ec..e43b5345e 100644 --- a/compiler/src/test/scala/aqua/compiler/AquaCompilerSpec.scala +++ b/compiler/src/test/scala/aqua/compiler/AquaCompilerSpec.scala @@ -1,12 +1,15 @@ package aqua.compiler -import aqua.model.{CallModel, ForModel, FunctorModel, LiteralModel, ValueModel, VarModel} +import aqua.model.AquaContext +import aqua.model.CallServiceModel +import aqua.model.FlattenModel import aqua.model.transform.ModelBuilder -import aqua.model.transform.TransformConfig import aqua.model.transform.Transform -import aqua.parser.ParserError +import aqua.model.transform.TransformConfig +import aqua.model.{CallModel, ForModel, FunctorModel, LiteralModel, ValueModel, VarModel} import aqua.parser.Ast import aqua.parser.Parser +import aqua.parser.ParserError import aqua.parser.lift.Span import aqua.parser.lift.Span.S import aqua.raw.ConstantRaw @@ -18,15 +21,12 @@ import aqua.types.{ArrayType, CanonStreamType, LiteralType, ScalarType, StreamTy import cats.Id import cats.data.{Chain, NonEmptyChain, NonEmptyMap, Validated, ValidatedNec} import cats.instances.string.* -import cats.syntax.show.* -import cats.syntax.option.* import cats.syntax.either.* +import cats.syntax.option.* +import cats.syntax.show.* +import org.scalatest.Inside import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import org.scalatest.Inside -import aqua.model.AquaContext -import aqua.model.FlattenModel -import aqua.model.CallServiceModel class AquaCompilerSpec extends AnyFlatSpec with Matchers with Inside { import ModelBuilder.* @@ -358,7 +358,8 @@ class AquaCompilerSpec extends AnyFlatSpec with Matchers with Inside { join(VarModel(streamName, streamType), arg), decrement ) - ) + ), + emptyRespCall(transformCfg, initPeer) ), errorCall(transformCfg, 0, initPeer) ) diff --git a/model/transform/src/main/scala/aqua/model/transform/Transform.scala b/model/transform/src/main/scala/aqua/model/transform/Transform.scala index 1d57e32fe..760be8cc4 100644 --- a/model/transform/src/main/scala/aqua/model/transform/Transform.scala +++ b/model/transform/src/main/scala/aqua/model/transform/Transform.scala @@ -1,25 +1,25 @@ package aqua.model.transform +import aqua.model.* import aqua.model.inline.ArrowInliner import aqua.model.inline.state.InliningState +import aqua.model.transform.TransformConfig.TracingConfig import aqua.model.transform.funcop.* import aqua.model.transform.pre.* +import aqua.model.transform.pre.{CallbackErrorHandler, ErrorHandler} import aqua.model.transform.topology.Topology -import aqua.model.* import aqua.raw.ops.RawTag import aqua.raw.value.VarRaw import aqua.res.* import aqua.types.ScalarType -import aqua.model.transform.TransformConfig.TracingConfig -import aqua.model.transform.pre.{CallbackErrorHandler, ErrorHandler} import cats.Eval import cats.data.Chain import cats.free.Cofree +import cats.instances.list.* import cats.syntax.option.* import cats.syntax.show.* import cats.syntax.traverse.* -import cats.instances.list.* import scribe.Logging // API for transforming RawTag to Res @@ -90,7 +90,8 @@ object Transform extends Logging { val 
resultsHandler: ResultsHandler = CallbackResultsHandler( callbackSrvId = conf.callbackSrvId, - funcName = conf.respFuncName + funcName = conf.respFuncName, + noEmptyResponse = conf.noEmptyResponse ) val errorHandler: ErrorHandler = CallbackErrorHandler( diff --git a/model/transform/src/main/scala/aqua/model/transform/TransformConfig.scala b/model/transform/src/main/scala/aqua/model/transform/TransformConfig.scala index 63557bfcd..313cdc94f 100644 --- a/model/transform/src/main/scala/aqua/model/transform/TransformConfig.scala +++ b/model/transform/src/main/scala/aqua/model/transform/TransformConfig.scala @@ -1,19 +1,33 @@ package aqua.model.transform import aqua.model.{AquaContext, LiteralModel, ValueModel, VarModel} -import aqua.raw.{ConstantRaw, RawContext, RawPart} import aqua.raw.value.{LiteralRaw, ValueRaw, VarRaw} +import aqua.raw.{ConstantRaw, RawContext, RawPart} import aqua.types.ScalarType + import cats.data.Chain import cats.kernel.Monoid -// TODO docs +/** + * Configuration for function pre transformer + * + * @param getDataService - name of the service that provides arguments + * @param callbackService - name of the service that provides callbacks + * @param errorHandlingService - name of the service that handles errors + * @param errorFuncName - name of the function that handles errors (in errorHandlingService) + * @param respFuncName - name of the function that handles responses (in getDataService) + * @param noEmptyResponse - if true, do not generate response call if there is no return values + * @param relayVarName - name of the relay variable + * @param tracing - tracing configuration + * @param constants - list of constants + */ case class TransformConfig( getDataService: String = "getDataSrv", callbackService: String = "callbackSrv", errorHandlingService: String = "errorHandlingSrv", errorFuncName: String = "error", respFuncName: String = "response", + noEmptyResponse: Boolean = false, relayVarName: Option[String] = Some("-relay-"), tracing: Option[TransformConfig.TracingConfig] = None, constants: List[ConstantRaw] = Nil diff --git a/model/transform/src/main/scala/aqua/model/transform/pre/ResultsHandler.scala b/model/transform/src/main/scala/aqua/model/transform/pre/ResultsHandler.scala index 4345de0d6..643cddf80 100644 --- a/model/transform/src/main/scala/aqua/model/transform/pre/ResultsHandler.scala +++ b/model/transform/src/main/scala/aqua/model/transform/pre/ResultsHandler.scala @@ -1,8 +1,8 @@ package aqua.model.transform.pre -import aqua.types.Type import aqua.raw.ops.{Call, CallArrowRawTag, RawTag} import aqua.raw.value.{ValueRaw, VarRaw} +import aqua.types.Type import cats.syntax.option.* @@ -10,11 +10,14 @@ trait ResultsHandler { def handleResults(results: List[(String, Type)]): Option[RawTag.Tree] } -case class CallbackResultsHandler(callbackSrvId: ValueRaw, funcName: String) - extends ResultsHandler { +case class CallbackResultsHandler( + callbackSrvId: ValueRaw, + funcName: String, + noEmptyResponse: Boolean +) extends ResultsHandler { override def handleResults(results: List[(String, Type)]): Option[RawTag.Tree] = - if (results.isEmpty) none + if (results.isEmpty && noEmptyResponse) none else { val resultVars = results.map(VarRaw.apply.tupled) val call = Call( diff --git a/model/transform/src/test/scala/aqua/model/transform/ModelBuilder.scala b/model/transform/src/test/scala/aqua/model/transform/ModelBuilder.scala index 885d49bf4..e6fa26c62 100644 --- a/model/transform/src/test/scala/aqua/model/transform/ModelBuilder.scala +++ 
b/model/transform/src/test/scala/aqua/model/transform/ModelBuilder.scala @@ -1,22 +1,22 @@ package aqua.model.transform import aqua.model.* +import aqua.model.FailModel +import aqua.model.IntoIndexModel +import aqua.model.OnModel +import aqua.model.inline.raw.StreamGateInliner import aqua.raw.ops.Call import aqua.raw.value.{LiteralRaw, ValueRaw, VarRaw} -import aqua.{model, res} +import aqua.res.ResolvedOp import aqua.res.{CallRes, CallServiceRes, MakeRes} -import aqua.types.{ArrayType, LiteralType, ScalarType} import aqua.types.StreamType -import aqua.model.IntoIndexModel -import aqua.model.inline.raw.StreamGateInliner -import aqua.model.OnModel -import aqua.model.FailModel -import aqua.res.ResolvedOp +import aqua.types.{ArrayType, LiteralType, ScalarType} +import aqua.{model, res} -import scala.language.implicitConversions import cats.data.Chain import cats.data.Chain.==: import cats.syntax.option.* +import scala.language.implicitConversions object ModelBuilder { implicit def rawToValue(raw: ValueRaw): ValueModel = ValueModel.fromRaw(raw) @@ -88,15 +88,24 @@ object ModelBuilder { ) .leaf - def respCall(bc: TransformConfig, value: ValueModel, on: ValueModel = initPeer) = - res - .CallServiceRes( - ValueModel.fromRaw(bc.callbackSrvId), - bc.respFuncName, - CallRes(value :: Nil, None), - on - ) - .leaf + def respCallImpl( + config: TransformConfig, + arguments: List[ValueModel], + on: ValueModel = initPeer + ) = res + .CallServiceRes( + ValueModel.fromRaw(config.callbackSrvId), + config.respFuncName, + CallRes(arguments, None), + on + ) + .leaf + + def respCall(config: TransformConfig, value: ValueModel, on: ValueModel = initPeer) = + respCallImpl(config, value :: Nil, on) + + def emptyRespCall(config: TransformConfig, on: ValueModel = initPeer) = + respCallImpl(config, Nil) def dataCall(bc: TransformConfig, name: String, on: ValueModel = initPeer) = res diff --git a/model/transform/src/test/scala/aqua/model/transform/TransformSpec.scala b/model/transform/src/test/scala/aqua/model/transform/TransformSpec.scala index b0475e671..879104da3 100644 --- a/model/transform/src/test/scala/aqua/model/transform/TransformSpec.scala +++ b/model/transform/src/test/scala/aqua/model/transform/TransformSpec.scala @@ -4,15 +4,15 @@ import aqua.model.transform.ModelBuilder import aqua.model.transform.{Transform, TransformConfig} import aqua.model.{CallModel, FuncArrow, LiteralModel, VarModel} import aqua.raw.ops.{Call, CallArrowRawTag, FuncOp, OnTag, RawTag, SeqTag} -import aqua.raw.value.{LiteralRaw, VarRaw} -import aqua.types.{ArrowType, NilType, ProductType, ScalarType} import aqua.raw.value.{LiteralRaw, ValueRaw, VarRaw} +import aqua.raw.value.{LiteralRaw, VarRaw} import aqua.res.{CallRes, CallServiceRes, MakeRes, SeqRes, XorRes} +import aqua.types.{ArrowType, NilType, ProductType, ScalarType} -import org.scalatest.flatspec.AnyFlatSpec -import org.scalatest.matchers.should.Matchers import cats.data.Chain import cats.syntax.show.* +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers class TransformSpec extends AnyFlatSpec with Matchers { From 7d1f5fb8df426bfe82175c75fc44733d3ecf40c5 Mon Sep 17 00:00:00 2001 From: Anatolios Laskaris Date: Wed, 22 Nov 2023 10:38:57 +0200 Subject: [PATCH 05/30] chore: Fix e2e after renaming flox to fcli (#986) Fix e2e --- .github/workflows/e2e.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index 31d65a83c..de73256dd 100644 --- 
a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -35,22 +35,22 @@ jobs: with: ref: ${{ github.ref }} - flox-snapshot: - name: "flox" + fcli-snapshot: + name: "fcli" needs: aqua uses: fluencelabs/flox/.github/workflows/snapshot.yml@main with: aqua-snapshots: "${{ needs.aqua.outputs.aqua-snapshots }}" - flox: + fcli: needs: aqua - uses: fluencelabs/fluence-cli/.github/workflows/tests.yml@main + uses: fluencelabs/cli/.github/workflows/tests.yml@main with: aqua-snapshots: "${{ needs.aqua.outputs.aqua-snapshots }}" registry: needs: - - flox-snapshot + - fcli-snapshot uses: fluencelabs/registry/.github/workflows/tests.yml@main with: - flox-version: "${{ needs.flox-snapshot.outputs.version }}" + fcli-version: "${{ needs.fcli-snapshot.outputs.version }}" From d72d72419085c4dc55011e55090a662b94a1fc33 Mon Sep 17 00:00:00 2001 From: InversionSpaces Date: Wed, 22 Nov 2023 14:44:34 +0100 Subject: [PATCH 06/30] feat(api): Use `js.UndefOr` for `defaultServiceId` (#980) Use js.UndefOr --- .github/workflows/e2e.yml | 3 ++- js/js-exports/src/main/scala/aqua/js/Definitions.scala | 7 +++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index de73256dd..a233a47da 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -44,9 +44,10 @@ jobs: fcli: needs: aqua - uses: fluencelabs/cli/.github/workflows/tests.yml@main + uses: fluencelabs/fluence-cli/.github/workflows/tests.yml@use-js-client-0.5.0 with: aqua-snapshots: "${{ needs.aqua.outputs.aqua-snapshots }}" + ref: use-js-client-0.5.0 registry: needs: diff --git a/js/js-exports/src/main/scala/aqua/js/Definitions.scala b/js/js-exports/src/main/scala/aqua/js/Definitions.scala index 64dfafa98..226ccde9c 100644 --- a/js/js-exports/src/main/scala/aqua/js/Definitions.scala +++ b/js/js-exports/src/main/scala/aqua/js/Definitions.scala @@ -115,14 +115,17 @@ object TypeDefinitionJs { @JSExportAll case class ServiceDefJs( - defaultServiceId: Option[String], + defaultServiceId: js.UndefOr[String], functions: LabeledTypeDefJs ) object ServiceDefJs { def apply(sd: ServiceDef): ServiceDefJs = { - ServiceDefJs(sd.defaultServiceId, LabeledTypeDefJs(sd.functions)) + ServiceDefJs( + sd.defaultServiceId.getOrElse(()), + LabeledTypeDefJs(sd.functions) + ) } } From c09d7e4ca7ec36735d569143a6df06d97ac2a4e4 Mon Sep 17 00:00:00 2001 From: fluencebot <116741523+fluencebot@users.noreply.github.com> Date: Wed, 22 Nov 2023 16:21:38 +0200 Subject: [PATCH 07/30] chore(main): release aqua 0.13.0 (#944) * chore(main): release aqua 0.13.0 * chore: Bump aqua version to 0.13.0 --- .github/release-please/manifest.json | 2 +- CHANGELOG.md | 29 +++++++++++++++++++ api/api-npm/package.json | 2 +- build.sbt | 2 +- integration-tests/package.json | 2 +- .../language-server-npm/package.json | 2 +- pnpm-lock.yaml | 2 +- 7 files changed, 35 insertions(+), 6 deletions(-) diff --git a/.github/release-please/manifest.json b/.github/release-please/manifest.json index e939560f4..ed21d28cb 100644 --- a/.github/release-please/manifest.json +++ b/.github/release-please/manifest.json @@ -1,3 +1,3 @@ { - ".": "0.12.4" + ".": "0.13.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index ade49c05c..268047d51 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,34 @@ # Changelog +## [0.13.0](https://github.com/fluencelabs/aqua/compare/aqua-v0.12.4...aqua-v0.13.0) (2023-11-22) + + +### ⚠ BREAKING CHANGES + +* **compiler:** Make `nil` option bottom [LNG-279] ([#968](https://github.com/fluencelabs/aqua/issues/968)) +* 
**compiler:** Prohibit mutating options [LNG-277] ([#960](https://github.com/fluencelabs/aqua/issues/960)) + +### Features + +* **api:** Use `js.UndefOr` for `defaultServiceId` ([#980](https://github.com/fluencelabs/aqua/issues/980)) ([1e525fa](https://github.com/fluencelabs/aqua/commit/1e525fad35585c5401914200e8e15c50304d8688)) +* **compiler:** Allow omitting field name in struct creation [LNG-261] ([#943](https://github.com/fluencelabs/aqua/issues/943)) ([fcdb5b0](https://github.com/fluencelabs/aqua/commit/fcdb5b0fefeffc004a50bd66a6a768b36ed9d71d)) +* **compiler:** Always generate `last` argument of `fold` [LNG-265] ([#947](https://github.com/fluencelabs/aqua/issues/947)) ([78ee753](https://github.com/fluencelabs/aqua/commit/78ee753c7b3e956faf5c92f4992e51c1af4ac76a)) +* **compiler:** Generate empty calls to `responseHandlerSrv` [LNG-286] ([#979](https://github.com/fluencelabs/aqua/issues/979)) ([cee4448](https://github.com/fluencelabs/aqua/commit/cee444862a7cf2bea3f84e921b03328fd83fa108)) +* **compiler:** Make `nil` option bottom [LNG-279] ([#968](https://github.com/fluencelabs/aqua/issues/968)) ([11c8970](https://github.com/fluencelabs/aqua/commit/11c8970fd889a3acb07c0c65013d153194106e62)) +* **compiler:** Prohibit mutating options [LNG-277] ([#960](https://github.com/fluencelabs/aqua/issues/960)) ([68425ed](https://github.com/fluencelabs/aqua/commit/68425ed42a7440aefac82011bbac418ef13bd636)) + + +### Bug Fixes + +* **compiler:** Code generate wrong stream name in AIR [LNG-276] ([#958](https://github.com/fluencelabs/aqua/issues/958)) ([a1576ef](https://github.com/fluencelabs/aqua/commit/a1576efad9d6a3d5fb20c01182b0d0a948c50ec4)) +* **compiler:** Incorrect focus on error [LNG-274] ([#959](https://github.com/fluencelabs/aqua/issues/959)) ([6e6b567](https://github.com/fluencelabs/aqua/commit/6e6b567f8eac007b0e50fc6e2eb4e4918663016e)) +* **compiler:** Multiple closures in one function [LNG-262] ([#941](https://github.com/fluencelabs/aqua/issues/941)) ([45ca7bb](https://github.com/fluencelabs/aqua/commit/45ca7bbf3ed234b25d0f010c2bd5878f9bce9155)) +* **compiler:** Throw an error when comparing an alias and a named type with the same name [LNG-231] ([#946](https://github.com/fluencelabs/aqua/issues/946)) ([38f7728](https://github.com/fluencelabs/aqua/commit/38f77285f57410848a5f6f3333ca6a99fb5017f5)) +* **compiler:** Unknown service method call is ignored [LNG-273] ([#957](https://github.com/fluencelabs/aqua/issues/957)) ([5a3c5e6](https://github.com/fluencelabs/aqua/commit/5a3c5e6666f53ac222e297e69971e3d84499759f)) +* **deps:** update dependency @fluencelabs/js-client to v0.4.1 ([#945](https://github.com/fluencelabs/aqua/issues/945)) ([634b1c1](https://github.com/fluencelabs/aqua/commit/634b1c17b629b6f899bfe5ff17a9bdc81673acba)) +* **deps:** update dependency @fluencelabs/js-client to v0.4.2 ([#956](https://github.com/fluencelabs/aqua/issues/956)) ([077dc8f](https://github.com/fluencelabs/aqua/commit/077dc8ff131cb1c4cc6300b078232c1be93c39cd)) +* **deps:** update dependency @fluencelabs/js-client to v0.4.3 ([#966](https://github.com/fluencelabs/aqua/issues/966)) ([313502e](https://github.com/fluencelabs/aqua/commit/313502ecae4f0568af63158e6f518bc8e398953d)) + ## [0.12.4](https://github.com/fluencelabs/aqua/compare/aqua-v0.12.3...aqua-v0.12.4) (2023-10-23) diff --git a/api/api-npm/package.json b/api/api-npm/package.json index da075f2c7..9ac22bb13 100644 --- a/api/api-npm/package.json +++ 
b/api/api-npm/package.json @@ -1,6 +1,6 @@ { "name": "@fluencelabs/aqua-api", - "version": "0.12.4", + "version": "0.13.0", "description": "Aqua API", "type": "module", "main": "index.js", diff --git a/build.sbt b/build.sbt index 07f1a8ba3..ab3d2de1f 100644 --- a/build.sbt +++ b/build.sbt @@ -1,6 +1,6 @@ import BundleJS.* -val aquaVersion = "0.12.4" +val aquaVersion = "0.13.0" val scalaV = "3.3.1" val catsV = "2.10.0" diff --git a/integration-tests/package.json b/integration-tests/package.json index 107f5c5dc..25b1577a7 100644 --- a/integration-tests/package.json +++ b/integration-tests/package.json @@ -31,7 +31,7 @@ }, "prettier": {}, "devDependencies": { - "@fluencelabs/aqua-api": "0.12.4", + "@fluencelabs/aqua-api": "0.13.0", "@fluencelabs/aqua-dht": "0.2.5", "@fluencelabs/aqua-lib": "0.7.7", "@types/jest": "29.5.2", diff --git a/language-server/language-server-npm/package.json b/language-server/language-server-npm/package.json index d140218e7..9e008d5db 100644 --- a/language-server/language-server-npm/package.json +++ b/language-server/language-server-npm/package.json @@ -1,6 +1,6 @@ { "name": "@fluencelabs/aqua-language-server-api", - "version": "0.12.4", + "version": "0.13.0", "description": "Aqua Language Server API", "type": "commonjs", "files": [ diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 2268eb316..2835c62d1 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -37,7 +37,7 @@ importers: version: 1.8.1 devDependencies: '@fluencelabs/aqua-api': - specifier: 0.12.4 + specifier: 0.13.0 version: link:../api/api-npm '@fluencelabs/aqua-dht': specifier: 0.2.5 From 52f0f305b309b5aef2a0508f1d1cf646692e60d8 Mon Sep 17 00:00:00 2001 From: InversionSpaces Date: Thu, 23 Nov 2023 11:12:44 +0100 Subject: [PATCH 08/30] fix(compiler): Allow returning resolved service as ability [LNG-266] (#977) * Refactor * Refactor type system * Remove println * Fix renaming * Add unit tests * Do not convert to call arrow * Check ability * Refactor captured values resolution * Remove println * Fix fields gathering * Remove println * Remove println * Fix renaming, export ability * Rename only abilities * Fix unit tests * Fix captured arrows renaming * Add comments * Refactor * Rename only arrows * Add comments, refactor * Add comments * Rename method * Add integration test --------- Co-authored-by: Anatolios Laskaris Co-authored-by: Dima --- build.sbt | 6 +- .../scala/aqua/compiler/AquaCompiler.scala | 14 +- .../aqua/compiler/AquaCompilerSpec.scala | 63 ++++++- .../aqua/examples/abilities.aqua | 26 ++- .../src/__test__/examples.spec.ts | 6 + integration-tests/src/examples/abilityCall.ts | 25 ++- .../aqua/model/inline/ArrowInliner.scala | 167 ++++++++++++++---- .../aqua/model/inline/RawValueInliner.scala | 16 +- .../raw/ApplyPropertiesRawInliner.scala | 2 +- .../inline/raw/MakeAbilityRawInliner.scala | 9 +- .../aqua/model/inline/state/Arrows.scala | 13 +- .../aqua/model/inline/state/Exports.scala | 38 +++- .../aqua/model/inline/ArrowInlinerSpec.scala | 21 ++- .../src/main/scala/aqua/raw/ops/RawTag.scala | 3 + .../scala/aqua/raw/ops/RawTagGivens.scala | 8 +- .../main/scala/aqua/model/AquaContext.scala | 12 +- .../src/main/scala/aqua/model/ArgsCall.scala | 13 +- .../src/main/scala/aqua/model/CallModel.scala | 11 +- .../main/scala/aqua/model/ValueModel.scala | 12 +- .../scala/aqua/parser/lexer/ValueToken.scala | 84 +++++---- .../scala/aqua/semantics/CompilerState.scala | 4 +- .../scala/aqua/semantics/RawSemantics.scala | 22 +-- .../semantics/expr/func/CallArrowSem.scala | 9 +- 
.../aqua/semantics/expr/func/ReturnSem.scala | 39 ++-- .../aqua/semantics/header/HeaderHandler.scala | 6 +- .../aqua/semantics/rules/ValuesAlgebra.scala | 41 +++-- .../rules/abilities/AbilitiesAlgebra.scala | 2 + .../abilities/AbilitiesInterpreter.scala | 15 +- .../semantics/rules/names/NamesState.scala | 2 + .../rules/types/TypesInterpreter.scala | 54 +++--- .../scala/aqua/semantics/SemanticsSpec.scala | 71 +++++--- .../main/scala/aqua/types/CompareTypes.scala | 3 +- types/src/main/scala/aqua/types/Type.scala | 122 +++++++------ .../scala/aqua/types/TypeVarianceSpec.scala | 37 ++++ types/src/test/scala/aqua/types/package.scala | 112 ++++++++++++ 35 files changed, 779 insertions(+), 309 deletions(-) create mode 100644 types/src/test/scala/aqua/types/TypeVarianceSpec.scala create mode 100644 types/src/test/scala/aqua/types/package.scala diff --git a/build.sbt b/build.sbt index ab3d2de1f..964f26f0f 100644 --- a/build.sbt +++ b/build.sbt @@ -7,6 +7,7 @@ val catsV = "2.10.0" val catsParseV = "0.3.10" val monocleV = "3.1.0" val scalaTestV = "3.2.17" +val scalaTestScalaCheckV = "3.2.17.0" val sourcecodeV = "0.3.0" val fs2V = "3.9.3" val catsEffectV = "3.6-1f95fd7" @@ -23,8 +24,9 @@ val commons = Seq( }, scalaVersion := scalaV, libraryDependencies ++= Seq( - "com.outr" %%% "scribe" % scribeV, - "org.scalatest" %%% "scalatest" % scalaTestV % Test + "com.outr" %%% "scribe" % scribeV, + "org.scalatest" %%% "scalatest" % scalaTestV % Test, + "org.scalatestplus" %%% "scalacheck-1-17" % scalaTestScalaCheckV % Test ), scalacOptions ++= { Seq( diff --git a/compiler/src/main/scala/aqua/compiler/AquaCompiler.scala b/compiler/src/main/scala/aqua/compiler/AquaCompiler.scala index 5913e3d32..66b7c7446 100644 --- a/compiler/src/main/scala/aqua/compiler/AquaCompiler.scala +++ b/compiler/src/main/scala/aqua/compiler/AquaCompiler.scala @@ -1,7 +1,7 @@ package aqua.compiler -import aqua.compiler.AquaError.{ParserError as AquaParserError, *} import aqua.backend.Backend +import aqua.compiler.AquaError.{ParserError as AquaParserError, *} import aqua.linker.{AquaModule, Linker, Modules} import aqua.model.AquaContext import aqua.parser.lift.{LiftParser, Span} @@ -9,22 +9,22 @@ import aqua.parser.{Ast, ParserError} import aqua.raw.RawPart.Parts import aqua.raw.{RawContext, RawPart} import aqua.res.AquaRes -import aqua.semantics.{CompilerState, Semantics} import aqua.semantics.header.{HeaderHandler, HeaderSem, Picker} +import aqua.semantics.{CompilerState, Semantics} import aqua.semantics.{SemanticError, SemanticWarning} +import cats.arrow.FunctionK import cats.data.* -import cats.data.Validated.{validNec, Invalid, Valid} +import cats.data.Validated.{Invalid, Valid, validNec} import cats.parse.Parser0 import cats.syntax.applicative.* +import cats.syntax.either.* import cats.syntax.flatMap.* import cats.syntax.functor.* import cats.syntax.monoid.* -import cats.syntax.traverse.* import cats.syntax.semigroup.* -import cats.syntax.either.* -import cats.{~>, Comonad, Functor, Monad, Monoid, Order} -import cats.arrow.FunctionK +import cats.syntax.traverse.* +import cats.{Comonad, Functor, Monad, Monoid, Order, ~>} import scribe.Logging class AquaCompiler[F[_]: Monad, E, I: Order, S[_]: Comonad, C: Monoid: Picker]( diff --git a/compiler/src/test/scala/aqua/compiler/AquaCompilerSpec.scala b/compiler/src/test/scala/aqua/compiler/AquaCompilerSpec.scala index e43b5345e..a0f05157d 100644 --- a/compiler/src/test/scala/aqua/compiler/AquaCompilerSpec.scala +++ b/compiler/src/test/scala/aqua/compiler/AquaCompilerSpec.scala @@ -120,7 
+120,7 @@ class AquaCompilerSpec extends AnyFlatSpec with Matchers with Inside { def getDataSrv(name: String, varName: String, t: Type) = { CallServiceRes( - LiteralModel.fromRaw(LiteralRaw.quote("getDataSrv")), + LiteralModel.quote("getDataSrv"), name, CallRes(Nil, Some(CallModel.Export(varName, t))), LiteralModel.fromRaw(ValueRaw.InitPeerId) @@ -364,7 +364,66 @@ class AquaCompilerSpec extends AnyFlatSpec with Matchers with Inside { errorCall(transformCfg, 0, initPeer) ) - insideRes(src, transformCfg = transformCfg)("main") { case main :: _ => + insideRes(src, transformCfg = transformCfg)("main") { case main :: Nil => + main.body.equalsOrShowDiff(expected) should be(true) + } + } + + it should "allow returning and passing services as abilities" in { + val src = Map( + "main.aqua" -> """ + |aqua Test + | + |export test + | + |ability Ab: + | log(log: string) + | + |service Srv("default-id"): + | log(log: string) + | + |func useAb{Ab}(): + | Ab.log("test") + | + |func genDefault() -> Ab: + | <- Srv + | + |func genResolved() -> Ab: + | Srv "resolved-id" + | <- Srv + | + |func test(): + | resolved <- genResolved() + | useAb{resolved}() + | default <- genDefault() + | useAb{default}() + |""".stripMargin + ) + + val transformCfg = TransformConfig() + + insideRes(src, transformCfg = transformCfg)("test") { case main :: Nil => + def srvCall(id: String) = + CallServiceRes( + serviceId = LiteralModel.quote(id), + funcName = "log", + call = CallRes( + List(LiteralModel.quote("test")), + None + ), + initPeer + ).leaf + + val expected = XorRes.wrap( + SeqRes.wrap( + getDataSrv("-relay-", "-relay-", ScalarType.string), + srvCall("resolved-id"), + srvCall("default-id"), + emptyRespCall(transformCfg, initPeer) + ), + errorCall(transformCfg, 0, initPeer) + ) + main.body.equalsOrShowDiff(expected) should be(true) } } diff --git a/integration-tests/aqua/examples/abilities.aqua b/integration-tests/aqua/examples/abilities.aqua index 78a18ff6f..9a02a2e79 100644 --- a/integration-tests/aqua/examples/abilities.aqua +++ b/integration-tests/aqua/examples/abilities.aqua @@ -2,7 +2,7 @@ aqua Main use DECLARE_CONST, decl_bar from "imports_exports/declare.aqua" as Declare -export handleAb, SomeService, bug214, checkAbCalls, bugLNG258_1, bugLNG258_2, bugLNG258_3, multipleAbilityWithClosure +export handleAb, SomeService, bug214, checkAbCalls, bugLNG258_1, bugLNG258_2, bugLNG258_3, multipleAbilityWithClosure, MySrv, returnSrvAsAbility service SomeService("wed"): getStr(s: string) -> string @@ -128,4 +128,28 @@ func multipleAbilityWithClosure() -> i8, i8: ab2 <- createAb(2) <- ab.arrow(), ab2.arrow() +ability MyAb: + call() -> string + +service MySrv("default-id"): + call() -> string + +func mySrvDefault() -> MyAb: + <- MySrv + +func mySrvResolved() -> MyAb: + MySrv "resolved-id" + <- MySrv + +func useMyAb{MyAb}() -> string: + <- MyAb.call() + +func returnSrvAsAbility() -> []string: + result: *string + MySrvDefault <- mySrvDefault() + MySrvResolved <- mySrvResolved() + result <- useMyAb{MySrvDefault}() + result <- useMyAb{MySrvResolved}() + <- result + diff --git a/integration-tests/src/__test__/examples.spec.ts b/integration-tests/src/__test__/examples.spec.ts index 5050bc571..23b0204df 100644 --- a/integration-tests/src/__test__/examples.spec.ts +++ b/integration-tests/src/__test__/examples.spec.ts @@ -38,6 +38,7 @@ import { bugLNG258Call2, bugLNG258Call3, multipleAbilityWithClosureCall, + returnSrvAsAbilityCall, } from "../examples/abilityCall.js"; import { nilLengthCall, @@ -579,6 +580,11 @@ describe("Testing 
examples", () => { expect(result1).toStrictEqual([1, 2]); }); + it("abilities.aqua return service as ability", async () => { + let result = await returnSrvAsAbilityCall(); + expect(result).toStrictEqual(["default-id", "resolved-id"]); + }); + it("functors.aqua LNG-119 bug", async () => { let result = await bugLng119Call(); expect(result).toEqual([1]); diff --git a/integration-tests/src/examples/abilityCall.ts b/integration-tests/src/examples/abilityCall.ts index 32b81e960..843cbb770 100644 --- a/integration-tests/src/examples/abilityCall.ts +++ b/integration-tests/src/examples/abilityCall.ts @@ -6,7 +6,9 @@ import { bugLNG258_1, bugLNG258_2, bugLNG258_3, - multipleAbilityWithClosure + multipleAbilityWithClosure, + registerMySrv, + returnSrvAsAbility, } from "../compiled/examples/abilities"; export async function abilityCall(): Promise<[string, string, string, number]> { @@ -39,6 +41,23 @@ export async function bugLNG258Call3(): Promise<[number, number]> { return await bugLNG258_3(); } -export async function multipleAbilityWithClosureCall(): Promise<[number, number]> { - return await multipleAbilityWithClosure() +export async function multipleAbilityWithClosureCall(): Promise< + [number, number] +> { + return await multipleAbilityWithClosure(); +} + +export async function returnSrvAsAbilityCall(): Promise { + const srv = (id: string) => { + return { + call: () => { + return id; + }, + }; + }; + + registerMySrv("default-id", srv("default-id")); + registerMySrv("resolved-id", srv("resolved-id")); + + return await returnSrvAsAbility(); } diff --git a/model/inline/src/main/scala/aqua/model/inline/ArrowInliner.scala b/model/inline/src/main/scala/aqua/model/inline/ArrowInliner.scala index 0be4c3bb2..70cbf9c3f 100644 --- a/model/inline/src/main/scala/aqua/model/inline/ArrowInliner.scala +++ b/model/inline/src/main/scala/aqua/model/inline/ArrowInliner.scala @@ -6,15 +6,17 @@ import aqua.model.* import aqua.model.inline.state.{Arrows, Exports, Mangler} import aqua.raw.ops.RawTag import aqua.raw.value.{ValueRaw, VarRaw} -import aqua.types.{AbilityType, ArrowType, CollectionType, NamedType, StreamType, Type} +import aqua.types.* import cats.data.StateT import cats.data.{Chain, IndexedStateT, State} +import cats.kernel.Semigroup import cats.syntax.applicative.* import cats.syntax.bifunctor.* import cats.syntax.foldable.* import cats.syntax.functor.* import cats.syntax.option.* +import cats.syntax.semigroup.* import cats.syntax.show.* import cats.syntax.traverse.* import cats.{Eval, Monoid} @@ -105,7 +107,9 @@ object ArrowInliner extends Logging { exports <- Exports[S].exports arrows <- Arrows[S].arrows // gather all arrows and variables from abilities - returnedAbilities = rets.collect { case VarModel(name, at: AbilityType, _) => name -> at } + returnedAbilities = rets.collect { case ValueModel.Ability(vm, at) => + vm.name -> at + } varsFromAbilities = returnedAbilities.flatMap { case (name, at) => getAbilityVars(name, None, at, exports) }.toMap @@ -138,9 +142,9 @@ object ArrowInliner extends Logging { private def getAbilityFields[T <: Type]( name: String, newName: Option[String], - `type`: NamedType, + `type`: GeneralAbilityType, exports: Map[String, ValueModel] - )(fields: NamedType => Map[String, T]): Map[String, ValueModel] = + )(fields: GeneralAbilityType => Map[String, T]): Map[String, ValueModel] = fields(`type`).flatMap { case (fName, _) => val fullName = AbilityType.fullName(name, fName) val newFullName = AbilityType.fullName(newName.getOrElse(name), fName) @@ -162,7 +166,7 @@ object 
ArrowInliner extends Logging { private def getAbilityVars( abilityName: String, abilityNewName: Option[String], - abilityType: AbilityType, + abilityType: GeneralAbilityType, exports: Map[String, ValueModel] ): Map[String, ValueModel] = { val get = getAbilityFields( @@ -173,7 +177,7 @@ object ArrowInliner extends Logging { ) get(_.variables) ++ get(_.arrows).flatMap { - case arrow @ (_, vm: VarModel) => + case arrow @ (_, vm @ ValueModel.Arrow(_, _)) => arrow.some case (_, m) => internalError(s"($m) cannot be an arrow") @@ -193,7 +197,7 @@ object ArrowInliner extends Logging { private def getAbilityArrows( name: String, newName: Option[String], - `type`: NamedType, + `type`: GeneralAbilityType, exports: Map[String, ValueModel], arrows: Map[String, FuncArrow] ): Map[String, FuncArrow] = { @@ -205,8 +209,8 @@ object ArrowInliner extends Logging { ) get(_.arrows).flatMap { - case (_, VarModel(name, _, _)) => - arrows.get(name).map(name -> _) + case (_, ValueModel.Arrow(vm, _)) => + arrows.get(vm.name).map(vm.name -> _) case (_, m) => internalError(s"($m) cannot be an arrow") } @@ -214,7 +218,7 @@ object ArrowInliner extends Logging { private def getAbilityArrows[S: Arrows: Exports]( name: String, - `type`: NamedType + `type`: GeneralAbilityType ): State[S, Map[String, FuncArrow]] = for { exports <- Exports[S].exports arrows <- Arrows[S].arrows @@ -225,6 +229,16 @@ object ArrowInliner extends Logging { renamed: Map[String, T] ) + given [T]: Monoid[Renamed[T]] with { + override def empty: Renamed[T] = Renamed(Map.empty, Map.empty) + + override def combine(x: Renamed[T], y: Renamed[T]): Renamed[T] = + Renamed( + x.renames ++ y.renames, + x.renamed ++ y.renamed + ) + } + // TODO: Make this extension private somehow? extension [T](vals: Map[String, T]) { @@ -250,6 +264,115 @@ object ArrowInliner extends Logging { ) } + /** + * Correctly rename captured values and arrows of a function + * + * @param fn Function + * @param exports Exports state before calling/inlining + * @param arrows Arrows state before calling/inlining + * @return Renamed values and arrows + */ + def renameCaptured[S: Mangler]( + fn: FuncArrow, + exports: Map[String, ValueModel], + arrows: Map[String, FuncArrow] + ): State[S, (Renamed[ValueModel], Renamed[FuncArrow])] = { + // Gather abilities related values + val abilitiesValues = fn.capturedValues.collect { + // Gather only top level abilities + case (name, ValueModel.Ability(vm, at)) if vm.properties.isEmpty => + name -> ( + at, + /** + * Gather all values related to `name` + * NOTE: It is important that `capturedValues` are + * populated by all values related to ability `name` + * on creation of `FuncArrow`. 
+ */ + Exports.gatherFrom( + name :: Nil, + fn.capturedValues + ) + ) + } + // Gather all abilities related names + val abilitiesValuesKeys = abilitiesValues.flatMap { case (_, (_, values)) => + values.keySet + } + + // Gather abilities related arrows + val abilitiesArrows = abilitiesValues.toList.foldMap { case (_, (_, values)) => + Arrows.arrowsByValues(fn.capturedArrows, values).toList + }.toMap + + // Gather all other values and arrows that are not related to abilities + val otherValues = fn.capturedValues -- abilitiesValuesKeys + val otherArrows = fn.capturedArrows -- abilitiesArrows.keySet + + for { + // Calculate renaming based on abilities + valuesRenamed <- abilitiesValues.toList.traverse { case (name, (at, values)) => + Mangler[S] + .findAndForbidName(name) + .map(rename => + // Get renaming map for this ability + AbilityType + .renames(at)(name, rename) + // Add ability rename too + .updated(name, rename) + ) + .map(renames => + // This code is HACKERY!!! + val valuesRenamed = values.renamed(renames).map { + /** + * `VarModel` is sometimes used to point to an arrow. + * So if it is renamed, we should rename the `VarModel` too. + * Otherwise renamed value will be resolved + * to previous name when trying to resolve the arrow. + * But this should be done only if the name in model + * is the same as the name of the export, + * because export could point to another arrow. + */ + case (name, ValueModel.Arrow(vm, _)) if renames.contains(vm.name) => + name -> vm.copy(name = name) + /** + * `VarModel` is used to point to an ability. + * So if it is renamed, we should rename the `VarModel` too. + * Otherwise renamed value will be resolved + * to previous name when trying to resolve the ability. + */ + case (name, ValueModel.Ability(vm, _)) => + name -> vm.copy(name = name) + case v => v + } + Renamed(renames, valuesRenamed) + ) + }.map(_.combineAll) + + // Rename arrows according to values + arrowsRenamed = Renamed( + valuesRenamed.renames.filterKeys(abilitiesArrows.keySet).toMap, + abilitiesArrows.renamed(valuesRenamed.renames) + ) + + // Rename values and arrows unrelated to abilities + otherValuesRenamed <- findNewNames(otherValues) + otherArrowsValues = Arrows.arrowsByValues( + otherArrows, + otherValues + ) + otherArrowsValuesRenamed = Renamed( + otherValuesRenamed.renames.filterKeys(otherArrowsValues.keySet).toMap, + otherArrowsValues.renamed(otherValuesRenamed.renames) + ) + + otherArrowsRenamed <- findNewNames(otherArrows -- otherArrowsValues.keySet) + + values = valuesRenamed |+| otherValuesRenamed + arrows = arrowsRenamed |+| otherArrowsValuesRenamed |+| otherArrowsRenamed + } yield values -> arrows + } + /** * Prepare the function and the context for inlining * @@ -283,26 +406,8 @@ object ArrowInliner extends Logging { arrowRenames = args.arrowArgsRenames abRenames = args.abilityArgsRenames - /** - * Find new names for captured values and arrows - * to avoid collisions, then resolve them in context. 
- */ - capturedValues <- findNewNames(fn.capturedValues) - /** - * If arrow correspond to a value, - * rename in accordingly to the value - */ - capturedArrowValues = Arrows.arrowsByValues( - fn.capturedArrows, - fn.capturedValues - ) - capturedArrowValuesRenamed = capturedArrowValues.renamed( - capturedValues.renames - ) - /** - * Rename arrows that are not values - */ - capturedArrows <- findNewNames(fn.capturedArrows -- capturedArrowValues.keySet) + captured <- renameCaptured(fn, exports, arrows) + (capturedValues, capturedArrows) = captured /** * Function defines variables inside its body. @@ -331,7 +436,7 @@ object ArrowInliner extends Logging { * It seems that resolving whole `exports` * and `arrows` is not necessary. */ - arrowsResolved = arrows ++ capturedArrowValuesRenamed ++ capturedArrows.renamed + arrowsResolved = arrows ++ capturedArrows.renamed exportsResolved = exports ++ data.renamed ++ capturedValues.renamed tree = fn.body.rename(renaming) diff --git a/model/inline/src/main/scala/aqua/model/inline/RawValueInliner.scala b/model/inline/src/main/scala/aqua/model/inline/RawValueInliner.scala index 3caba68fc..f677af973 100644 --- a/model/inline/src/main/scala/aqua/model/inline/RawValueInliner.scala +++ b/model/inline/src/main/scala/aqua/model/inline/RawValueInliner.scala @@ -1,22 +1,22 @@ package aqua.model.inline -import aqua.model.inline.state.{Arrows, Counter, Exports, Mangler} -import aqua.model.inline.Inline.MergeMode.* import aqua.model.* +import aqua.model.inline.Inline.MergeMode.* import aqua.model.inline.raw.* +import aqua.model.inline.state.{Arrows, Counter, Exports, Mangler} import aqua.raw.ops.* import aqua.raw.value.* import aqua.types.{ArrayType, LiteralType, OptionType, StreamType} import cats.Eval -import cats.syntax.traverse.* -import cats.syntax.monoid.* -import cats.syntax.functor.* -import cats.syntax.flatMap.* -import cats.syntax.apply.* -import cats.instances.list.* import cats.data.{Chain, State, StateT} +import cats.instances.list.* import cats.syntax.applicative.* +import cats.syntax.apply.* +import cats.syntax.flatMap.* +import cats.syntax.functor.* +import cats.syntax.monoid.* +import cats.syntax.traverse.* import scribe.Logging object RawValueInliner extends Logging { diff --git a/model/inline/src/main/scala/aqua/model/inline/raw/ApplyPropertiesRawInliner.scala b/model/inline/src/main/scala/aqua/model/inline/raw/ApplyPropertiesRawInliner.scala index 43df1e0fd..7327efc82 100644 --- a/model/inline/src/main/scala/aqua/model/inline/raw/ApplyPropertiesRawInliner.scala +++ b/model/inline/src/main/scala/aqua/model/inline/raw/ApplyPropertiesRawInliner.scala @@ -223,7 +223,7 @@ object ApplyPropertiesRawInliner extends RawInliner[ApplyPropertyRaw] with Loggi State.pure((vm, prevInline.mergeWith(optimizationInline, SeqMode))) ) { case (state, property) => state.flatMap { - case (vm @ Ability(_, at, _), leftInline) => + case (vm @ Ability(_, at), leftInline) => unfoldAbilityProperty(vm, at, property.raw).map { case (vm, inl) => ( vm, diff --git a/model/inline/src/main/scala/aqua/model/inline/raw/MakeAbilityRawInliner.scala b/model/inline/src/main/scala/aqua/model/inline/raw/MakeAbilityRawInliner.scala index a880bef9b..0eae5c105 100644 --- a/model/inline/src/main/scala/aqua/model/inline/raw/MakeAbilityRawInliner.scala +++ b/model/inline/src/main/scala/aqua/model/inline/raw/MakeAbilityRawInliner.scala @@ -1,12 +1,13 @@ package aqua.model.inline.raw +import aqua.model.ValueModel.Ability import aqua.model.inline.Inline import aqua.model.inline.RawValueInliner.unfold 
import aqua.model.inline.state.{Arrows, Exports, Mangler} import aqua.model.{SeqModel, ValueModel, VarModel} import aqua.raw.value.AbilityRaw import aqua.types.AbilityType -import aqua.model.ValueModel.Ability + import cats.Eval import cats.data.{Chain, IndexedStateT, NonEmptyMap, State} import cats.syntax.foldable.* @@ -19,9 +20,9 @@ object MakeAbilityRawInliner extends RawInliner[AbilityRaw] { fields: NonEmptyMap[String, (ValueModel, Inline)] ): State[S, Unit] = fields.toNel.traverse { - case (n, (Ability(abilityName, _, _), _)) => + case (n, (Ability(vm, _), _)) => val leftName = AbilityType.fullName(name, n) - Exports[S].copyWithAbilityPrefix(abilityName, leftName) + Exports[S].copyWithAbilityPrefix(vm.name, leftName) case (n, (vm, _)) => Exports[S].resolveAbilityField(name, n, vm) }.as(()) @@ -31,7 +32,7 @@ object MakeAbilityRawInliner extends RawInliner[AbilityRaw] { propertiesAllowed: Boolean ): State[S, (ValueModel, Inline)] = { for { - name <- Mangler[S].findAndForbidName(raw.abilityType.name + "_ab") + name <- Mangler[S].findAndForbidName(raw.abilityType.name + "_anon") foldedFields <- raw.fieldsAndArrows.nonEmptyTraverse(unfold(_)) varModel = VarModel(name, raw.baseType) valsInline = foldedFields.toList.foldMap { case (_, inline) => inline }.desugar diff --git a/model/inline/src/main/scala/aqua/model/inline/state/Arrows.scala b/model/inline/src/main/scala/aqua/model/inline/state/Arrows.scala index 1946b2550..e6f53efc3 100644 --- a/model/inline/src/main/scala/aqua/model/inline/state/Arrows.scala +++ b/model/inline/src/main/scala/aqua/model/inline/state/Arrows.scala @@ -1,14 +1,16 @@ package aqua.model.inline.state +import aqua.model.ValueModel import aqua.model.{ArgsCall, FuncArrow} import aqua.raw.arrow.FuncRaw -import aqua.model.ValueModel +import aqua.types.* import cats.data.State import cats.instances.list.* import cats.syntax.functor.* -import cats.syntax.traverse.* +import cats.syntax.option.* import cats.syntax.show.* +import cats.syntax.traverse.* /** * State algebra for resolved arrows @@ -31,9 +33,8 @@ trait Arrows[S] extends Scoped[S] { topology: Option[String] )(using Exports[S]): State[S, Unit] = for { - exps <- Exports[S].exports arrs <- arrows - capturedVars = exps.filterKeys(arrow.capturedVars).toMap + capturedVars <- Exports[S].gather(arrow.capturedVars.toSeq) capturedArrows = arrs.filterKeys(arrow.capturedVars).toMap ++ Arrows.arrowsByValues(arrs, capturedVars) funcArrow = FuncArrow.fromRaw(arrow, capturedArrows, capturedVars, topology) @@ -108,8 +109,8 @@ object Arrows { values: Map[String, ValueModel] ): Map[String, FuncArrow] = { val arrowKeys = arrows.keySet ++ arrows.values.map(_.funcName) - val varsKeys = values.keySet ++ values.values.collect { case ValueModel.Arrow(name, _) => - name + val varsKeys = values.keySet ++ values.values.collect { case ValueModel.Arrow(vm, _) => + vm.name } val keys = arrowKeys.intersect(varsKeys) diff --git a/model/inline/src/main/scala/aqua/model/inline/state/Exports.scala b/model/inline/src/main/scala/aqua/model/inline/state/Exports.scala index 489626060..b5d98d703 100644 --- a/model/inline/src/main/scala/aqua/model/inline/state/Exports.scala +++ b/model/inline/src/main/scala/aqua/model/inline/state/Exports.scala @@ -1,8 +1,8 @@ package aqua.model.inline.state -import aqua.model.{LiteralModel, ValueModel, VarModel} import aqua.model.ValueModel.Ability -import aqua.types.{AbilityType, NamedType} +import aqua.model.{LiteralModel, ValueModel, VarModel} +import aqua.types.{AbilityType, GeneralAbilityType, NamedType} import 
cats.data.{NonEmptyList, State} @@ -78,6 +78,9 @@ trait Exports[S] extends Scoped[S] { */ val exports: State[S, Map[String, ValueModel]] + final def gather(names: Seq[String]): State[S, Map[String, ValueModel]] = + exports.map(Exports.gatherFrom(names, _)) + /** * Change [[S]] to [[R]] */ @@ -125,6 +128,31 @@ trait Exports[S] extends Scoped[S] { object Exports { def apply[S](using exports: Exports[S]): Exports[S] = exports + /** + * Gather all the values that are related to the given names + * (ability fields) + * + * @param names names of variables + * @param state exports state + */ + def gatherFrom( + names: Seq[String], + state: Map[String, ValueModel] + ): Map[String, ValueModel] = { + val related = for { + variable <- names + exp <- state.get(variable).toList + at <- exp.`type` match { + case at: GeneralAbilityType => at :: Nil + case _ => Nil + } + field <- at.allFields.toNel.toList + (fieldName, _) = field + } yield AbilityType.fullName(variable, fieldName) + + state.filterKeys(names.toSet ++ related).toMap + } + // Get last linked VarModel def getLastValue(name: String, state: Map[String, ValueModel]): Option[ValueModel] = { state.get(name) match { @@ -166,9 +194,9 @@ object Exports { value: ValueModel ): State[Map[String, ValueModel], Unit] = State.modify { state => value match { - case Ability(name, at, property) if property.isEmpty => - val pairs = getAbilityPairs(name, exportName, at, state) - state ++ pairs.toList.toMap + case Ability(vm, at) if vm.properties.isEmpty => + val pairs = getAbilityPairs(vm.name, exportName, at, state) + state ++ pairs.toList.toMap + (exportName -> value) case _ => state + (exportName -> value) } } diff --git a/model/inline/src/test/scala/aqua/model/inline/ArrowInlinerSpec.scala b/model/inline/src/test/scala/aqua/model/inline/ArrowInlinerSpec.scala index 124c07c85..786f479cc 100644 --- a/model/inline/src/test/scala/aqua/model/inline/ArrowInlinerSpec.scala +++ b/model/inline/src/test/scala/aqua/model/inline/ArrowInlinerSpec.scala @@ -3,20 +3,21 @@ package aqua.model.inline import aqua.model.* import aqua.model.MetaModel.CallArrowModel import aqua.model.inline.state.InliningState +import aqua.raw.arrow.{ArrowRaw, FuncRaw} import aqua.raw.ops.* import aqua.raw.value.* -import aqua.types.* import aqua.raw.value.{CallArrowRaw, ValueRaw} -import aqua.raw.arrow.{ArrowRaw, FuncRaw} +import aqua.types.* + import cats.Eval -import cats.syntax.show.* -import cats.syntax.option.* -import cats.syntax.flatMap.* -import cats.free.Cofree import cats.data.{Chain, NonEmptyList, NonEmptyMap} +import cats.free.Cofree +import cats.syntax.flatMap.* +import cats.syntax.option.* +import cats.syntax.show.* +import org.scalatest.Inside import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import org.scalatest.Inside class ArrowInlinerSpec extends AnyFlatSpec with Matchers with Inside { @@ -266,8 +267,8 @@ class ArrowInlinerSpec extends AnyFlatSpec with Matchers with Inside { val model = callFuncModel(newFunc) - val restrictionName = model.collect { - case RestrictionModel(name, _) => name + val restrictionName = model.collect { case RestrictionModel(name, _) => + name }.headOption restrictionName shouldBe Some(someStr.name) @@ -2610,8 +2611,6 @@ class ArrowInlinerSpec extends AnyFlatSpec with Matchers with Inside { .runA(InliningState()) .value - // TODO: Don't know for what to test here - // inliner will just log an error in case of failure model.head should not equal EmptyModel } diff --git 
a/model/raw/src/main/scala/aqua/raw/ops/RawTag.scala b/model/raw/src/main/scala/aqua/raw/ops/RawTag.scala index 8d4cc2a9c..347a74987 100644 --- a/model/raw/src/main/scala/aqua/raw/ops/RawTag.scala +++ b/model/raw/src/main/scala/aqua/raw/ops/RawTag.scala @@ -345,6 +345,9 @@ case class ServiceIdTag( override def exportsVarNames: Set[String] = Set(name) + override def renameExports(map: Map[String, String]): RawTag = + copy(name = map.getOrElse(name, name)) + override def mapValues(f: ValueRaw => ValueRaw): RawTag = ServiceIdTag(value.map(f), serviceType, name) } diff --git a/model/raw/src/main/scala/aqua/raw/ops/RawTagGivens.scala b/model/raw/src/main/scala/aqua/raw/ops/RawTagGivens.scala index 2738adb59..b0ff46f3d 100644 --- a/model/raw/src/main/scala/aqua/raw/ops/RawTagGivens.scala +++ b/model/raw/src/main/scala/aqua/raw/ops/RawTagGivens.scala @@ -2,13 +2,13 @@ package aqua.raw.ops import aqua.raw.value.{LiteralRaw, ValueRaw} -import cats.free.Cofree import cats.data.Chain -import cats.{Eval, Semigroup} +import cats.free.Cofree +import cats.syntax.all.* import cats.syntax.apply.* -import cats.syntax.semigroup.* import cats.syntax.foldable.* -import cats.syntax.all.* +import cats.syntax.semigroup.* +import cats.{Eval, Semigroup} trait RawTagGivens { diff --git a/model/src/main/scala/aqua/model/AquaContext.scala b/model/src/main/scala/aqua/model/AquaContext.scala index e057c36fa..9be943798 100644 --- a/model/src/main/scala/aqua/model/AquaContext.scala +++ b/model/src/main/scala/aqua/model/AquaContext.scala @@ -2,23 +2,23 @@ package aqua.model import aqua.raw.arrow.FuncRaw import aqua.raw.ops.CallArrowRawTag -import aqua.raw.value.ValueRaw import aqua.raw.value.CallArrowRaw +import aqua.raw.value.ValueRaw import aqua.raw.{ConstantRaw, RawContext, RawPart, ServiceRaw, TypeRaw} import aqua.types.{AbilityType, StructType, Type} import cats.Monoid -import cats.data.NonEmptyMap import cats.data.Chain +import cats.data.NonEmptyMap import cats.kernel.Semigroup -import cats.syntax.functor.* -import cats.syntax.foldable.* -import cats.syntax.traverse.* import cats.syntax.bifunctor.* +import cats.syntax.foldable.* +import cats.syntax.functor.* import cats.syntax.monoid.* import cats.syntax.option.* -import scribe.Logging +import cats.syntax.traverse.* import scala.collection.immutable.SortedMap +import scribe.Logging case class AquaContext( module: Option[String], diff --git a/model/src/main/scala/aqua/model/ArgsCall.scala b/model/src/main/scala/aqua/model/ArgsCall.scala index 130c06d9b..3a6d15236 100644 --- a/model/src/main/scala/aqua/model/ArgsCall.scala +++ b/model/src/main/scala/aqua/model/ArgsCall.scala @@ -1,7 +1,7 @@ package aqua.model -import aqua.model.{ValueModel, VarModel} import aqua.model.ValueModel.Ability +import aqua.model.{ValueModel, VarModel} import aqua.raw.ops.Call import aqua.raw.value.{ValueRaw, VarRaw} import aqua.types.* @@ -44,7 +44,7 @@ case class ArgsCall(args: ProductType, callWith: List[ValueModel]) { * Name of argument -> (variable passed in the call, type) */ lazy val abilityArgs: Map[String, (VarModel, NamedType)] = - zipped.collect { case ((name, _), vr @ Ability(_, t, _)) => + zipped.collect { case ((name, _), Ability(vr, t)) => name -> (vr, t) }.toMap @@ -54,14 +54,7 @@ case class ArgsCall(args: ProductType, callWith: List[ValueModel]) { */ lazy val abilityArgsRenames: Map[String, String] = abilityArgs.toList.foldMap { case (name, (vm, at)) => - at.arrows.keys - .map(arrowPath => - val fullName = AbilityType.fullName(name, arrowPath) - val newFullName = 
AbilityType.fullName(vm.name, arrowPath) - fullName -> newFullName - ) - .toMap - .updated(name, vm.name) + AbilityType.renames(at)(name, vm.name) } /** diff --git a/model/src/main/scala/aqua/model/CallModel.scala b/model/src/main/scala/aqua/model/CallModel.scala index 7efb92491..8912037bc 100644 --- a/model/src/main/scala/aqua/model/CallModel.scala +++ b/model/src/main/scala/aqua/model/CallModel.scala @@ -1,20 +1,19 @@ package aqua.model -import aqua.raw.ops.Call -import aqua.types.{ArrowType, NamedType, Type} import aqua.model.ValueModel.{Ability, Arrow} +import aqua.raw.ops.Call +import aqua.types.* // TODO docs case class CallModel(args: List[ValueModel], exportTo: List[CallModel.Export]) { override def toString: String = s"[${args.mkString(" ")}] ${exportTo.mkString(" ")}" def arrowArgNames: Set[String] = args.collect { case Arrow(m, _) => - m + m.name }.toSet - def abilityArgs: List[(String, NamedType)] = args.collect { case Ability(m, t, _) => - (m, t) - } + def abilityArgs: List[(String, GeneralAbilityType)] = + args.collect { case Ability(m, t) => m.name -> t } def usesVarNames: Set[String] = args.flatMap(_.usesVarNames).toSet } diff --git a/model/src/main/scala/aqua/model/ValueModel.scala b/model/src/main/scala/aqua/model/ValueModel.scala index 79a601ed1..636ef06dd 100644 --- a/model/src/main/scala/aqua/model/ValueModel.scala +++ b/model/src/main/scala/aqua/model/ValueModel.scala @@ -56,20 +56,20 @@ object ValueModel { object Arrow { - def unapply(vm: ValueModel): Option[(String, ArrowType)] = + def unapply(vm: ValueModel): Option[(VarModel, ArrowType)] = vm match { - case VarModel(name, t: ArrowType, _) => - (name, t).some + case vm @ VarModel(_, t: ArrowType, _) => + (vm, t).some case _ => none } } object Ability { - def unapply(vm: VarModel): Option[(String, NamedType, Chain[PropertyModel])] = + def unapply(vm: VarModel): Option[(VarModel, GeneralAbilityType)] = vm match { - case VarModel(name, t: (AbilityType | ServiceType), properties) => - (name, t, properties).some + case vm @ VarModel(_, t: GeneralAbilityType, _) => + (vm, t).some case _ => none } } diff --git a/parser/src/main/scala/aqua/parser/lexer/ValueToken.scala b/parser/src/main/scala/aqua/parser/lexer/ValueToken.scala index a4a818ad7..98152a097 100644 --- a/parser/src/main/scala/aqua/parser/lexer/ValueToken.scala +++ b/parser/src/main/scala/aqua/parser/lexer/ValueToken.scala @@ -2,23 +2,23 @@ package aqua.parser.lexer import aqua.parser.Expr import aqua.parser.head.FilenameExpr -import aqua.parser.lexer.Token.* import aqua.parser.lexer.NamedArg.namedArgs +import aqua.parser.lexer.Token.* import aqua.parser.lift.LiftParser import aqua.parser.lift.LiftParser.* -import aqua.types.LiteralType import aqua.parser.lift.Span import aqua.parser.lift.Span.{P0ToSpan, PToSpan, S} +import aqua.types.LiteralType +import cats.arrow.FunctionK +import cats.data.{NonEmptyList, NonEmptyMap} import cats.parse.{Numbers, Parser as P, Parser0 as P0} import cats.syntax.comonad.* -import cats.syntax.functor.* -import cats.{~>, Comonad, Functor} -import cats.data.{NonEmptyList, NonEmptyMap} import cats.syntax.foldable.* -import cats.arrow.FunctionK -import cats.syntax.traverse.* +import cats.syntax.functor.* import cats.syntax.option.* +import cats.syntax.traverse.* +import cats.{Comonad, Functor, ~>} sealed trait ValueToken[F[_]] extends Token[F] { def mapK[K[_]: Comonad](fk: F ~> K): ValueToken[K] @@ -42,38 +42,6 @@ case class PropertyToken[F[_]: Comonad]( private def isConst(name: String): Boolean = name.forall(c => !c.isLetter || 
c.isUpper) - /** - * This method tries to convert property token to - * call arrow token. - * - * Next properties pattern is transformed: - * (Class)+ arrow() - * ^^^^^^^ - * this part is transformed to ability name. - */ - private def toCallArrow: Option[CallArrowToken[F]] = value match { - case VarToken(name) => - val ability = properties.init.traverse { - case f @ IntoField(_) => f.value.some - case _ => none - }.map( - name.value +: _ - ).filter( - _.forall(isClass) - ).map(props => name.rename(props.mkString("."))) - - (properties.last, ability) match { - case (IntoArrow(funcName, args), Some(ability)) => - CallArrowToken( - ability.asTypeToken.some, - funcName, - args - ).some - case _ => none - } - case _ => none - } - /** * This method tries to convert property token to * property token with dotted var name inside value token. @@ -134,6 +102,38 @@ case class PropertyToken[F[_]: Comonad]( case _ => none } + /** + * This method tries to convert property token to + * call arrow token. + * + * Next properties pattern is transformed: + * (Class)+ arrow() + * ^^^^^^^ + * this part is transformed to ability name. + */ + private def toCallArrow: Option[CallArrowToken[F]] = value match { + case VarToken(name) => + val ability = properties.init.traverse { + case f @ IntoField(_) => f.value.some + case _ => none + }.map( + name.value +: _ + ).filter( + _.forall(isClass) + ).map(props => name.rename(props.mkString("."))) + + (properties.last, ability) match { + case (IntoArrow(funcName, args), Some(ability)) => + CallArrowToken( + ability.asTypeToken.some, + funcName, + args + ).some + case _ => none + } + case _ => none + } + /** * This is a hacky method to adjust parsing result * to format that was used previously. @@ -145,6 +145,12 @@ case class PropertyToken[F[_]: Comonad]( */ def adjust: Option[ValueToken[F]] = toCallArrow.orElse(toDottedName) + + lazy val leadingName: Option[NamedTypeToken[F]] = + value match { + case VarToken(name) => name.asTypeToken.some + case _ => none + } } object PropertyToken { diff --git a/semantics/src/main/scala/aqua/semantics/CompilerState.scala b/semantics/src/main/scala/aqua/semantics/CompilerState.scala index 84c57a4dd..1ea492121 100644 --- a/semantics/src/main/scala/aqua/semantics/CompilerState.scala +++ b/semantics/src/main/scala/aqua/semantics/CompilerState.scala @@ -6,10 +6,10 @@ import aqua.raw.RawContext import aqua.semantics.rules.abilities.AbilitiesState import aqua.semantics.rules.definitions.DefinitionsState import aqua.semantics.rules.locations.LocationsState -import aqua.semantics.rules.names.NamesState -import aqua.semantics.rules.types.TypesState import aqua.semantics.rules.mangler.ManglerState +import aqua.semantics.rules.names.NamesState import aqua.semantics.rules.report.ReportState +import aqua.semantics.rules.types.TypesState import cats.Semigroup import cats.data.{Chain, State} diff --git a/semantics/src/main/scala/aqua/semantics/RawSemantics.scala b/semantics/src/main/scala/aqua/semantics/RawSemantics.scala index 2d2ea61bf..922823dba 100644 --- a/semantics/src/main/scala/aqua/semantics/RawSemantics.scala +++ b/semantics/src/main/scala/aqua/semantics/RawSemantics.scala @@ -1,31 +1,31 @@ package aqua.semantics import aqua.errors.Errors.internalError +import aqua.parser.lexer.{LiteralToken, Token} +import aqua.parser.{Ast, Expr} import aqua.raw.ops.* +import aqua.raw.{Raw, RawContext, RawPart} +import aqua.semantics.header.Picker +import aqua.semantics.header.Picker.* import aqua.semantics.rules.abilities.{AbilitiesAlgebra, 
AbilitiesInterpreter, AbilitiesState} import aqua.semantics.rules.definitions.{DefinitionsAlgebra, DefinitionsInterpreter} import aqua.semantics.rules.locations.{DummyLocationsInterpreter, LocationsAlgebra} -import aqua.semantics.rules.names.{NamesAlgebra, NamesInterpreter} import aqua.semantics.rules.mangler.{ManglerAlgebra, ManglerInterpreter} -import aqua.semantics.rules.types.{TypesAlgebra, TypesInterpreter} +import aqua.semantics.rules.names.{NamesAlgebra, NamesInterpreter} import aqua.semantics.rules.report.{ReportAlgebra, ReportInterpreter} -import aqua.semantics.header.Picker -import aqua.semantics.header.Picker.* -import aqua.raw.{Raw, RawContext, RawPart} -import aqua.parser.{Ast, Expr} -import aqua.parser.lexer.{LiteralToken, Token} +import aqua.semantics.rules.types.{TypesAlgebra, TypesInterpreter} -import cats.{Eval, Monad} import cats.data.{Chain, EitherT, NonEmptyChain, State, StateT, ValidatedNec, Writer} import cats.syntax.applicative.* -import cats.syntax.option.* import cats.syntax.apply.* import cats.syntax.flatMap.* -import cats.syntax.functor.* import cats.syntax.foldable.* +import cats.syntax.functor.* +import cats.syntax.option.* import cats.syntax.reducible.* -import cats.syntax.traverse.* import cats.syntax.semigroup.* +import cats.syntax.traverse.* +import cats.{Eval, Monad} import scribe.Logging class RawSemantics[S[_]](using diff --git a/semantics/src/main/scala/aqua/semantics/expr/func/CallArrowSem.scala b/semantics/src/main/scala/aqua/semantics/expr/func/CallArrowSem.scala index 44f4ad1f4..6d291d130 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/func/CallArrowSem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/func/CallArrowSem.scala @@ -10,14 +10,15 @@ import aqua.semantics.rules.ValuesAlgebra import aqua.semantics.rules.names.NamesAlgebra import aqua.semantics.rules.types.TypesAlgebra import aqua.types.{ProductType, StreamType, Type} + import cats.Monad -import cats.syntax.flatMap.* -import cats.syntax.functor.* -import cats.syntax.traverse.* -import cats.syntax.option.* import cats.syntax.applicative.* import cats.syntax.apply.* import cats.syntax.comonad.* +import cats.syntax.flatMap.* +import cats.syntax.functor.* +import cats.syntax.option.* +import cats.syntax.traverse.* class CallArrowSem[S[_]](val expr: CallArrowExpr[S]) extends AnyVal { diff --git a/semantics/src/main/scala/aqua/semantics/expr/func/ReturnSem.scala b/semantics/src/main/scala/aqua/semantics/expr/func/ReturnSem.scala index 4a42283ee..9f233b1b1 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/func/ReturnSem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/func/ReturnSem.scala @@ -1,34 +1,37 @@ package aqua.semantics.expr.func -import aqua.raw.ops.ReturnTag +import aqua.helpers.syntax.optiont.* import aqua.parser.expr.func.ReturnExpr import aqua.raw.Raw +import aqua.raw.ops.ReturnTag import aqua.semantics.Prog import aqua.semantics.rules.ValuesAlgebra import aqua.semantics.rules.types.TypesAlgebra + +import cats.Monad +import cats.data.{NonEmptyList, OptionT} import cats.syntax.applicative.* +import cats.syntax.flatMap.* import cats.syntax.functor.* import cats.syntax.traverse.* -import cats.data.NonEmptyList -import cats.syntax.flatMap.* -import cats.Monad class ReturnSem[S[_]](val expr: ReturnExpr[S]) extends AnyVal { - def program[Alg[_]: Monad](implicit + def program[Alg[_]: Monad](using V: ValuesAlgebra[S, Alg], T: TypesAlgebra[S, Alg] - ): Prog[Alg, Raw] = - expr.values - .traverse(v => V.valueToRaw(v).map(_.map(v -> _))) - .map(_.sequence) - 
.flatMap { - case Some(vals) => - T.checkArrowReturn(vals).map[Raw] { - case true => ReturnTag(vals.map(_._2)).leaf.toFuncOp - case false => Raw.error("Return types validation failed") - } - case None => - Raw.error("Return types resolution failed").pure[Alg] - } + ): Prog[Alg, Raw] = ( + for { + vals <- expr.values.traverse(v => + OptionT( + V.valueToRaw(v) + ).map(v -> _) + ) + _ <- OptionT.withFilterF( + T.checkArrowReturn(vals) + ) + } yield ReturnTag(vals.map(_._2)).leaf.toFuncOp + ).getOrElse( + Raw.error("Return values validation failed") + ) } diff --git a/semantics/src/main/scala/aqua/semantics/header/HeaderHandler.scala b/semantics/src/main/scala/aqua/semantics/header/HeaderHandler.scala index 68a2672fb..4cadd52bf 100644 --- a/semantics/src/main/scala/aqua/semantics/header/HeaderHandler.scala +++ b/semantics/src/main/scala/aqua/semantics/header/HeaderHandler.scala @@ -12,13 +12,13 @@ import cats.free.Cofree import cats.instances.list.* import cats.instances.option.* import cats.kernel.Semigroup -import cats.syntax.option.* +import cats.syntax.apply.* +import cats.syntax.bifunctor.* import cats.syntax.foldable.* import cats.syntax.functor.* +import cats.syntax.option.* import cats.syntax.semigroup.* import cats.syntax.validated.* -import cats.syntax.bifunctor.* -import cats.syntax.apply.* import cats.{Comonad, Eval, Monoid} class HeaderHandler[S[_]: Comonad, C](using diff --git a/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala b/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala index 30429a31d..3753c3b1f 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala @@ -100,20 +100,33 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](using } case prop @ PropertyToken(value, properties) => - prop.adjust.fold( - for { - valueRaw <- valueToRaw(value) - result <- valueRaw.flatTraverse(raw => - properties - .foldLeftM(raw) { case (prev, op) => - OptionT( - resolveSingleProperty(prev.`type`, op) - ).map(prop => ApplyPropertyRaw(prev, prop)) - } - .value + lazy val default = for { + valueRaw <- valueToRaw(value) + result <- valueRaw.flatTraverse(raw => + properties + .foldLeftM(raw) { case (prev, op) => + OptionT( + resolveSingleProperty(prev.`type`, op) + ).map(prop => ApplyPropertyRaw(prev, prop)) + } + .value + ) + } yield result + + /** + * This is a HACKERY!!! + * Imports have very different mechanism of resolving, + * so here we try to differentiate them and adjust property + * token accordingly. 
+ */ + prop.leadingName.fold(default)(name => + A.isDefinedAbility(name) + .flatMap(isDefined => + prop.adjust + .filter(_ => isDefined) + .fold(default)(valueToRaw) ) - } yield result - )(valueToRaw) + ) case dvt @ NamedValueToken(typeName, fields) => (for { @@ -384,7 +397,7 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](using ): OptionT[Alg, CallArrowRaw] = { lazy val nameTypeFromAbility = OptionT( N.read(ab.asName, mustBeDefined = false) - ).collect { case nt: (AbilityType | ServiceType) => ab.asName -> nt } + ).collect { case nt: GeneralAbilityType => ab.asName -> nt } lazy val nameTypeFromService = for { st <- OptionT( diff --git a/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesAlgebra.scala b/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesAlgebra.scala index 59996224d..175ba69c6 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesAlgebra.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesAlgebra.scala @@ -15,6 +15,8 @@ trait AbilitiesAlgebra[S[_], Alg[_]] { defaultId: Option[ValueRaw] ): Alg[Boolean] + def isDefinedAbility(name: NamedTypeToken[S]): Alg[Boolean] + def getArrow(name: NamedTypeToken[S], arrow: Name[S]): Alg[Option[ArrowType]] def renameService(name: NamedTypeToken[S]): Alg[Option[String]] diff --git a/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala index 7cfcbb129..7f3038700 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala @@ -4,19 +4,19 @@ import aqua.parser.lexer.{Name, NamedTypeToken, Token, ValueToken} import aqua.raw.value.ValueRaw import aqua.raw.{RawContext, ServiceRaw} import aqua.semantics.Levenshtein -import aqua.semantics.rules.report.ReportAlgebra -import aqua.semantics.rules.mangler.ManglerAlgebra import aqua.semantics.rules.locations.LocationsAlgebra -import aqua.semantics.rules.{abilities, StackInterpreter} +import aqua.semantics.rules.mangler.ManglerAlgebra +import aqua.semantics.rules.report.ReportAlgebra +import aqua.semantics.rules.{StackInterpreter, abilities} import aqua.types.{ArrowType, ServiceType} import cats.data.{NonEmptyMap, State} -import cats.syntax.functor.* +import cats.syntax.applicative.* import cats.syntax.apply.* import cats.syntax.foldable.* -import cats.syntax.traverse.* -import cats.syntax.applicative.* +import cats.syntax.functor.* import cats.syntax.option.* +import cats.syntax.traverse.* import monocle.Lens import monocle.macros.GenLens @@ -69,6 +69,9 @@ class AbilitiesInterpreter[S[_], X](using locations.pointTokenWithFieldLocation(name.value, name, arrow.value, arrow) } + override def isDefinedAbility(name: NamedTypeToken[S]): State[X, Boolean] = + getState.map(_.abilities.contains(name.value)) + override def getArrow(name: NamedTypeToken[S], arrow: Name[S]): SX[Option[ArrowType]] = getAbility(name.value).flatMap { case Some(abCtx) => diff --git a/semantics/src/main/scala/aqua/semantics/rules/names/NamesState.scala b/semantics/src/main/scala/aqua/semantics/rules/names/NamesState.scala index 873c6b107..3d62cbcf1 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/names/NamesState.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/names/NamesState.scala @@ -3,6 +3,7 @@ package aqua.semantics.rules.names import aqua.parser.lexer.{Name, Token} import aqua.raw.RawContext import 
aqua.types.{ArrowType, Type} + import cats.kernel.Monoid import cats.syntax.functor.* @@ -64,4 +65,5 @@ object NamesState { }, constants = context.allValues.map { case (s, vm) => (s, vm.`type`) } ) + } diff --git a/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala index 00fd16784..7df3cddd5 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala @@ -209,27 +209,32 @@ class TypesInterpreter[S[_], X](using arguments: List[ValueRaw] ): State[X, Option[PropertyRaw]] = { rootT match { - case AbilityType(name, fieldsAndArrows) => - fieldsAndArrows(op.name.value).fold( - report - .error( - op, - s"Arrow `${op.name.value}` not found in type `$name`, available: ${fieldsAndArrows.toNel.toList.map(_._1).mkString(", ")}" - ) - .as(None) - ) { - case at @ ArrowType(_, _) => - locations - .pointFieldLocation(name, op.name.value, op) - .as(Some(IntoArrowRaw(op.name.value, at, arguments))) - case _ => + case ab: GeneralAbilityType => + val name = ab.name + val fields = ab.fields + lazy val fieldNames = fields.toNel.toList.map(_._1).mkString(", ") + fields(op.name.value) + .fold( report .error( op, - s"Unexpected. `${op.name.value}` must be an arrow." + s"Arrow `${op.name.value}` not found in type `$name`, " + + s"available: $fieldNames" ) .as(None) - } + ) { + case at @ ArrowType(_, _) => + locations + .pointFieldLocation(name, op.name.value, op) + .as(Some(IntoArrowRaw(op.name.value, at, arguments))) + case _ => + report + .error( + op, + s"Unexpected. `${op.name.value}` must be an arrow." + ) + .as(None) + } case t => t.properties .get(op.name.value) @@ -348,7 +353,8 @@ class TypesInterpreter[S[_], X](using report .error( token, - s"Number of fields doesn't match the data type, expected: $expected, given: $givenType" + "Number of fields doesn't match, " + + s"expected: $expected, given: $givenType" ) .as(false) } else { @@ -544,28 +550,34 @@ class TypesInterpreter[S[_], X](using report .error( values.head._1, - "Return expression was already used in scope; you can use only one Return in an arrow declaration, use conditional return pattern if you need to return based on condition" + "Return expression was already used in scope; " + + "you can use only one Return in an arrow declaration, " + + "use conditional return pattern if you need to return based on condition" ) .as(frame -> false) else if (frame.token.res.isEmpty) report .error( values.head._1, - "No return type declared for this arrow, please remove `<- ...` expression or add `-> ...` return type(s) declaration to the arrow" + "No return type declared for this arrow, " + + "please remove `<- ...` expression " + + "or add `-> ...` return type(s) declaration to the arrow" ) .as(frame -> false) else if (frame.token.res.length > values.length) report .error( values.last._1, - s"Expected ${frame.token.res.length - values.length} more values to be returned, see return type declaration" + s"Expected ${frame.token.res.length - values.length} more " + + s"values to be returned, see return type declaration" ) .as(frame -> false) else if (frame.token.res.length < values.length) report .error( values.toList.drop(frame.token.res.length).headOption.getOrElse(values.last)._1, - s"Too many values are returned from this arrow, this one is unexpected. 
Defined return type: ${frame.arrowType.codomain}" + s"Too many values are returned from this arrow, " + + s"this is unexpected. Defined return type: ${frame.arrowType.codomain}" ) .as(frame -> false) else diff --git a/semantics/src/test/scala/aqua/semantics/SemanticsSpec.scala b/semantics/src/test/scala/aqua/semantics/SemanticsSpec.scala index 6d7b01339..2da479bb9 100644 --- a/semantics/src/test/scala/aqua/semantics/SemanticsSpec.scala +++ b/semantics/src/test/scala/aqua/semantics/SemanticsSpec.scala @@ -13,7 +13,7 @@ import aqua.types.* import cats.Eval import cats.data.State import cats.data.Validated -import cats.data.{Chain, EitherNec, NonEmptyChain} +import cats.data.{Chain, EitherNec, NonEmptyChain, NonEmptyMap} import cats.free.Cofree import cats.syntax.foldable.* import cats.syntax.option.* @@ -102,18 +102,29 @@ class SemanticsSpec extends AnyFlatSpec with Matchers with Inside { | """.stripMargin - def testServiceCallStr(str: String) = - CallArrowRawTag - .ability( - abilityName = "Test", - funcName = "testCallStr", - call = Call(LiteralRaw.quote(str) :: Nil, Nil), - arrowType = ArrowType( - ProductType.labelled(("s" -> ScalarType.string) :: Nil), - ProductType(ScalarType.string :: Nil) - ) + def testServiceCallStr(str: String) = { + val arrowType = ArrowType( + ProductType.labelled(("s" -> ScalarType.string) :: Nil), + ProductType(ScalarType.string :: Nil) + ) + + CallArrowRawTag( + Nil, + ApplyPropertyRaw( + VarRaw( + "Test", + ServiceType( + "Test", + NonEmptyMap.of( + "testCallStr" -> arrowType, + "testCall" -> ArrowType(NilType, NilType) + ) + ) + ), + IntoArrowRaw("testCallStr", arrowType, LiteralRaw.quote(str) :: Nil) ) - .leaf + ).leaf + } def equ(left: ValueRaw, right: ValueRaw): ApplyBinaryOpRaw = ApplyBinaryOpRaw(ApplyBinaryOpRaw.Op.Eq, left, right, ScalarType.bool) @@ -152,14 +163,21 @@ class SemanticsSpec extends AnyFlatSpec with Matchers with Inside { insideBody(script) { body => val arrowType = ArrowType(NilType, ConsType.cons(ScalarType.string, NilType)) - val serviceCall = CallArrowRawTag - .ability( - abilityName = "A", - funcName = "fn1", - call = emptyCall, - arrowType = arrowType + val serviceCall = CallArrowRawTag( + Nil, + ApplyPropertyRaw( + VarRaw( + "A", + ServiceType( + "A", + NonEmptyMap.of( + "fn1" -> arrowType + ) + ) + ), + IntoArrowRaw("fn1", arrowType, Nil) ) - .leaf + ).leaf val expected = ParTag.wrap( @@ -880,13 +898,14 @@ class SemanticsSpec extends AnyFlatSpec with Matchers with Inside { |""".stripMargin insideBody(script) { body => - matchSubtree(body) { case (CallArrowRawTag(_, ca: CallArrowRaw), _) => - inside(ca.arguments) { case (c: CollectionRaw) :: Nil => - c.values.exists { - case VarRaw(name, _) => name == "stream" - case _ => false - } should be(true) - } + matchSubtree(body) { + case (CallArrowRawTag(_, ApplyPropertyRaw(_, IntoArrowRaw("consume", _, args))), _) => + inside(args) { case (c: CollectionRaw) :: Nil => + c.values.exists { + case VarRaw(name, _) => name == "stream" + case _ => false + } should be(true) + } } } } diff --git a/types/src/main/scala/aqua/types/CompareTypes.scala b/types/src/main/scala/aqua/types/CompareTypes.scala index 3dc276b72..1b7a293a0 100644 --- a/types/src/main/scala/aqua/types/CompareTypes.scala +++ b/types/src/main/scala/aqua/types/CompareTypes.scala @@ -124,7 +124,8 @@ object CompareTypes { case (x: OptionType, y: StreamType) => apply(x.element, y.element) case (x: OptionType, y: ArrayType) => apply(x.element, y.element) case (x: StreamType, y: StreamType) => apply(x.element, y.element) - case (lnt: 
AbilityType, rnt: AbilityType) => compareNamed(lnt.fields, rnt.fields) + case (lnt: GeneralAbilityType, rnt: GeneralAbilityType) => + compareNamed(lnt.fields, rnt.fields) case (lnt: StructType, rnt: StructType) => compareNamed(lnt.fields, rnt.fields) // Literals and scalars diff --git a/types/src/main/scala/aqua/types/Type.scala b/types/src/main/scala/aqua/types/Type.scala index 5df10c500..c9b0bfc63 100644 --- a/types/src/main/scala/aqua/types/Type.scala +++ b/types/src/main/scala/aqua/types/Type.scala @@ -1,6 +1,7 @@ package aqua.types import aqua.errors.Errors.internalError +import aqua.types.* import aqua.types.Type.* import cats.Eval @@ -8,9 +9,11 @@ import cats.PartialOrder import cats.data.NonEmptyList import cats.data.NonEmptyMap import cats.syntax.applicative.* +import cats.syntax.foldable.* import cats.syntax.option.* import cats.syntax.partialOrder.* import cats.syntax.traverse.* +import scala.collection.immutable.SortedMap sealed trait Type { @@ -340,72 +343,64 @@ sealed trait NamedType extends Type { def fields: NonEmptyMap[String, Type] /** - * Get all arrows defined in this type and its sub-abilities. - * Paths to arrows are returned **without** type name + * Get all fields defined in this type and its fields of named type. + * Paths to fields are returned **without** type name * to allow renaming on call site. */ - lazy val arrows: Map[String, ArrowType] = { - def getArrowsEval(path: Option[String], nt: NamedType): Eval[List[(String, ArrowType)]] = - nt.fields.toNel.toList.flatTraverse { - // sub-arrows could be in abilities or services - case (innerName, innerType: (ServiceType | AbilityType)) => - val newPath = path.fold(innerName)(AbilityType.fullName(_, innerName)) - getArrowsEval(newPath.some, innerType) - case (aName, aType: ArrowType) => - val newPath = path.fold(aName)(AbilityType.fullName(_, aName)) - List(newPath -> aType).pure - case _ => Nil.pure + final def allFields: NonEmptyMap[String, Type] = { + def allEval(path: Option[String], nt: NamedType): Eval[List[(String, Type)]] = { + val qualified = (name: String) => path.fold(name)(AbilityType.fullName(_, name)) + val fieldsList = nt.fields.toNel.toList + val currentFields = fieldsList.map { case (name, t) => + qualified(name) -> t } + fieldsList.flatTraverse { + case (name, t: NamedType) => + allEval(qualified(name).some, t) + case _ => Eval.now(Nil) + }.map(currentFields ++ _) + } - getArrowsEval(None, this).value.toMap + allEval(none, this) + .map(l => + /** + * As fields are `NonEmptyMap`, this + * operation should be safe + */ + NonEmptyMap.fromMapUnsafe(SortedMap.from(l)) + ) + .value } + /** + * Get all arrows defined in this type and its sub-abilities. + * Paths to arrows are returned **without** type name + * to allow renaming on call site. + */ + lazy val arrows: Map[String, ArrowType] = + allFields.toSortedMap.toMap.collect { case (name, at: ArrowType) => + name -> at + } + /** * Get all abilities defined in this type and its sub-abilities. * Paths to abilities are returned **without** type name * to allow renaming on call site. 
*/ - lazy val abilities: Map[String, AbilityType] = { - def getAbilitiesEval( - path: Option[String], - nt: NamedType - ): Eval[List[(String, AbilityType)]] = - nt.fields.toNel.toList.flatTraverse { - // sub-abilities could be only in abilities - case (abName, abType: AbilityType) => - val fullName = path.fold(abName)(AbilityType.fullName(_, abName)) - getAbilitiesEval(fullName.some, abType).map( - (fullName -> abType) :: _ - ) - case _ => Nil.pure - } - - getAbilitiesEval(None, this).value.toMap - } + lazy val abilities: Map[String, AbilityType] = + allFields.toSortedMap.toMap.collect { case (name, at: AbilityType) => + name -> at + } /** * Get all variables defined in this type and its sub-abilities. * Paths to variables are returned **without** type name * to allow renaming on call site. */ - lazy val variables: Map[String, DataType] = { - def getVariablesEval( - path: Option[String], - nt: NamedType - ): Eval[List[(String, DataType)]] = - nt.fields.toNel.toList.flatTraverse { - // sub-variables could be only in abilities - case (abName, abType: AbilityType) => - val newPath = path.fold(abName)(AbilityType.fullName(_, abName)) - getVariablesEval(newPath.some, abType) - case (dName, dType: DataType) => - val newPath = path.fold(dName)(AbilityType.fullName(_, dName)) - List(newPath -> dType).pure - case _ => Nil.pure - } - - getVariablesEval(None, this).value.toMap - } + lazy val variables: Map[String, DataType] = + allFields.toSortedMap.toMap.collect { case (name, at: DataType) => + name -> at + } } // Struct is an unordered collection of labelled types @@ -443,7 +438,16 @@ case class StreamType(override val element: DataType) extends MutableStreamType override def withElement(t: DataType): CollectionType = copy(element = t) } -case class ServiceType(name: String, fields: NonEmptyMap[String, ArrowType]) extends NamedType { +/** + * This type unites types that work as abilities, + * namely `ServiceType` and `AbilityType` + */ +sealed trait GeneralAbilityType extends NamedType + +case class ServiceType( + name: String, + fields: NonEmptyMap[String, ArrowType] +) extends GeneralAbilityType { override val specifier: String = "service" @@ -452,7 +456,10 @@ case class ServiceType(name: String, fields: NonEmptyMap[String, ArrowType]) ext } // Ability is an unordered collection of labelled types and arrows -case class AbilityType(name: String, fields: NonEmptyMap[String, Type]) extends NamedType { +case class AbilityType( + name: String, + fields: NonEmptyMap[String, Type] +) extends GeneralAbilityType { override val specifier: String = "ability" @@ -462,6 +469,19 @@ case class AbilityType(name: String, fields: NonEmptyMap[String, Type]) extends object AbilityType { def fullName(name: String, field: String) = s"$name.$field" + + def renames(at: NamedType)( + name: String, + newName: String + ): Map[String, String] = + at.allFields.keys.toList + .map(path => + val fullName = AbilityType.fullName(name, path) + val newFullName = AbilityType.fullName(newName, path) + fullName -> newFullName + ) + .toMap + .updated(name, newName) } /** diff --git a/types/src/test/scala/aqua/types/TypeVarianceSpec.scala b/types/src/test/scala/aqua/types/TypeVarianceSpec.scala new file mode 100644 index 000000000..2aee2d5fb --- /dev/null +++ b/types/src/test/scala/aqua/types/TypeVarianceSpec.scala @@ -0,0 +1,37 @@ +package aqua.types + +import aqua.types.* + +import cats.data.NonEmptyMap +import org.scalacheck.* +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import 
org.scalatestplus.scalacheck.ScalaCheckPropertyChecks +import scala.collection.immutable.SortedMap + +class TypeVarianceSpec extends AnyFlatSpec with ScalaCheckPropertyChecks with Matchers { + + "ServiceType" should "be subtype of AbilityType" in { + given Arbitrary[NonEmptyMap[String, ArrowType]] = Arbitrary( + Gen + .nonEmptyListOf( + for { + name <- anyName + typ <- arrowOf(Arbitrary.arbitrary[DataType]) + } yield name -> typ + ) + .map(m => NonEmptyMap.fromMapUnsafe(SortedMap.from(m))) + ) + + forAll { + ( + arrows: NonEmptyMap[String, ArrowType], + abName: String, + srvName: String + ) => + AbilityType(abName, arrows).acceptsValueOf( + ServiceType(srvName, arrows) + ) shouldBe true + } + } +} diff --git a/types/src/test/scala/aqua/types/package.scala b/types/src/test/scala/aqua/types/package.scala new file mode 100644 index 000000000..fba26edda --- /dev/null +++ b/types/src/test/scala/aqua/types/package.scala @@ -0,0 +1,112 @@ +package aqua + +import cats.data.NonEmptyMap +import org.scalacheck.* +import scala.collection.immutable.SortedMap + +package object types { + + val anyName: Gen[String] = for { + first <- Gen.alphaChar + other <- Gen.alphaStr + } yield first + other + + def productOf[T <: Type](gen: Gen[T]): Gen[ProductType] = + Gen.sized(size => Gen.resize(size / 2, Gen.listOf(gen).map(ProductType.apply))) + + def labeledProductOf[T <: Type](gen: Gen[T]): Gen[ProductType] = + Gen.sized(size => + Gen + .resize( + size / 2, + Gen.listOf(for { + name <- anyName + typ <- gen + } yield name -> typ) + ) + .map(ProductType.labelled) + ) + + def arrowOf[T <: Type](gen: Gen[T]): Gen[ArrowType] = + Gen.sized(size => + for { + input <- Gen.resize(size / 2, labeledProductOf(gen)) + output <- Gen.resize(size / 2, productOf(gen)) + } yield ArrowType(input, output) + ) + + given Arbitrary[ScalarType] = Arbitrary(Gen.oneOf(ScalarType.all)) + + given Arbitrary[LiteralType] = Arbitrary( + Gen.oneOf( + LiteralType.bool, + LiteralType.unsigned, + LiteralType.signed, + LiteralType.float, + LiteralType.number, + LiteralType.string + ) + ) + + private def fromData[T](f: DataType => T): Arbitrary[T] = + Arbitrary( + Gen.sized(size => + Gen + .resize( + size / 2, + Arbitrary.arbitrary[DataType] + ) + .map(f) + ) + ) + + given Arbitrary[OptionType] = + fromData(OptionType.apply) + + given Arbitrary[ArrayType] = + fromData(ArrayType.apply) + + given Arbitrary[CanonStreamType] = + fromData(CanonStreamType.apply) + + given Arbitrary[StructType] = Arbitrary( + Gen.sized(size => + for { + name <- anyName + fields <- Gen + .nonEmptyMap( + for { + name <- anyName + typ <- Gen.resize( + size / 2, + Arbitrary.arbitrary[DataType] + ) + } yield name -> typ + ) + .map(m => NonEmptyMap.fromMapUnsafe(SortedMap.from(m))) + } yield StructType(name, fields) + ) + ) + + given Arbitrary[DataType] = Arbitrary( + Gen.sized(size => + if (size <= 0) + Gen.oneOf( + Arbitrary.arbitrary[ScalarType], + Arbitrary.arbitrary[LiteralType] + ) + else + Gen.resize( + size / 2, + Gen.oneOf( + Arbitrary.arbitrary[ScalarType], + Arbitrary.arbitrary[LiteralType], + Arbitrary.arbitrary[OptionType], + Arbitrary.arbitrary[ArrayType], + Arbitrary.arbitrary[CanonStreamType], + Arbitrary.arbitrary[StructType] + ) + ) + ) + ) +} From f090412185fb4d5fb7c4e2148289f58d7863d009 Mon Sep 17 00:00:00 2001 From: Anatolios Laskaris Date: Mon, 27 Nov 2023 12:51:37 +0200 Subject: [PATCH 09/30] chore: Fix e2e (#997) Fix e2e --- .github/workflows/e2e.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/e2e.yml 
b/.github/workflows/e2e.yml index a233a47da..8683d3561 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -44,10 +44,9 @@ jobs: fcli: needs: aqua - uses: fluencelabs/fluence-cli/.github/workflows/tests.yml@use-js-client-0.5.0 + uses: fluencelabs/fluence-cli/.github/workflows/tests.yml@main with: aqua-snapshots: "${{ needs.aqua.outputs.aqua-snapshots }}" - ref: use-js-client-0.5.0 registry: needs: From e999bc08b125a618b18d300e1b51d44882539222 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 27 Nov 2023 11:07:13 +0000 Subject: [PATCH 10/30] chore(deps): update dependency @fluencelabs/interfaces to v0.9.0 (#990) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- api/api-npm/package.json | 2 +- pnpm-lock.yaml | 10 ++++++++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/api/api-npm/package.json b/api/api-npm/package.json index 9ac22bb13..94e60bcf4 100644 --- a/api/api-npm/package.json +++ b/api/api-npm/package.json @@ -26,7 +26,7 @@ }, "homepage": "https://github.com/fluencelabs/aqua#readme", "devDependencies": { - "@fluencelabs/interfaces": "0.8.2", + "@fluencelabs/interfaces": "0.9.0", "prettier": "3.0.0" } } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 2835c62d1..7de16d005 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -15,8 +15,8 @@ importers: api/api-npm: devDependencies: '@fluencelabs/interfaces': - specifier: 0.8.2 - version: 0.8.2 + specifier: 0.9.0 + version: 0.9.0 prettier: specifier: 3.0.0 version: 3.0.0 @@ -519,6 +519,12 @@ packages: /@fluencelabs/interfaces@0.8.2: resolution: {integrity: sha512-WJ7o51jaBSzUsYENxCEJpv91KiodH1nQ6uKdAn5chWsOQzDu57d3pa6IW1Lu/wh4jtefnNen+jF1esENQCc0BA==} engines: {node: '>=10', pnpm: '>=3'} + dev: false + + /@fluencelabs/interfaces@0.9.0: + resolution: {integrity: sha512-k3VGi1ziX1SC0/DtKNDr2sWAlQ1MspuQOd2TZ5ex1SlwhRAPTSOoKWHpujSto5RKCQ3wiyQiNBVgkTRVd17gfg==} + engines: {node: '>=10', pnpm: '>=3'} + dev: true /@fluencelabs/js-client-isomorphic@0.2.2: resolution: {integrity: sha512-sFXVqOnS+FmHfXXAamZiKq0qnCJTl5T48+cieUtdX1dWYixk1mEXyW2olRwmzEun0L6P7U5MQ+qA/aj+t7CEJQ==} From 5dd4a399a758c817f3182d3212a74e41e177e944 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 27 Nov 2023 14:18:54 +0300 Subject: [PATCH 11/30] chore(deps): update dependency com.eed3si9n:sbt-assembly to v2.1.5 (#991) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index a7626a603..edb88a303 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,4 +1,4 @@ -addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "2.1.4") +addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "2.1.5") addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.13.2") addSbtPlugin("org.portable-scala" % "sbt-scalajs-crossproject" % "1.3.2") addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0") From 9ccb062c00ad6eb7d3e416aaa5b9ff04821d57d8 Mon Sep 17 00:00:00 2001 From: InversionSpaces Date: Mon, 27 Nov 2023 12:20:13 +0100 Subject: [PATCH 12/30] fix(e2e): Use `main` branch for `fcli` (#987) Use main branch --- .github/workflows/e2e.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index 8683d3561..de73256dd 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -44,7 
+44,7 @@ jobs: fcli: needs: aqua - uses: fluencelabs/fluence-cli/.github/workflows/tests.yml@main + uses: fluencelabs/cli/.github/workflows/tests.yml@main with: aqua-snapshots: "${{ needs.aqua.outputs.aqua-snapshots }}" From f980231748a40aec4e6ee33ac5fee618f7f2f776 Mon Sep 17 00:00:00 2001 From: InversionSpaces Date: Mon, 27 Nov 2023 12:35:15 +0100 Subject: [PATCH 13/30] fix(tests): Fix imports in integration tests (#998) Fix imports --- integration-tests/src/__test__/examples.spec.ts | 2 +- integration-tests/src/examples/abilityCall.ts | 2 +- integration-tests/src/examples/structuralTypingCall.ts | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/integration-tests/src/__test__/examples.spec.ts b/integration-tests/src/__test__/examples.spec.ts index 23b0204df..2d40c25fc 100644 --- a/integration-tests/src/__test__/examples.spec.ts +++ b/integration-tests/src/__test__/examples.spec.ts @@ -88,7 +88,7 @@ import { modifyStreamCall, } from "../examples/streamArgsCall.js"; import { streamResultsCall } from "../examples/streamResultsCall.js"; -import { structuralTypingCall } from "../examples/structuralTypingCall"; +import { structuralTypingCall } from "../examples/structuralTypingCall.js"; import { servicesAsAbilitiesCall, expectedServiceResults, diff --git a/integration-tests/src/examples/abilityCall.ts b/integration-tests/src/examples/abilityCall.ts index 843cbb770..735668221 100644 --- a/integration-tests/src/examples/abilityCall.ts +++ b/integration-tests/src/examples/abilityCall.ts @@ -9,7 +9,7 @@ import { multipleAbilityWithClosure, registerMySrv, returnSrvAsAbility, -} from "../compiled/examples/abilities"; +} from "../compiled/examples/abilities.js"; export async function abilityCall(): Promise<[string, string, string, number]> { registerSomeService({ diff --git a/integration-tests/src/examples/structuralTypingCall.ts b/integration-tests/src/examples/structuralTypingCall.ts index cbef5b8ef..93a8224c9 100644 --- a/integration-tests/src/examples/structuralTypingCall.ts +++ b/integration-tests/src/examples/structuralTypingCall.ts @@ -1,4 +1,4 @@ -import { structuralTypingTest } from "../compiled/examples/structuraltyping"; +import { structuralTypingTest } from "../compiled/examples/structuraltyping.js"; export async function structuralTypingCall(): Promise { return await structuralTypingTest(); From 38e4e10bb8bea2da79af87099fb548c568756b30 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 27 Nov 2023 11:48:09 +0000 Subject: [PATCH 14/30] fix(deps): update dependency @fluencelabs/js-client to v0.5.3 (#978) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- integration-tests/package.json | 2 +- pnpm-lock.yaml | 84 +++++++--------------------------- 2 files changed, 17 insertions(+), 69 deletions(-) diff --git a/integration-tests/package.json b/integration-tests/package.json index 25b1577a7..64649fa70 100644 --- a/integration-tests/package.json +++ b/integration-tests/package.json @@ -44,7 +44,7 @@ }, "dependencies": { "@fluencelabs/fluence-network-environment": "1.1.2", - "@fluencelabs/js-client": "0.4.3", + "@fluencelabs/js-client": "0.5.3", "deep-equal": "2.2.1", "loglevel": "1.8.1" }, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 7de16d005..79a45e539 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -27,8 +27,8 @@ importers: specifier: 1.1.2 version: 1.1.2 '@fluencelabs/js-client': - specifier: 0.4.3 - version: 0.4.3 + specifier: 0.5.3 + version: 0.5.3 deep-equal: specifier: 
2.2.1 version: 2.2.1 @@ -516,37 +516,31 @@ packages: resolution: {integrity: sha512-1Bp2gBy3oMEILMynFpOIFK/q2Pj792xpnb3AJs5QcTQAaHz9V2nrEI8OOPwBAFTmjmLBirXBqQQX63O+ePH7yg==} dev: false - /@fluencelabs/interfaces@0.8.2: - resolution: {integrity: sha512-WJ7o51jaBSzUsYENxCEJpv91KiodH1nQ6uKdAn5chWsOQzDu57d3pa6IW1Lu/wh4jtefnNen+jF1esENQCc0BA==} - engines: {node: '>=10', pnpm: '>=3'} - dev: false - /@fluencelabs/interfaces@0.9.0: resolution: {integrity: sha512-k3VGi1ziX1SC0/DtKNDr2sWAlQ1MspuQOd2TZ5ex1SlwhRAPTSOoKWHpujSto5RKCQ3wiyQiNBVgkTRVd17gfg==} engines: {node: '>=10', pnpm: '>=3'} - dev: true - /@fluencelabs/js-client-isomorphic@0.2.2: - resolution: {integrity: sha512-sFXVqOnS+FmHfXXAamZiKq0qnCJTl5T48+cieUtdX1dWYixk1mEXyW2olRwmzEun0L6P7U5MQ+qA/aj+t7CEJQ==} + /@fluencelabs/js-client-isomorphic@0.3.0: + resolution: {integrity: sha512-gCRaa3VTTYesGPahOrVk/oagkp376rHiww+NShWJH1qOvAUblZA+1dkv4pWUA2K4LH1jgsL3kN3bsSfIdw+E6w==} dependencies: '@fluencelabs/avm': 0.54.0 - '@fluencelabs/marine-js': 0.7.2 - '@fluencelabs/marine-worker': 0.4.2 + '@fluencelabs/marine-js': 0.8.0 + '@fluencelabs/marine-worker': 0.5.0 '@fluencelabs/threads': 2.0.0 transitivePeerDependencies: - supports-color dev: false - /@fluencelabs/js-client@0.4.3: - resolution: {integrity: sha512-eb+dH7X2uHaDYolvm7pZtRsG2jFJck+e2dzvRwk0f2x6BvMH4cB8oxByoGuU7RvEh88mFfuy8L31RKKr+rhCDA==} + /@fluencelabs/js-client@0.5.3: + resolution: {integrity: sha512-C6kzuJ3gsOB6LRc134gHI1BolpbR2IoeqeRKUVZn2IX97eKmzkhRODXS3cLWpaCxMiPC+3F+hWOf8YMkEOCUog==} engines: {node: '>=10', pnpm: '>=8'} dependencies: '@chainsafe/libp2p-noise': 13.0.0 '@chainsafe/libp2p-yamux': 5.0.0 '@fluencelabs/avm': 0.54.0 - '@fluencelabs/interfaces': 0.8.2 - '@fluencelabs/js-client-isomorphic': 0.2.2 - '@fluencelabs/marine-worker': 0.4.2 + '@fluencelabs/interfaces': 0.9.0 + '@fluencelabs/js-client-isomorphic': 0.3.0 + '@fluencelabs/marine-worker': 0.5.0 '@fluencelabs/threads': 2.0.0 '@libp2p/crypto': 2.0.3 '@libp2p/interface': 0.1.2 @@ -554,8 +548,6 @@ packages: '@libp2p/peer-id-factory': 3.0.3 '@libp2p/websockets': 7.0.4 '@multiformats/multiaddr': 11.3.0 - assert: 2.1.0 - async: 3.2.4 bs58: 5.0.0 buffer: 6.0.3 debug: 4.3.4 @@ -566,7 +558,6 @@ packages: libp2p: 0.46.6 multiformats: 11.0.1 rxjs: 7.5.5 - ts-pattern: 3.3.3 uint8arrays: 4.0.3 uuid: 8.3.2 zod: 3.22.4 @@ -576,18 +567,18 @@ packages: - utf-8-validate dev: false - /@fluencelabs/marine-js@0.7.2: - resolution: {integrity: sha512-etjbXDgzyZkK82UZvtuIU3bfy5f52siDUy1m+T5Y5r70k82xYdZZ8vgWVgB6ivi2f3aDyQjgNTfzWQjKFpAReQ==} + /@fluencelabs/marine-js@0.8.0: + resolution: {integrity: sha512-exxp0T0Dk69dxnbpAiVc/qp66s8Jq/P71TRB9aeQZLZy3EQtVAMCBJvwQY8LzVVlYEyVjmqQkFG/N0rAeYU1vg==} dependencies: '@wasmer/wasi': 0.12.0 '@wasmer/wasmfs': 0.12.0 default-import: 1.1.5 dev: false - /@fluencelabs/marine-worker@0.4.2: - resolution: {integrity: sha512-z0RizqN77nJhRsk+XEnbDjAf8OVLSmKt6+PDPQOF5SJlXekzE7bB8cScInGyEQ8KCGxKjMM7OaQiQzQREkslEA==} + /@fluencelabs/marine-worker@0.5.0: + resolution: {integrity: sha512-lkDSNp9H45q1XgptAxNFRps/tG4MRMwZVq63l5fzl2dJ5y1IMHqSTTp0LeOZGk47nPbT4Zq2/7oCZShuC9+0FQ==} dependencies: - '@fluencelabs/marine-js': 0.7.2 + '@fluencelabs/marine-js': 0.8.0 '@fluencelabs/threads': 2.0.0 observable-fns: 0.6.1 transitivePeerDependencies: @@ -1423,20 +1414,6 @@ packages: is-array-buffer: 3.0.2 dev: false - /assert@2.1.0: - resolution: {integrity: sha512-eLHpSK/Y4nhMJ07gDaAzoX/XAKS8PSaojml3M0DM4JpV1LAi5JOJ/p6H/XWrl8L+DzVEvVCW1z3vWAaB9oTsQw==} - dependencies: - call-bind: 1.0.2 - is-nan: 1.3.2 - object-is: 1.1.5 - 
object.assign: 4.1.4 - util: 0.12.5 - dev: false - - /async@3.2.4: - resolution: {integrity: sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ==} - dev: false - /available-typed-arrays@1.0.5: resolution: {integrity: sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==} engines: {node: '>= 0.4'} @@ -2265,13 +2242,6 @@ packages: engines: {node: '>=6'} dev: true - /is-generator-function@1.0.10: - resolution: {integrity: sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==} - engines: {node: '>= 0.4'} - dependencies: - has-tostringtag: 1.0.0 - dev: false - /is-loopback-addr@2.0.1: resolution: {integrity: sha512-SEsepLbdWFb13B6U0tt6dYcUM0iK/U7XOC43N70Z4Qb88WpNtp+ospyNI9ddpqncs7Z7brAEsVBTQpaqSNntIw==} dev: false @@ -2280,14 +2250,6 @@ packages: resolution: {integrity: sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg==} dev: false - /is-nan@1.3.2: - resolution: {integrity: sha512-E+zBKpQ2t6MEo1VsonYmluk9NxGrbzpeeLC2xIViuO2EjU2xsXsBPwTr3Ykv9l08UYEVEdWeRZNouaZqF6RN0w==} - engines: {node: '>= 0.4'} - dependencies: - call-bind: 1.0.2 - define-properties: 1.2.0 - dev: false - /is-number-object@1.0.7: resolution: {integrity: sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==} engines: {node: '>= 0.4'} @@ -3900,10 +3862,6 @@ packages: yn: 3.1.1 dev: true - /ts-pattern@3.3.3: - resolution: {integrity: sha512-Z5EFi6g6wyX3uDFHqxF5W5c5h663oZg9O6aOiAT7fqNu0HPSfCxtHzrQ7SblTy738Mrg2Ezorky8H5aUOm8Pvg==} - dev: false - /tslib@2.5.3: resolution: {integrity: sha512-mSxlJJwl3BMEQCUNnxXBU9jP4JBktcEGhURcPR6VQVlnP0FdDEsIaz0C35dXNGLyRfrATNofF0F5p2KPxQgB+w==} dev: false @@ -3987,16 +3945,6 @@ packages: resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} dev: false - /util@0.12.5: - resolution: {integrity: sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==} - dependencies: - inherits: 2.0.4 - is-arguments: 1.1.1 - is-generator-function: 1.0.10 - is-typed-array: 1.1.10 - which-typed-array: 1.1.9 - dev: false - /uuid@8.3.2: resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==} hasBin: true From c0ac0bf8aa69b363354d577c2410a658d20db612 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 27 Nov 2023 12:04:23 +0000 Subject: [PATCH 15/30] chore(deps): update dependency @fluencelabs/aqua-lib to v0.8.1 (#976) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> Co-authored-by: Dima --- integration-tests/package.json | 2 +- pnpm-lock.yaml | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/integration-tests/package.json b/integration-tests/package.json index 64649fa70..cbb6536c5 100644 --- a/integration-tests/package.json +++ b/integration-tests/package.json @@ -33,7 +33,7 @@ "devDependencies": { "@fluencelabs/aqua-api": "0.13.0", "@fluencelabs/aqua-dht": "0.2.5", - "@fluencelabs/aqua-lib": "0.7.7", + "@fluencelabs/aqua-lib": "0.8.1", "@types/jest": "29.5.2", "@types/node": "18.11.18", "jest": "29.5.0", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 79a45e539..0ad304d14 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -43,8 +43,8 @@ importers: specifier: 0.2.5 version: 0.2.5 '@fluencelabs/aqua-lib': - specifier: 0.7.7 - 
version: 0.7.7 + specifier: 0.8.1 + version: 0.8.1 '@types/jest': specifier: 29.5.2 version: 29.5.2 @@ -504,8 +504,8 @@ packages: resolution: {integrity: sha512-H2Q4gIvociUxc4J2mwmH0D+mrU2N2Z+enKCHgBCanMVEE2wZDsZ80GTbDKsQjEq+gpqbnJIk8lJBYW6lyvLJTg==} dev: true - /@fluencelabs/aqua-lib@0.7.7: - resolution: {integrity: sha512-/RvIwaLGmNuzkoFTRXOp66BTiVVkmljiY1Do3f/wRnYWZO8jdjQ/bmO+v1rhGZ7+DYzTL1YT54Vxt5/ralnPvA==} + /@fluencelabs/aqua-lib@0.8.1: + resolution: {integrity: sha512-VLslkhi3hsNLWkgsoCyceCediqkicWphMVHZ+9eEkgMumepvo7TcqiYC14bl2LpZjn7YZ6y/OzK+Ffy8ADfKdA==} dev: true /@fluencelabs/avm@0.54.0: From aade33a5eca40f16e12458d716dca10351510625 Mon Sep 17 00:00:00 2001 From: DieMyst Date: Tue, 28 Nov 2023 16:57:55 +0700 Subject: [PATCH 16/30] fix TokenInfo --- aqua-src/antithesis.aqua | 2 +- .../aqua/semantics/rules/locations/LocationsAlgebra.scala | 2 ++ .../aqua/semantics/rules/locations/LocationsState.scala | 2 +- .../scala/aqua/semantics/rules/types/TypesInterpreter.scala | 6 +++--- 4 files changed, 7 insertions(+), 5 deletions(-) diff --git a/aqua-src/antithesis.aqua b/aqua-src/antithesis.aqua index 17c7db1de..4167dd60e 100644 --- a/aqua-src/antithesis.aqua +++ b/aqua-src/antithesis.aqua @@ -1,2 +1,2 @@ -func arr(strs: []string) -> []string +func arr(strs: []string) -> []string: <- strs diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala index c17d81458..876c68905 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala @@ -2,6 +2,8 @@ package aqua.semantics.rules.locations import aqua.parser.lexer.Token import aqua.types.Type +case class TokenInfo[S[_]](token: Token[S], `type`: Type) + trait LocationsAlgebra[S[_], Alg[_]] { def addToken(name: String, tokenInfo: TokenInfo[S]): Alg[Unit] def addTokenWithFields(name: String, token: Token[S], fields: List[(String, Token[S])]): Alg[Unit] diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala index cb0b37d0b..4e2ca9f21 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala @@ -5,7 +5,7 @@ import aqua.semantics.rules.types.TypesState import cats.kernel.Monoid case class LocationsState[S[_]]( - tokens: Map[String, Token[S]] = Map.empty[String, Token[S]], + tokens: Map[String, TokenInfo[S]] = Map.empty[String, TokenInfo[S]], locations: List[(Token[S], Token[S])] = Nil, stack: List[LocationsState[S]] = Nil ) { diff --git a/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala index 7df3cddd5..a5be36aed 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala @@ -3,14 +3,14 @@ package aqua.semantics.rules.types import aqua.parser.lexer.* import aqua.raw.value.* import aqua.semantics.rules.StackInterpreter -import aqua.semantics.rules.locations.LocationsAlgebra +import aqua.semantics.rules.locations.{LocationsAlgebra, TokenInfo} import aqua.semantics.rules.report.ReportAlgebra import aqua.semantics.rules.types.TypeResolution.TypeResolutionError import aqua.types.* import 
aqua.types.Type.* +import cats.data.* import cats.data.Validated.{Invalid, Valid} -import cats.data.{Chain, NonEmptyList, NonEmptyMap, OptionT, State} import cats.syntax.applicative.* import cats.syntax.apply.* import cats.syntax.flatMap.* @@ -170,7 +170,7 @@ class TypesInterpreter[S[_], X](using case Some(_) => report.error(name, s"Type `${name.value}` was already defined").as(false) case None => modify(_.defineType(name, target)) - .productL(locations.addToken(name.value, name)) + .productL(locations.addToken(name.value, TokenInfo(name.asName, target))) .as(true) } From c4d46e03993bf39a974d5fc74d9de4a0fd3f182b Mon Sep 17 00:00:00 2001 From: DieMyst Date: Wed, 29 Nov 2023 17:31:06 +0700 Subject: [PATCH 17/30] return TokenInfo with CompilationResult --- .../.js/src/main/scala/aqua/lsp/AquaLSP.scala | 41 +++++++++++++++---- .../language-server-npm/aqua-lsp-api.d.ts | 8 +++- types/src/main/scala/aqua/types/Type.scala | 7 +--- 3 files changed, 42 insertions(+), 14 deletions(-) diff --git a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/AquaLSP.scala b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/AquaLSP.scala index 2c1453e54..d7c337906 100644 --- a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/AquaLSP.scala +++ b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/AquaLSP.scala @@ -10,31 +10,36 @@ import aqua.parser.lift.FileSpan.F import aqua.parser.lift.{FileSpan, Span} import aqua.parser.{ArrowReturnError, BlockIndentError, LexerError, ParserError} import aqua.raw.ConstantRaw +import aqua.semantics.rules.locations.TokenInfo import aqua.semantics.{HeaderError, RulesViolated, SemanticWarning, WrongAST} +import aqua.types.{LiteralType, ScalarType} import aqua.{AquaIO, SpanParser} -import cats.data.Validated.{invalidNec, validNec, Invalid, Valid} -import cats.data.{NonEmptyChain, Validated} +import cats.data.Validated +import cats.data.Validated.{Invalid, Valid} import cats.effect.IO -import cats.syntax.option.* import cats.effect.unsafe.implicits.global +import cats.syntax.option.* import fs2.io.file.{Files, Path} -import scribe.Logging import scala.concurrent.ExecutionContext.Implicits.global -import scala.concurrent.Future import scala.scalajs.js import scala.scalajs.js.JSConverters.* import scala.scalajs.js.annotation.* -import scala.scalajs.js.{undefined, UndefOr} +import scala.scalajs.js.{UndefOr, undefined} +import scribe.Logging @JSExportAll case class CompilationResult( errors: js.Array[ErrorInfo], warnings: js.Array[WarningInfo] = js.Array(), locations: js.Array[TokenLink] = js.Array(), - importLocations: js.Array[TokenImport] = js.Array() + importLocations: js.Array[TokenImport] = js.Array(), + tokens: js.Array[TokenInfoJs] = js.Array() ) +@JSExportAll +case class TokenInfoJs(location: TokenLocation, `type`: String) + @JSExportAll case class TokenLocation(name: String, startLine: Int, startCol: Int, endLine: Int, endCol: Int) @@ -196,6 +201,25 @@ object AquaLSP extends App with Logging { logger.debug("Compilation done.") + def tokensToJs(tokens: List[TokenInfo[FileSpan.F]]): js.Array[TokenInfoJs] = { + tokens.flatMap { ti => + TokenLocation.fromSpan(ti.token.unit._1).map { tl => + val typeName = ti.`type` match { + case LiteralType(oneOf, _) if oneOf == ScalarType.integer => + "u32" + case LiteralType(oneOf, _) if oneOf == ScalarType.float => + "f32" + case LiteralType(oneOf, _) if oneOf == ScalarType.string => + "string" + case LiteralType(oneOf, _) if oneOf == ScalarType.bool => + "bool" + case t => t.toString + } + 
TokenInfoJs(tl, typeName) + } + }.toJSArray + } + def locationsToJs( locations: List[(Token[FileSpan.F], Token[FileSpan.F])] ): js.Array[TokenLink] = { @@ -236,7 +260,8 @@ object AquaLSP extends App with Logging { errors.toJSArray, warnings.toJSArray, locationsToJs(lsp.locations), - importsToTokenImport(lsp.importTokens) + importsToTokenImport(lsp.importTokens), + tokensToJs(lsp.tokens.values.toList) ) case Invalid(e) => val errors = e.toChain.toList.flatMap(errorToInfo) diff --git a/language-server/language-server-npm/aqua-lsp-api.d.ts b/language-server/language-server-npm/aqua-lsp-api.d.ts index c59ec8271..bf705eeee 100644 --- a/language-server/language-server-npm/aqua-lsp-api.d.ts +++ b/language-server/language-server-npm/aqua-lsp-api.d.ts @@ -6,6 +6,11 @@ export interface TokenLocation { endCol: number } +export interface TokenInfo { + location: TokenLocation, + type: string +} + export interface TokenLink { current: TokenLocation, definition: TokenLocation @@ -36,7 +41,8 @@ export interface CompilationResult { errors: ErrorInfo[], warnings: WarningInfo[], locations: TokenLink[], - importLocations: TokenImport[] + importLocations: TokenImport[], + tokens: TokenInfo[] } export class Compiler { diff --git a/types/src/main/scala/aqua/types/Type.scala b/types/src/main/scala/aqua/types/Type.scala index c9b0bfc63..2b2317a43 100644 --- a/types/src/main/scala/aqua/types/Type.scala +++ b/types/src/main/scala/aqua/types/Type.scala @@ -4,15 +4,12 @@ import aqua.errors.Errors.internalError import aqua.types.* import aqua.types.Type.* -import cats.Eval -import cats.PartialOrder -import cats.data.NonEmptyList -import cats.data.NonEmptyMap -import cats.syntax.applicative.* +import cats.data.{NonEmptyList, NonEmptyMap} import cats.syntax.foldable.* import cats.syntax.option.* import cats.syntax.partialOrder.* import cats.syntax.traverse.* +import cats.{Eval, PartialOrder} import scala.collection.immutable.SortedMap sealed trait Type { From 7980e81ce9a78b5e42372a6ab016194a377f26a9 Mon Sep 17 00:00:00 2001 From: DieMyst Date: Fri, 1 Dec 2023 15:04:51 +0700 Subject: [PATCH 18/30] add token info for named types, handle tokens with same names, refactoring --- aqua-src/antithesis.aqua | 9 +- .../.js/src/main/scala/aqua/lsp/AquaLSP.scala | 213 +----------------- .../src/main/scala/aqua/lsp/OutputTypes.scala | 88 ++++++++ .../main/scala/aqua/lsp/ResultHelper.scala | 136 +++++++++++ .../src/main/scala/aqua/lsp/TypeShow.scala | 51 +++++ .../scala/aqua/lsp/LocationsInterpreter.scala | 46 ++-- .../src/main/scala/aqua/lsp/LspContext.scala | 2 +- .../aqua/semantics/expr/ServiceSem.scala | 2 +- .../abilities/AbilitiesInterpreter.scala | 5 - .../definitions/DefinitionsInterpreter.scala | 8 +- .../locations/DummyLocationsInterpreter.scala | 10 +- .../rules/locations/LocationsAlgebra.scala | 2 +- .../rules/locations/LocationsState.scala | 2 +- .../rules/types/TypesInterpreter.scala | 19 +- 14 files changed, 337 insertions(+), 256 deletions(-) create mode 100644 language-server/language-server-api/.js/src/main/scala/aqua/lsp/OutputTypes.scala create mode 100644 language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala create mode 100644 language-server/language-server-api/.js/src/main/scala/aqua/lsp/TypeShow.scala diff --git a/aqua-src/antithesis.aqua b/aqua-src/antithesis.aqua index 4167dd60e..66da671de 100644 --- a/aqua-src/antithesis.aqua +++ b/aqua-src/antithesis.aqua @@ -1,2 +1,9 @@ func arr(strs: []string) -> []string: - <- strs + n = "str" + arr = [n] + <- arr + +func ppp() -> []u32: + n 
= 123 + arr = [123] + <- arr \ No newline at end of file diff --git a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/AquaLSP.scala b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/AquaLSP.scala index d7c337906..7c579470f 100644 --- a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/AquaLSP.scala +++ b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/AquaLSP.scala @@ -1,18 +1,12 @@ package aqua.lsp import aqua.compiler.* -import aqua.compiler.AquaError.{ParserError as AquaParserError, *} -import aqua.compiler.AquaWarning.* +import aqua.compiler.AquaError.SourcesError import aqua.files.{AquaFileSources, AquaFilesIO, FileModuleId} import aqua.io.* -import aqua.parser.lexer.{LiteralToken, Token} +import aqua.parser.lift.FileSpan import aqua.parser.lift.FileSpan.F -import aqua.parser.lift.{FileSpan, Span} -import aqua.parser.{ArrowReturnError, BlockIndentError, LexerError, ParserError} import aqua.raw.ConstantRaw -import aqua.semantics.rules.locations.TokenInfo -import aqua.semantics.{HeaderError, RulesViolated, SemanticWarning, WrongAST} -import aqua.types.{LiteralType, ScalarType} import aqua.{AquaIO, SpanParser} import cats.data.Validated @@ -25,152 +19,12 @@ import scala.concurrent.ExecutionContext.Implicits.global import scala.scalajs.js import scala.scalajs.js.JSConverters.* import scala.scalajs.js.annotation.* -import scala.scalajs.js.{UndefOr, undefined} import scribe.Logging -@JSExportAll -case class CompilationResult( - errors: js.Array[ErrorInfo], - warnings: js.Array[WarningInfo] = js.Array(), - locations: js.Array[TokenLink] = js.Array(), - importLocations: js.Array[TokenImport] = js.Array(), - tokens: js.Array[TokenInfoJs] = js.Array() -) - -@JSExportAll -case class TokenInfoJs(location: TokenLocation, `type`: String) - -@JSExportAll -case class TokenLocation(name: String, startLine: Int, startCol: Int, endLine: Int, endCol: Int) - -@JSExportAll -case class TokenLink(current: TokenLocation, definition: TokenLocation) - -@JSExportAll -case class TokenImport(current: TokenLocation, path: String) - -object TokenLocation { - - def fromSpan(span: FileSpan): Option[TokenLocation] = { - val start = span.locationMap.value.toLineCol(span.span.startIndex) - val end = span.locationMap.value.toLineCol(span.span.endIndex) - - for { - startLC <- start - endLC <- end - } yield { - TokenLocation(span.name, startLC._1, startLC._2, endLC._1, endLC._2) - } - - } -} - -@JSExportAll -case class ErrorInfo( - start: Int, - end: Int, - message: String, - location: UndefOr[String] -) { - // Used to distinguish from WarningInfo in TS - val infoType: String = "error" -} - -object ErrorInfo { - - def apply(fileSpan: FileSpan, message: String): ErrorInfo = { - val start = fileSpan.span.startIndex - val end = fileSpan.span.endIndex - ErrorInfo(start, end, message, fileSpan.name) - } - - def applyOp(start: Int, end: Int, message: String, location: Option[String]): ErrorInfo = { - ErrorInfo(start, end, message, location.getOrElse(undefined)) - } -} - -@JSExportAll -case class WarningInfo( - start: Int, - end: Int, - message: String, - location: UndefOr[String] -) { - // Used to distinguish from ErrorInfo in TS - val infoType: String = "warning" -} - -object WarningInfo { - - def apply(fileSpan: FileSpan, message: String): WarningInfo = { - val start = fileSpan.span.startIndex - val end = fileSpan.span.endIndex - WarningInfo(start, end, message, fileSpan.name) - } -} - @JSExportTopLevel("AquaLSP") object AquaLSP extends App with Logging { - private 
def errorToInfo( - error: AquaError[FileModuleId, AquaFileError, FileSpan.F] - ): List[ErrorInfo] = error match { - case AquaParserError(err) => - err match { - case BlockIndentError(indent, message) => - ErrorInfo(indent._1, message) :: Nil - case ArrowReturnError(point, message) => - ErrorInfo(point._1, message) :: Nil - case LexerError((span, e)) => - e.expected.toList - .groupBy(_.offset) - .map { case (offset, exps) => - val localSpan = Span(offset, offset + 1) - val fSpan = FileSpan(span.name, span.locationMap, localSpan) - val errorMessages = exps.flatMap(exp => ParserError.expectationToString(exp)) - val msg = s"${errorMessages.head}" :: errorMessages.tail.map(t => "OR " + t) - (offset, ErrorInfo(fSpan, msg.mkString("\n"))) - } - .toList - .sortBy(_._1) - .map(_._2) - .reverse - } - case SourcesError(err) => - ErrorInfo.applyOp(0, 0, err.showForConsole, None) :: Nil - case ResolveImportsError(_, token, err) => - ErrorInfo(token.unit._1, err.showForConsole) :: Nil - case ImportError(token) => - ErrorInfo(token.unit._1, "Cannot resolve import") :: Nil - case CycleError(modules) => - ErrorInfo.applyOp( - 0, - 0, - s"Cycle loops detected in imports: ${modules.map(_.file.fileName)}", - None - ) :: Nil - case CompileError(err) => - err match { - case RulesViolated(token, messages) => - ErrorInfo(token.unit._1, messages.mkString("\n")) :: Nil - case HeaderError(token, message) => - ErrorInfo(token.unit._1, message) :: Nil - case WrongAST(ast) => - ErrorInfo.applyOp(0, 0, "Semantic error: wrong AST", None) :: Nil - - } - case OutputError(_, err) => - ErrorInfo.applyOp(0, 0, err.showForConsole, None) :: Nil - case AirValidationError(errors) => - errors.toChain.toList.map(ErrorInfo.applyOp(0, 0, _, None)) - } - - private def warningToInfo( - warning: AquaWarning[FileSpan.F] - ): List[WarningInfo] = warning match { - case CompileWarning(SemanticWarning(token, messages)) => - WarningInfo(token.unit._1, messages.mkString("\n")) :: Nil - } + import ResultHelper.* @JSExport def compile( @@ -201,68 +55,9 @@ object AquaLSP extends App with Logging { logger.debug("Compilation done.") - def tokensToJs(tokens: List[TokenInfo[FileSpan.F]]): js.Array[TokenInfoJs] = { - tokens.flatMap { ti => - TokenLocation.fromSpan(ti.token.unit._1).map { tl => - val typeName = ti.`type` match { - case LiteralType(oneOf, _) if oneOf == ScalarType.integer => - "u32" - case LiteralType(oneOf, _) if oneOf == ScalarType.float => - "f32" - case LiteralType(oneOf, _) if oneOf == ScalarType.string => - "string" - case LiteralType(oneOf, _) if oneOf == ScalarType.bool => - "bool" - case t => t.toString - } - TokenInfoJs(tl, typeName) - } - }.toJSArray - } - - def locationsToJs( - locations: List[(Token[FileSpan.F], Token[FileSpan.F])] - ): js.Array[TokenLink] = { - locations.flatMap { case (from, to) => - val fromOp = TokenLocation.fromSpan(from.unit._1) - val toOp = TokenLocation.fromSpan(to.unit._1) - - val link = for { - from <- fromOp - to <- toOp - } yield TokenLink(from, to) - - if (link.isEmpty) - logger.warn(s"Incorrect coordinates for token '${from.unit._1.name}'") - - link.toList - }.toJSArray - } - - def importsToTokenImport(imports: List[LiteralToken[FileSpan.F]]): js.Array[TokenImport] = - imports.flatMap { lt => - val (span, str) = lt.valueToken - val unquoted = str.substring(1, str.length - 1) - TokenLocation.fromSpan(span).map(l => TokenImport(l, unquoted)) - }.toJSArray - val result = fileRes match { case Valid(lsp) => - val errors = lsp.errors.map(CompileError.apply).flatMap(errorToInfo) - val warnings = 
lsp.warnings.map(CompileWarning.apply).flatMap(warningToInfo) - errors match - case Nil => - logger.debug("No errors on compilation.") - case errs => - logger.debug("Errors: " + errs.mkString("\n")) - - CompilationResult( - errors.toJSArray, - warnings.toJSArray, - locationsToJs(lsp.locations), - importsToTokenImport(lsp.importTokens), - tokensToJs(lsp.tokens.values.toList) - ) + lspToCompilationResult(lsp) case Invalid(e) => val errors = e.toChain.toList.flatMap(errorToInfo) logger.debug("Errors: " + errors.mkString("\n")) diff --git a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/OutputTypes.scala b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/OutputTypes.scala new file mode 100644 index 000000000..6b37ccf70 --- /dev/null +++ b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/OutputTypes.scala @@ -0,0 +1,88 @@ +package aqua.lsp + +import aqua.parser.lift.FileSpan + +import scala.scalajs.js +import scala.scalajs.js.annotation.JSExportAll +import scala.scalajs.js.{UndefOr, undefined} + +@JSExportAll +case class CompilationResult( + errors: js.Array[ErrorInfo], + warnings: js.Array[WarningInfo] = js.Array(), + locations: js.Array[TokenLink] = js.Array(), + importLocations: js.Array[TokenImport] = js.Array(), + tokens: js.Array[TokenInfoJs] = js.Array() +) + +@JSExportAll +case class TokenInfoJs(location: TokenLocation, `type`: String) + +@JSExportAll +case class TokenLocation(name: String, startLine: Int, startCol: Int, endLine: Int, endCol: Int) + +@JSExportAll +case class TokenLink(current: TokenLocation, definition: TokenLocation) + +@JSExportAll +case class TokenImport(current: TokenLocation, path: String) + +object TokenLocation { + + def fromSpan(span: FileSpan): Option[TokenLocation] = { + val start = span.locationMap.value.toLineCol(span.span.startIndex) + val end = span.locationMap.value.toLineCol(span.span.endIndex) + + for { + startLC <- start + endLC <- end + } yield { + TokenLocation(span.name, startLC._1, startLC._2, endLC._1, endLC._2) + } + + } +} + +@JSExportAll +case class ErrorInfo( + start: Int, + end: Int, + message: String, + location: UndefOr[String] +) { + // Used to distinguish from WarningInfo in TS + val infoType: String = "error" +} + +object ErrorInfo { + + def apply(fileSpan: FileSpan, message: String): ErrorInfo = { + val start = fileSpan.span.startIndex + val end = fileSpan.span.endIndex + ErrorInfo(start, end, message, fileSpan.name) + } + + def applyOp(start: Int, end: Int, message: String, location: Option[String]): ErrorInfo = { + ErrorInfo(start, end, message, location.getOrElse(undefined)) + } +} + +@JSExportAll +case class WarningInfo( + start: Int, + end: Int, + message: String, + location: UndefOr[String] +) { + // Used to distinguish from ErrorInfo in TS + val infoType: String = "warning" +} + +object WarningInfo { + + def apply(fileSpan: FileSpan, message: String): WarningInfo = { + val start = fileSpan.span.startIndex + val end = fileSpan.span.endIndex + WarningInfo(start, end, message, fileSpan.name) + } +} diff --git a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala new file mode 100644 index 000000000..99eb4156d --- /dev/null +++ b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala @@ -0,0 +1,136 @@ +package aqua.lsp + +import aqua.compiler.AquaError.{ParserError as AquaParserError, *} +import aqua.compiler.AquaWarning.CompileWarning +import 
aqua.compiler.{AquaError, AquaWarning} +import aqua.files.FileModuleId +import aqua.io.AquaFileError +import aqua.lsp.AquaLSP.logger +import aqua.parser.lexer.{LiteralToken, Token} +import aqua.parser.lift.{FileSpan, Span} +import aqua.parser.{ArrowReturnError, BlockIndentError, LexerError, ParserError} +import aqua.semantics.rules.locations.TokenInfo +import aqua.semantics.{HeaderError, RulesViolated, SemanticWarning, WrongAST} + +import cats.syntax.show.* +import scala.scalajs.js +import scala.scalajs.js.JSConverters.* +import scribe.Logging + +object ResultHelper extends Logging { + + import TypeShow.given + + def warningToInfo( + warning: AquaWarning[FileSpan.F] + ): List[WarningInfo] = warning match { + case CompileWarning(SemanticWarning(token, messages)) => + WarningInfo(token.unit._1, messages.mkString("\n")) :: Nil + } + + def errorToInfo( + error: AquaError[FileModuleId, AquaFileError, FileSpan.F] + ): List[ErrorInfo] = error match { + case AquaParserError(err) => + err match { + case BlockIndentError(indent, message) => + ErrorInfo(indent._1, message) :: Nil + case ArrowReturnError(point, message) => + ErrorInfo(point._1, message) :: Nil + case LexerError((span, e)) => + e.expected.toList + .groupBy(_.offset) + .map { case (offset, exps) => + val localSpan = Span(offset, offset + 1) + val fSpan = FileSpan(span.name, span.locationMap, localSpan) + val errorMessages = exps.flatMap(exp => ParserError.expectationToString(exp)) + val msg = s"${errorMessages.head}" :: errorMessages.tail.map(t => "OR " + t) + (offset, ErrorInfo(fSpan, msg.mkString("\n"))) + } + .toList + .sortBy(_._1) + .map(_._2) + .reverse + } + case SourcesError(err) => + ErrorInfo.applyOp(0, 0, err.showForConsole, None) :: Nil + case ResolveImportsError(_, token, err) => + ErrorInfo(token.unit._1, err.showForConsole) :: Nil + case ImportError(token) => + ErrorInfo(token.unit._1, "Cannot resolve import") :: Nil + case CycleError(modules) => + ErrorInfo.applyOp( + 0, + 0, + s"Cycle loops detected in imports: ${modules.map(_.file.fileName)}", + None + ) :: Nil + case CompileError(err) => + err match { + case RulesViolated(token, messages) => + ErrorInfo(token.unit._1, messages.mkString("\n")) :: Nil + case HeaderError(token, message) => + ErrorInfo(token.unit._1, message) :: Nil + case WrongAST(ast) => + ErrorInfo.applyOp(0, 0, "Semantic error: wrong AST", None) :: Nil + + } + case OutputError(_, err) => + ErrorInfo.applyOp(0, 0, err.showForConsole, None) :: Nil + case AirValidationError(errors) => + errors.toChain.toList.map(ErrorInfo.applyOp(0, 0, _, None)) + } + + private def tokensToJs(tokens: List[TokenInfo[FileSpan.F]]): js.Array[TokenInfoJs] = { + tokens.flatMap { ti => + TokenLocation.fromSpan(ti.token.unit._1).map { tl => + val typeName = ti.`type`.show + TokenInfoJs(tl, typeName) + } + }.toJSArray + } + + private def locationsToJs( + locations: List[(Token[FileSpan.F], Token[FileSpan.F])] + ): js.Array[TokenLink] = { + locations.flatMap { case (from, to) => + val fromOp = TokenLocation.fromSpan(from.unit._1) + val toOp = TokenLocation.fromSpan(to.unit._1) + + val link = for { + from <- fromOp + to <- toOp + } yield TokenLink(from, to) + + if (link.isEmpty) + logger.warn(s"Incorrect coordinates for token '${from.unit._1.name}'") + + link.toList + }.toJSArray + } + + private def importsToTokenImport(imports: List[LiteralToken[FileSpan.F]]): js.Array[TokenImport] = + imports.flatMap { lt => + val (span, str) = lt.valueToken + val unquoted = str.substring(1, str.length - 1) + TokenLocation.fromSpan(span).map(l 
=> TokenImport(l, unquoted)) + }.toJSArray + + def lspToCompilationResult(lsp: LspContext[FileSpan.F]): CompilationResult = { + val errors = lsp.errors.map(CompileError.apply).flatMap(errorToInfo) + val warnings = lsp.warnings.map(CompileWarning.apply).flatMap(warningToInfo) + errors match + case Nil => + logger.debug("No errors on compilation.") + case errs => + logger.debug("Errors: " + errs.mkString("\n")) + + CompilationResult( + errors.toJSArray, + warnings.toJSArray, + locationsToJs(lsp.locations), + importsToTokenImport(lsp.importTokens), + tokensToJs(lsp.tokens.map(_._2)) + ) + } +} diff --git a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/TypeShow.scala b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/TypeShow.scala new file mode 100644 index 000000000..2119a996e --- /dev/null +++ b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/TypeShow.scala @@ -0,0 +1,51 @@ +package aqua.lsp + +import aqua.types.* + +import cats.Show +import cats.syntax.show.* + +object TypeShow { + given Show[DataType] = { + case LiteralType.signed => + "i32" + case LiteralType.unsigned => + "u32" + case LiteralType.number => + "u32" + case LiteralType.float => + "f32" + case LiteralType.string => + "string" + case LiteralType.bool => + "bool" + case t => + t.toString + } + + given Show[Type] = { + case ArrayType(el) => + s"[]${el.show}" + case OptionType(el) => + s"?${el.show}" + case StreamType(el) => + s"*${el.show}" + case ArrowType(domain, codomain) => + val domainStr = domain match { + case _: LabeledConsType => + domain.toLabelledList().map { case (s, t) => s"$s: ${t.show}" }.mkString("(", ", ", ")") + case _ => domain.toList.mkString("(", ", ", ")") + } + val codomainStr = codomain.toList match { + case Nil => "" + case l => " -> " + l.mkString(", ") + } + domainStr + codomainStr + case nt: NamedType => + s"${nt.fullName}(${nt.fields.map(_.show).toNel.toList.map(kv => kv._1 + ": " + kv._2).mkString(", ")})" + case t: DataType => + t.show + case t => + t.toString + } +} diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala b/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala index 4c129e8c0..c522e25d2 100644 --- a/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala +++ b/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala @@ -3,7 +3,6 @@ package aqua.lsp import aqua.parser.lexer.Token import aqua.semantics.rules.StackInterpreter import aqua.semantics.rules.locations.{LocationsAlgebra, LocationsState, TokenInfo} -import aqua.types.{BottomType, Type} import cats.data.State import monocle.Lens @@ -22,23 +21,22 @@ class LocationsInterpreter[S[_], X](using import stack.* - override def addToken(name: String, tokenInfo: TokenInfo[S]): State[X, Unit] = modify { - st => - st.copy(tokens = st.tokens.updated(name, tokenInfo)) + override def addToken(name: String, tokenInfo: TokenInfo[S]): State[X, Unit] = modify { st => + st.copy(tokens = (name, tokenInfo) +: st.tokens) } private def combineFieldName(name: String, field: String): String = name + "." 
+ field override def addTokenWithFields( name: String, - token: Token[S], - fields: List[(String, Token[S])] - ): State[X, Unit] = modify { st => - st.copy(tokens = - st.tokens ++ ((name, TokenInfo(token, BottomType)) +: fields.map(kv => - (combineFieldName(name, kv._1), TokenInfo(kv._2, BottomType)) - )).toMap - ) + token: TokenInfo[S], + fields: List[(String, TokenInfo[S])] + ): State[X, Unit] = { + val allTokens = + ((name, token) +: fields.map(kv => (combineFieldName(name, kv._1), kv._2))).toMap + modify { st => + st.copy(tokens = st.tokens ++ allTokens) + } } def pointFieldLocation(typeName: String, fieldName: String, token: Token[S]): State[X, Unit] = @@ -56,27 +54,29 @@ class LocationsInterpreter[S[_], X](using } yield {} } + private def findTokenByName( + st: LocationsState[S], + name: String, + token: Token[S] + ): Option[(Token[S], Token[S])] = + (st.stack.view.flatMap(_.tokens).collectFirst { + case (s, t) if s == name => t + } orElse st.tokens.find(_._1 == name).map(_._2)).map(token -> _.token) + override def pointLocation(name: String, token: Token[S]): State[X, Unit] = { modify { st => - val newLoc: Option[Token[S]] = st.stack.collectFirst { - case frame if frame.tokens.contains(name) => frame.tokens(name).token - } orElse st.tokens.get(name).map(_.token) - st.copy(locations = st.locations ++ newLoc.map(token -> _).toList) + val newLoc = findTokenByName(st, name, token) + st.copy(locations = st.locations ++ newLoc.toList) } } - def pointLocations(locations: List[(String, Token[S])]): State[X, Unit] = { + def pointLocations(locations: List[(String, Token[S])]): State[X, Unit] = modify { st => - val newLocs = locations.flatMap { case (name, token) => - (st.stack.collectFirst { - case frame if frame.tokens.contains(name) => frame.tokens(name).token - } orElse st.tokens.get(name).map(_.token)).map(token -> _) + findTokenByName(st, name, token) } - st.copy(locations = st.locations ++ newLocs) } - } private def modify(f: LocationsState[S] => LocationsState[S]): SX[Unit] = State.modify(lens.modify(f)) diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala b/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala index 612ce8b45..1deb4d794 100644 --- a/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala +++ b/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala @@ -15,7 +15,7 @@ case class LspContext[S[_]]( abDefinitions: Map[String, NamedTypeToken[S]] = Map.empty[String, NamedTypeToken[S]], rootArrows: Map[String, ArrowType] = Map.empty[String, ArrowType], constants: Map[String, Type] = Map.empty[String, Type], - tokens: Map[String, TokenInfo[S]] = Map.empty[String, TokenInfo[S]], + tokens: List[(String, TokenInfo[S])] = Nil, locations: List[(Token[S], Token[S])] = Nil, importTokens: List[LiteralToken[S]] = Nil, errors: List[SemanticError[S]] = Nil, diff --git a/semantics/src/main/scala/aqua/semantics/expr/ServiceSem.scala b/semantics/src/main/scala/aqua/semantics/expr/ServiceSem.scala index 0d8162ff4..febb0d38d 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/ServiceSem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/ServiceSem.scala @@ -45,7 +45,7 @@ class ServiceSem[S[_]](val expr: ServiceExpr[S]) extends AnyVal { ) ) serviceType <- EitherT.fromOptionF( - T.defineServiceType(expr.name, arrowsByName.toSortedMap.toMap), + T.defineServiceType(expr.name, arrowsByName.toSortedMap), Raw.error("Failed to define service type") ) arrowsDefs = arrows.map { case (name, 
_) => name.value -> name }.toNem diff --git a/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala index 7f3038700..a73adc335 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala @@ -56,11 +56,6 @@ class AbilitiesInterpreter[S[_], X](using case false => for { _ <- modify(_.defineService(name, defaultId)) - _ <- locations.addTokenWithFields( - name.value, - name, - arrowDefs.toNel.toList - ) } yield true } diff --git a/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsInterpreter.scala index 0103b5986..41a06156e 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsInterpreter.scala @@ -4,10 +4,9 @@ import aqua.parser.lexer.{Name, NamedTypeToken, Token} import aqua.semantics.rules.StackInterpreter import aqua.semantics.rules.report.ReportAlgebra import aqua.semantics.rules.abilities.AbilitiesState -import aqua.semantics.rules.locations.{LocationsAlgebra, LocationsState} +import aqua.semantics.rules.locations.{LocationsAlgebra, LocationsState, TokenInfo} import aqua.semantics.rules.types.TypesState import aqua.types.{ArrowType, Type} - import cats.data.{NonEmptyList, NonEmptyMap, State} import monocle.Lens import monocle.macros.GenLens @@ -59,12 +58,7 @@ class DefinitionsInterpreter[S[_], X](implicit token: NamedTypeToken[S] ): SX[Map[String, DefinitionsState.Def[S]]] = getState.map(_.definitions).flatMap { defs => - val names = defs.view.mapValues(_.name) - for { - _ <- locations - .addTokenWithFields(token.value, token, names.toList) - .whenA(defs.nonEmpty) _ <- modify(_.copy(definitions = Map.empty)) } yield defs } diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/DummyLocationsInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/DummyLocationsInterpreter.scala index 448d81190..a77700478 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/locations/DummyLocationsInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/locations/DummyLocationsInterpreter.scala @@ -3,9 +3,10 @@ package aqua.semantics.rules.locations import aqua.parser.lexer.Token import aqua.semantics.rules.StackInterpreter import aqua.semantics.rules.types.TypesState + +import cats.data.{NonEmptyList, NonEmptyMap, State} import monocle.Lens import monocle.macros.GenLens -import cats.data.{NonEmptyList, NonEmptyMap, State} class DummyLocationsInterpreter[S[_], X] extends LocationsAlgebra[S, State[X, *]] { @@ -13,11 +14,12 @@ class DummyLocationsInterpreter[S[_], X] extends LocationsAlgebra[S, State[X, *] def addTokenWithFields( name: String, - token: Token[S], - fields: List[(String, Token[S])] + token: TokenInfo[S], + fields: List[(String, TokenInfo[S])] ): State[X, Unit] = State.pure(()) - def pointFieldLocation(typeName: String, fieldName: String, token: Token[S]): State[X, Unit] = State.pure(()) + def pointFieldLocation(typeName: String, fieldName: String, token: Token[S]): State[X, Unit] = + State.pure(()) def pointTokenWithFieldLocation( typeName: String, diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala 
b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala index 876c68905..f2a4d78c9 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala @@ -6,7 +6,7 @@ case class TokenInfo[S[_]](token: Token[S], `type`: Type) trait LocationsAlgebra[S[_], Alg[_]] { def addToken(name: String, tokenInfo: TokenInfo[S]): Alg[Unit] - def addTokenWithFields(name: String, token: Token[S], fields: List[(String, Token[S])]): Alg[Unit] + def addTokenWithFields(name: String, token: TokenInfo[S], fields: List[(String, TokenInfo[S])]): Alg[Unit] def pointTokenWithFieldLocation(typeName: String, typeToken: Token[S], fieldName: String, token: Token[S]): Alg[Unit] def pointFieldLocation(typeName: String, fieldName: String, token: Token[S]): Alg[Unit] diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala index 4e2ca9f21..6d5cf08b2 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala @@ -5,7 +5,7 @@ import aqua.semantics.rules.types.TypesState import cats.kernel.Monoid case class LocationsState[S[_]]( - tokens: Map[String, TokenInfo[S]] = Map.empty[String, TokenInfo[S]], + tokens: List[(String, TokenInfo[S])] = Nil, locations: List[(Token[S], Token[S])] = Nil, stack: List[LocationsState[S]] = Nil ) { diff --git a/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala index a5be36aed..6c9598d02 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala @@ -97,7 +97,8 @@ class TypesInterpreter[S[_], X](using nonEmptyFields => val `type` = AbilityType(name.value, nonEmptyFields) - modify(_.defineType(name, `type`)).as(`type`.some) + locateNamedType(name, `type`, fields) >> modify(_.defineType(name, `type`)) + .as(`type`.some) ) } @@ -131,10 +132,21 @@ class TypesInterpreter[S[_], X](using ).semiflatMap(nonEmptyArrows => val `type` = ServiceType(name.value, nonEmptyArrows) - modify(_.defineType(name, `type`)).as(`type`) + locateNamedType(name, `type`, fields) >> modify(_.defineType(name, `type`)).as(`type`) ).value ) + private def locateNamedType( + name: NamedTypeToken[S], + t: NamedType, + fields: Map[String, (Name[S], Type)] + ) = + locations.addTokenWithFields( + name.value, + TokenInfo[S](name, t), + fields.view.mapValues(TokenInfo[S].apply).toList + ) + override def defineStructType( name: NamedTypeToken[S], fields: Map[String, (Name[S], Type)] @@ -159,7 +171,8 @@ class TypesInterpreter[S[_], X](using )(nonEmptyFields => val `type` = StructType(name.value, nonEmptyFields) - modify(_.defineType(name, `type`)).as(`type`.some) + locateNamedType(name, `type`, fields) >> modify(_.defineType(name, `type`)) + .as(`type`.some) ) ) ) From 63d79edc50e7d48b85b0c5e1309bdbdf438b60b6 Mon Sep 17 00:00:00 2001 From: DieMyst Date: Fri, 1 Dec 2023 15:31:44 +0700 Subject: [PATCH 19/30] small updates --- .../src/main/scala/aqua/lsp/LSPCompiler.scala | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/LSPCompiler.scala 
b/language-server/language-server-api/src/main/scala/aqua/lsp/LSPCompiler.scala index f0eb0dc83..00a6659ea 100644 --- a/language-server/language-server-api/src/main/scala/aqua/lsp/LSPCompiler.scala +++ b/language-server/language-server-api/src/main/scala/aqua/lsp/LSPCompiler.scala @@ -6,15 +6,12 @@ import aqua.raw.RawContext import aqua.semantics.header.{HeaderHandler, HeaderSem} import cats.data.Validated.validNec -import cats.syntax.semigroup.* -import cats.syntax.applicative.* -import cats.syntax.flatMap.* +import cats.data.{Chain, Validated, ValidatedNec} +import cats.syntax.either.* import cats.syntax.functor.* import cats.syntax.monoid.* -import cats.syntax.traverse.* -import cats.syntax.either.* +import cats.syntax.semigroup.* import cats.{Comonad, Monad, Monoid, Order} -import cats.data.{Chain, NonEmptyChain, Validated, ValidatedNec} object LSPCompiler { From afb2a22e29e00ab66c646ebf4512b2f9eb51955e Mon Sep 17 00:00:00 2001 From: DieMyst Date: Fri, 1 Dec 2023 16:41:20 +0700 Subject: [PATCH 20/30] PR fixes --- build.sbt | 2 +- .../.js/src/main/scala/aqua/lsp/AquaLSP.scala | 5 ++--- .../.js/src/main/scala/aqua/lsp/OutputTypes.scala | 4 +--- .../.js/src/main/scala/aqua/lsp/ResultHelper.scala | 6 ++---- 4 files changed, 6 insertions(+), 11 deletions(-) diff --git a/build.sbt b/build.sbt index 964f26f0f..cae193337 100644 --- a/build.sbt +++ b/build.sbt @@ -80,7 +80,7 @@ lazy val `language-server-api` = crossProject(JSPlatform, JVMPlatform) lazy val `language-server-apiJS` = `language-server-api`.js .settings( scalaJSLinkerConfig ~= (_.withModuleKind(ModuleKind.CommonJSModule)), - scalaJSUseMainModuleInitializer := true + scalaJSUseMainModuleInitializer := false ) .settings(addBundleJS("../../language-server-npm/aqua-lsp-api.js")) .enablePlugins(ScalaJSPlugin) diff --git a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/AquaLSP.scala b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/AquaLSP.scala index 7c579470f..6acefc546 100644 --- a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/AquaLSP.scala +++ b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/AquaLSP.scala @@ -22,7 +22,7 @@ import scala.scalajs.js.annotation.* import scribe.Logging @JSExportTopLevel("AquaLSP") -object AquaLSP extends App with Logging { +object AquaLSP extends Logging { import ResultHelper.* @@ -55,7 +55,7 @@ object AquaLSP extends App with Logging { logger.debug("Compilation done.") - val result = fileRes match { + fileRes match { case Valid(lsp) => lspToCompilationResult(lsp) case Invalid(e) => @@ -63,7 +63,6 @@ object AquaLSP extends App with Logging { logger.debug("Errors: " + errors.mkString("\n")) CompilationResult(errors.toJSArray) } - result } proc.unsafeToFuture().toJSPromise diff --git a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/OutputTypes.scala b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/OutputTypes.scala index 6b37ccf70..967c144db 100644 --- a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/OutputTypes.scala +++ b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/OutputTypes.scala @@ -36,9 +36,7 @@ object TokenLocation { for { startLC <- start endLC <- end - } yield { - TokenLocation(span.name, startLC._1, startLC._2, endLC._1, endLC._2) - } + } yield TokenLocation(span.name, startLC._1, startLC._2, endLC._1, endLC._2) } } diff --git a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala 
b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala index 99eb4156d..959480546 100644 --- a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala +++ b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala @@ -81,18 +81,17 @@ object ResultHelper extends Logging { errors.toChain.toList.map(ErrorInfo.applyOp(0, 0, _, None)) } - private def tokensToJs(tokens: List[TokenInfo[FileSpan.F]]): js.Array[TokenInfoJs] = { + private def tokensToJs(tokens: List[TokenInfo[FileSpan.F]]): js.Array[TokenInfoJs] = tokens.flatMap { ti => TokenLocation.fromSpan(ti.token.unit._1).map { tl => val typeName = ti.`type`.show TokenInfoJs(tl, typeName) } }.toJSArray - } private def locationsToJs( locations: List[(Token[FileSpan.F], Token[FileSpan.F])] - ): js.Array[TokenLink] = { + ): js.Array[TokenLink] = locations.flatMap { case (from, to) => val fromOp = TokenLocation.fromSpan(from.unit._1) val toOp = TokenLocation.fromSpan(to.unit._1) @@ -107,7 +106,6 @@ object ResultHelper extends Logging { link.toList }.toJSArray - } private def importsToTokenImport(imports: List[LiteralToken[FileSpan.F]]): js.Array[TokenImport] = imports.flatMap { lt => From a99fb7fbbb190acad0e1bcaa4fb2d4b45b8c7d17 Mon Sep 17 00:00:00 2001 From: DieMyst Date: Fri, 1 Dec 2023 16:46:59 +0700 Subject: [PATCH 21/30] ignore integration tests in publishing snapshots --- .github/workflows/snapshot.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/snapshot.yml b/.github/workflows/snapshot.yml index c1653c37e..7148b4432 100644 --- a/.github/workflows/snapshot.yml +++ b/.github/workflows/snapshot.yml @@ -78,12 +78,12 @@ jobs: registry-url: "https://npm.fluence.dev" cache: "pnpm" - - run: pnpm -r i + - run: pnpm --filter='!integration-tests' -r i - name: Set package version run: node ci.cjs bump-version ${{ steps.version.outputs.id }} - - run: pnpm -r build + - run: pnpm --filter='!integration-tests' -r build - name: Publish snapshot id: snapshot From 402e30b9d19df198746371707259b7e2cbf932bd Mon Sep 17 00:00:00 2001 From: DieMyst Date: Mon, 4 Dec 2023 16:02:25 +0700 Subject: [PATCH 22/30] fix PR comments --- .../src/main/scala/aqua/lsp/OutputTypes.scala | 4 +- .../main/scala/aqua/lsp/ResultHelper.scala | 8 +-- .../src/main/scala/aqua/lsp/TypeShow.scala | 51 ------------------- .../scala/aqua/lsp/LocationsInterpreter.scala | 38 ++++---------- .../src/main/scala/aqua/lsp/LspContext.scala | 8 +-- .../src/main/scala/aqua/semantics/Prog.scala | 8 --- .../aqua/semantics/expr/func/ArrowSem.scala | 3 -- .../aqua/semantics/expr/func/CatchSem.scala | 2 - .../expr/func/ElseOtherwiseSem.scala | 1 - .../aqua/semantics/expr/func/IfSem.scala | 1 - .../aqua/semantics/expr/func/TrySem.scala | 1 - .../definitions/DefinitionsInterpreter.scala | 2 +- .../locations/DummyLocationsInterpreter.scala | 6 +-- .../rules/locations/LocationsAlgebra.scala | 10 ++-- .../rules/locations/LocationsState.scala | 27 ++++++---- .../rules/names/NamesInterpreter.scala | 10 ++-- .../rules/types/TypesInterpreter.scala | 8 +-- types/src/main/scala/aqua/types/Type.scala | 51 +++++++++++++++++-- 18 files changed, 101 insertions(+), 138 deletions(-) delete mode 100644 language-server/language-server-api/.js/src/main/scala/aqua/lsp/TypeShow.scala diff --git a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/OutputTypes.scala b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/OutputTypes.scala index 967c144db..63c6ed129 100644 --- 
a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/OutputTypes.scala +++ b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/OutputTypes.scala @@ -12,11 +12,11 @@ case class CompilationResult( warnings: js.Array[WarningInfo] = js.Array(), locations: js.Array[TokenLink] = js.Array(), importLocations: js.Array[TokenImport] = js.Array(), - tokens: js.Array[TokenInfoJs] = js.Array() + tokens: js.Array[ExprInfoJs] = js.Array() ) @JSExportAll -case class TokenInfoJs(location: TokenLocation, `type`: String) +case class ExprInfoJs(location: TokenLocation, `type`: String) @JSExportAll case class TokenLocation(name: String, startLine: Int, startCol: Int, endLine: Int, endCol: Int) diff --git a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala index 959480546..a43ac8e71 100644 --- a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala +++ b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala @@ -9,7 +9,7 @@ import aqua.lsp.AquaLSP.logger import aqua.parser.lexer.{LiteralToken, Token} import aqua.parser.lift.{FileSpan, Span} import aqua.parser.{ArrowReturnError, BlockIndentError, LexerError, ParserError} -import aqua.semantics.rules.locations.TokenInfo +import aqua.semantics.rules.locations.ExprInfo import aqua.semantics.{HeaderError, RulesViolated, SemanticWarning, WrongAST} import cats.syntax.show.* @@ -19,7 +19,7 @@ import scribe.Logging object ResultHelper extends Logging { - import TypeShow.given + import aqua.types.Type.given def warningToInfo( warning: AquaWarning[FileSpan.F] @@ -81,11 +81,11 @@ object ResultHelper extends Logging { errors.toChain.toList.map(ErrorInfo.applyOp(0, 0, _, None)) } - private def tokensToJs(tokens: List[TokenInfo[FileSpan.F]]): js.Array[TokenInfoJs] = + private def tokensToJs(tokens: List[ExprInfo[FileSpan.F]]): js.Array[ExprInfoJs] = tokens.flatMap { ti => TokenLocation.fromSpan(ti.token.unit._1).map { tl => val typeName = ti.`type`.show - TokenInfoJs(tl, typeName) + ExprInfoJs(tl, typeName) } }.toJSArray diff --git a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/TypeShow.scala b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/TypeShow.scala deleted file mode 100644 index 2119a996e..000000000 --- a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/TypeShow.scala +++ /dev/null @@ -1,51 +0,0 @@ -package aqua.lsp - -import aqua.types.* - -import cats.Show -import cats.syntax.show.* - -object TypeShow { - given Show[DataType] = { - case LiteralType.signed => - "i32" - case LiteralType.unsigned => - "u32" - case LiteralType.number => - "u32" - case LiteralType.float => - "f32" - case LiteralType.string => - "string" - case LiteralType.bool => - "bool" - case t => - t.toString - } - - given Show[Type] = { - case ArrayType(el) => - s"[]${el.show}" - case OptionType(el) => - s"?${el.show}" - case StreamType(el) => - s"*${el.show}" - case ArrowType(domain, codomain) => - val domainStr = domain match { - case _: LabeledConsType => - domain.toLabelledList().map { case (s, t) => s"$s: ${t.show}" }.mkString("(", ", ", ")") - case _ => domain.toList.mkString("(", ", ", ")") - } - val codomainStr = codomain.toList match { - case Nil => "" - case l => " -> " + l.mkString(", ") - } - domainStr + codomainStr - case nt: NamedType => - s"${nt.fullName}(${nt.fields.map(_.show).toNel.toList.map(kv => kv._1 + ": " + kv._2).mkString(", 
")})" - case t: DataType => - t.show - case t => - t.toString - } -} diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala b/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala index c522e25d2..33644fb4c 100644 --- a/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala +++ b/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala @@ -1,12 +1,10 @@ package aqua.lsp import aqua.parser.lexer.Token -import aqua.semantics.rules.StackInterpreter -import aqua.semantics.rules.locations.{LocationsAlgebra, LocationsState, TokenInfo} +import aqua.semantics.rules.locations.{ExprInfo, LocationsAlgebra, LocationsState} import cats.data.State import monocle.Lens -import monocle.macros.GenLens import scribe.Logging class LocationsInterpreter[S[_], X](using @@ -15,13 +13,7 @@ class LocationsInterpreter[S[_], X](using type SX[A] = State[X, A] - val stack = new StackInterpreter[S, X, LocationsState[S], LocationsState[S]]( - GenLens[LocationsState[S]](_.stack) - ) - - import stack.* - - override def addToken(name: String, tokenInfo: TokenInfo[S]): State[X, Unit] = modify { st => + override def addToken(name: String, tokenInfo: ExprInfo[S]): State[X, Unit] = modify { st => st.copy(tokens = (name, tokenInfo) +: st.tokens) } @@ -29,11 +21,13 @@ class LocationsInterpreter[S[_], X](using override def addTokenWithFields( name: String, - token: TokenInfo[S], - fields: List[(String, TokenInfo[S])] + token: ExprInfo[S], + fields: List[(String, ExprInfo[S])] ): State[X, Unit] = { val allTokens = - ((name, token) +: fields.map(kv => (combineFieldName(name, kv._1), kv._2))).toMap + ((name, token) +: fields.map { case (fieldName, info) => + combineFieldName(name, fieldName) -> info + }).toMap modify { st => st.copy(tokens = st.tokens ++ allTokens) } @@ -54,18 +48,9 @@ class LocationsInterpreter[S[_], X](using } yield {} } - private def findTokenByName( - st: LocationsState[S], - name: String, - token: Token[S] - ): Option[(Token[S], Token[S])] = - (st.stack.view.flatMap(_.tokens).collectFirst { - case (s, t) if s == name => t - } orElse st.tokens.find(_._1 == name).map(_._2)).map(token -> _.token) - override def pointLocation(name: String, token: Token[S]): State[X, Unit] = { modify { st => - val newLoc = findTokenByName(st, name, token) + val newLoc = st.findTokenByName(name, token) st.copy(locations = st.locations ++ newLoc.toList) } } @@ -73,16 +58,11 @@ class LocationsInterpreter[S[_], X](using def pointLocations(locations: List[(String, Token[S])]): State[X, Unit] = modify { st => val newLocs = locations.flatMap { case (name, token) => - findTokenByName(st, name, token) + st.findTokenByName(name, token) } st.copy(locations = st.locations ++ newLocs) } private def modify(f: LocationsState[S] => LocationsState[S]): SX[Unit] = State.modify(lens.modify(f)) - - override def beginScope(): SX[Unit] = - stack.beginScope(LocationsState[S]()) - - override def endScope(): SX[Unit] = stack.endScope } diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala b/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala index 1deb4d794..2b10eafe2 100644 --- a/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala +++ b/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala @@ -2,10 +2,11 @@ package aqua.lsp import aqua.parser.lexer.{LiteralToken, NamedTypeToken, Token} import 
aqua.raw.{RawContext, RawPart} -import aqua.semantics.{SemanticError, SemanticWarning} import aqua.semantics.header.Picker -import aqua.semantics.rules.locations.TokenInfo +import aqua.semantics.rules.locations.ExprInfo +import aqua.semantics.{SemanticError, SemanticWarning} import aqua.types.{ArrowType, Type} + import cats.syntax.monoid.* import cats.{Monoid, Semigroup} @@ -15,7 +16,7 @@ case class LspContext[S[_]]( abDefinitions: Map[String, NamedTypeToken[S]] = Map.empty[String, NamedTypeToken[S]], rootArrows: Map[String, ArrowType] = Map.empty[String, ArrowType], constants: Map[String, Type] = Map.empty[String, Type], - tokens: List[(String, TokenInfo[S])] = Nil, + tokens: List[(String, ExprInfo[S])] = Nil, locations: List[(Token[S], Token[S])] = Nil, importTokens: List[LiteralToken[S]] = Nil, errors: List[SemanticError[S]] = Nil, @@ -34,6 +35,7 @@ object LspContext { rootArrows = x.rootArrows ++ y.rootArrows, constants = x.constants ++ y.constants, locations = x.locations ++ y.locations, + importTokens = x.importTokens ++ y.importTokens, tokens = x.tokens ++ y.tokens, errors = x.errors ++ y.errors, warnings = x.warnings ++ y.warnings diff --git a/semantics/src/main/scala/aqua/semantics/Prog.scala b/semantics/src/main/scala/aqua/semantics/Prog.scala index aadddaf44..813a000b1 100644 --- a/semantics/src/main/scala/aqua/semantics/Prog.scala +++ b/semantics/src/main/scala/aqua/semantics/Prog.scala @@ -38,14 +38,6 @@ sealed abstract class Prog[Alg[_]: Monad, A] extends (Alg[A] => Alg[A]) { (_: Unit, m: A) => N.endScope() as m ) ) - - def locationsScope[S[_]]()(implicit L: LocationsAlgebra[S, Alg]): Prog[Alg, A] = - wrap( - RunAround( - L.beginScope(), - (_: Unit, m: A) => L.endScope() as m - ) - ) } case class RunAfter[Alg[_]: Monad, A](prog: Alg[A]) extends Prog[Alg, A] { diff --git a/semantics/src/main/scala/aqua/semantics/expr/func/ArrowSem.scala b/semantics/src/main/scala/aqua/semantics/expr/func/ArrowSem.scala index ac8e77713..abcaa290e 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/func/ArrowSem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/func/ArrowSem.scala @@ -137,7 +137,6 @@ class ArrowSem[S[_]](val expr: ArrowExpr[S]) extends AnyVal { T: TypesAlgebra[S, Alg], N: NamesAlgebra[S, Alg], A: AbilitiesAlgebra[S, Alg], - L: LocationsAlgebra[S, Alg], M: ManglerAlgebra[Alg] ): Prog[Alg, Raw] = Prog @@ -147,6 +146,4 @@ class ArrowSem[S[_]](val expr: ArrowExpr[S]) extends AnyVal { ) .abilitiesScope(expr.arrowTypeExpr) .namesScope(expr.arrowTypeExpr) - .locationsScope() - } diff --git a/semantics/src/main/scala/aqua/semantics/expr/func/CatchSem.scala b/semantics/src/main/scala/aqua/semantics/expr/func/CatchSem.scala index 7ac8b160f..9a815542d 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/func/CatchSem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/func/CatchSem.scala @@ -42,6 +42,4 @@ class CatchSem[S[_]](val expr: CatchExpr[S]) extends AnyVal { ) .abilitiesScope[S](expr.token) .namesScope(expr.token) - .locationsScope() - } diff --git a/semantics/src/main/scala/aqua/semantics/expr/func/ElseOtherwiseSem.scala b/semantics/src/main/scala/aqua/semantics/expr/func/ElseOtherwiseSem.scala index 2dc27746f..df98dd711 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/func/ElseOtherwiseSem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/func/ElseOtherwiseSem.scala @@ -39,5 +39,4 @@ class ElseOtherwiseSem[S[_]](val expr: ElseOtherwiseExpr[S]) extends AnyVal { ) .abilitiesScope(expr.token) .namesScope(expr.token) - .locationsScope() } diff 
--git a/semantics/src/main/scala/aqua/semantics/expr/func/IfSem.scala b/semantics/src/main/scala/aqua/semantics/expr/func/IfSem.scala index 43acdabe1..bd25334bf 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/func/IfSem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/func/IfSem.scala @@ -56,5 +56,4 @@ class IfSem[S[_]](val expr: IfExpr[S]) extends AnyVal { ) .abilitiesScope[S](expr.token) .namesScope[S](expr.token) - .locationsScope() } diff --git a/semantics/src/main/scala/aqua/semantics/expr/func/TrySem.scala b/semantics/src/main/scala/aqua/semantics/expr/func/TrySem.scala index 1b6297f71..d5b286a3a 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/func/TrySem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/func/TrySem.scala @@ -37,5 +37,4 @@ class TrySem[S[_]](val expr: TryExpr[S]) extends AnyVal { ) .abilitiesScope(expr.token) .namesScope(expr.token) - .locationsScope() } diff --git a/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsInterpreter.scala index 41a06156e..72b63f3f7 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsInterpreter.scala @@ -4,7 +4,7 @@ import aqua.parser.lexer.{Name, NamedTypeToken, Token} import aqua.semantics.rules.StackInterpreter import aqua.semantics.rules.report.ReportAlgebra import aqua.semantics.rules.abilities.AbilitiesState -import aqua.semantics.rules.locations.{LocationsAlgebra, LocationsState, TokenInfo} +import aqua.semantics.rules.locations.{LocationsAlgebra, LocationsState, ExprInfo} import aqua.semantics.rules.types.TypesState import aqua.types.{ArrowType, Type} import cats.data.{NonEmptyList, NonEmptyMap, State} diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/DummyLocationsInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/DummyLocationsInterpreter.scala index a77700478..d038e0b8d 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/locations/DummyLocationsInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/locations/DummyLocationsInterpreter.scala @@ -10,12 +10,12 @@ import monocle.macros.GenLens class DummyLocationsInterpreter[S[_], X] extends LocationsAlgebra[S, State[X, *]] { - def addToken(name: String, tokenInfo: TokenInfo[S]): State[X, Unit] = State.pure(()) + def addToken(name: String, tokenInfo: ExprInfo[S]): State[X, Unit] = State.pure(()) def addTokenWithFields( name: String, - token: TokenInfo[S], - fields: List[(String, TokenInfo[S])] + token: ExprInfo[S], + fields: List[(String, ExprInfo[S])] ): State[X, Unit] = State.pure(()) def pointFieldLocation(typeName: String, fieldName: String, token: Token[S]): State[X, Unit] = diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala index f2a4d78c9..247750729 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala @@ -2,18 +2,14 @@ package aqua.semantics.rules.locations import aqua.parser.lexer.Token import aqua.types.Type -case class TokenInfo[S[_]](token: Token[S], `type`: Type) +case class ExprInfo[S[_]](token: Token[S], `type`: Type) trait LocationsAlgebra[S[_], Alg[_]] { - def addToken(name: String, tokenInfo: 
TokenInfo[S]): Alg[Unit] - def addTokenWithFields(name: String, token: TokenInfo[S], fields: List[(String, TokenInfo[S])]): Alg[Unit] + def addToken(name: String, tokenInfo: ExprInfo[S]): Alg[Unit] + def addTokenWithFields(name: String, token: ExprInfo[S], fields: List[(String, ExprInfo[S])]): Alg[Unit] def pointTokenWithFieldLocation(typeName: String, typeToken: Token[S], fieldName: String, token: Token[S]): Alg[Unit] def pointFieldLocation(typeName: String, fieldName: String, token: Token[S]): Alg[Unit] def pointLocation(name: String, token: Token[S]): Alg[Unit] def pointLocations(locations: List[(String, Token[S])]): Alg[Unit] - - def beginScope(): Alg[Unit] - - def endScope(): Alg[Unit] } diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala index 6d5cf08b2..c2085fbc5 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala @@ -2,25 +2,32 @@ package aqua.semantics.rules.locations import aqua.parser.lexer.Token import aqua.semantics.rules.types.TypesState + import cats.kernel.Monoid case class LocationsState[S[_]]( - tokens: List[(String, TokenInfo[S])] = Nil, - locations: List[(Token[S], Token[S])] = Nil, - stack: List[LocationsState[S]] = Nil + tokens: List[(String, ExprInfo[S])] = Nil, + locations: List[(Token[S], Token[S])] = Nil ) { lazy val allLocations: List[(Token[S], Token[S])] = locations + + def findTokenByName( + name: String, + token: Token[S] + ): Option[(Token[S], Token[S])] = + tokens.find(_._1 == name).map(_._2).map(token -> _.token) } object LocationsState { - implicit def locationsStateMonoid[S[_]]: Monoid[LocationsState[S]] = new Monoid[LocationsState[S]] { - override def empty: LocationsState[S] = LocationsState() + implicit def locationsStateMonoid[S[_]]: Monoid[LocationsState[S]] = + new Monoid[LocationsState[S]] { + override def empty: LocationsState[S] = LocationsState() - override def combine(x: LocationsState[S], y: LocationsState[S]): LocationsState[S] = - LocationsState( - tokens = x.tokens ++ y.tokens - ) - } + override def combine(x: LocationsState[S], y: LocationsState[S]): LocationsState[S] = + LocationsState( + tokens = x.tokens ++ y.tokens + ) + } } diff --git a/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala index dfc15355b..76ee5a78a 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala @@ -4,7 +4,7 @@ import aqua.errors.Errors.internalError import aqua.parser.lexer.{Name, Token} import aqua.semantics.Levenshtein import aqua.semantics.rules.StackInterpreter -import aqua.semantics.rules.locations.{LocationsAlgebra, TokenInfo} +import aqua.semantics.rules.locations.{LocationsAlgebra, ExprInfo} import aqua.semantics.rules.report.ReportAlgebra import aqua.types.{ArrowType, StreamType, Type} import cats.data.{OptionT, State} @@ -116,13 +116,13 @@ class NamesInterpreter[S[_], X](using case None => mapStackHeadM(report.error(name, "Cannot define a variable in the root scope").as(false))( fr => (fr.addName(name, `type`) -> true).pure - ) <* locations.addToken(name.value, TokenInfo(name, `type`)) + ) <* locations.addToken(name.value, ExprInfo(name, `type`)) } override def derive(name: Name[S], `type`: Type, 
derivedFrom: Set[String]): State[X, Boolean] = define(name, `type`).flatTap(defined => mapStackHead_(_.derived(name, derivedFrom)).whenA(defined) - ) <* locations.addToken(name.value, TokenInfo(name, `type`)) + ) <* locations.addToken(name.value, ExprInfo(name, `type`)) override def getDerivedFrom(fromNames: List[Set[String]]): State[X, List[Set[String]]] = mapStackHead(Nil)(frame => @@ -141,7 +141,7 @@ class NamesInterpreter[S[_], X](using constants = st.constants.updated(name.value, `type`) ) ).as(true) - }.flatTap(_ => locations.addToken(name.value, TokenInfo(name, `type`))) + }.flatTap(_ => locations.addToken(name.value, ExprInfo(name, `type`))) override def defineArrow(name: Name[S], arrowType: ArrowType, isRoot: Boolean): SX[Boolean] = readName(name.value).flatMap { @@ -166,7 +166,7 @@ class NamesInterpreter[S[_], X](using .error(name, "Cannot define a variable in the root scope") .as(false) )(fr => (fr.addArrow(name, arrowType) -> true).pure) - }.flatTap(_ => locations.addToken(name.value, TokenInfo[S](name, arrowType))) + }.flatTap(_ => locations.addToken(name.value, ExprInfo[S](name, arrowType))) override def streamsDefinedWithinScope(): SX[Map[String, StreamType]] = mapStackHead(Map.empty) { frame => diff --git a/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala index 6c9598d02..787cec488 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala @@ -3,7 +3,7 @@ package aqua.semantics.rules.types import aqua.parser.lexer.* import aqua.raw.value.* import aqua.semantics.rules.StackInterpreter -import aqua.semantics.rules.locations.{LocationsAlgebra, TokenInfo} +import aqua.semantics.rules.locations.{ExprInfo, LocationsAlgebra} import aqua.semantics.rules.report.ReportAlgebra import aqua.semantics.rules.types.TypeResolution.TypeResolutionError import aqua.types.* @@ -143,8 +143,8 @@ class TypesInterpreter[S[_], X](using ) = locations.addTokenWithFields( name.value, - TokenInfo[S](name, t), - fields.view.mapValues(TokenInfo[S].apply).toList + ExprInfo[S](name, t), + fields.view.mapValues(ExprInfo[S].apply).toList ) override def defineStructType( @@ -183,7 +183,7 @@ class TypesInterpreter[S[_], X](using case Some(_) => report.error(name, s"Type `${name.value}` was already defined").as(false) case None => modify(_.defineType(name, target)) - .productL(locations.addToken(name.value, TokenInfo(name.asName, target))) + .productL(locations.addToken(name.value, ExprInfo(name.asName, target))) .as(true) } diff --git a/types/src/main/scala/aqua/types/Type.scala b/types/src/main/scala/aqua/types/Type.scala index e5f6bf7eb..1ddd8058a 100644 --- a/types/src/main/scala/aqua/types/Type.scala +++ b/types/src/main/scala/aqua/types/Type.scala @@ -4,15 +4,15 @@ import aqua.errors.Errors.internalError import aqua.types.* import aqua.types.Type.* -import cats.data.NonEmptyList import cats.data.NonEmptyMap import cats.syntax.applicative.* import cats.syntax.foldable.* import cats.syntax.functor.* import cats.syntax.option.* import cats.syntax.partialOrder.* +import cats.syntax.show.* import cats.syntax.traverse.* -import cats.{Eval, Foldable, Functor, PartialOrder, Traverse} +import cats.{Eval, Foldable, Functor, PartialOrder, Show, Traverse} import scala.collection.immutable.SortedMap sealed trait Type { @@ -282,7 +282,8 @@ object CollectionType { .map[Type] { case StreamType(el) => 
ArrayType(el) case dt: DataType => dt - }.reduceLeftOption(_ `∩` _) + } + .reduceLeftOption(_ `∩` _) .map { // In case we mix values of uncomparable types, intersection returns bottom, meaning "uninhabited type". // But we want to get to TopType instead: this would mean that intersection is empty, and you cannot @@ -516,4 +517,48 @@ object Type { given PartialOrder[Type] = CompareTypes.partialOrder + + given Show[DataType] = { + case LiteralType.signed => + "i32" + case LiteralType.unsigned => + "u32" + case LiteralType.number => + "u32" + case LiteralType.float => + "f32" + case LiteralType.string => + "string" + case LiteralType.bool => + "bool" + case t => + t.toString + } + + // pretty print for Type + given Show[Type] = { + case ArrayType(el) => + s"[]${el.show}" + case OptionType(el) => + s"?${el.show}" + case StreamType(el) => + s"*${el.show}" + case ArrowType(domain, codomain) => + val domainStr = domain match { + case _: LabeledConsType => + domain.toLabelledList().map { case (s, t) => s"$s: ${t.show}" }.mkString("(", ", ", ")") + case _ => domain.toList.mkString("(", ", ", ")") + } + val codomainStr = codomain.toList match { + case Nil => "" + case l => " -> " + l.mkString(", ") + } + domainStr + codomainStr + case nt: NamedType => + s"${nt.fullName}(${nt.fields.map(_.show).toNel.toList.map(kv => kv._1 + ": " + kv._2).mkString(", ")})" + case t: DataType => + t.show + case t => + t.toString + } } From 995e4d19b24fc79c868b7d41953030d3699cf297 Mon Sep 17 00:00:00 2001 From: DieMyst Date: Mon, 4 Dec 2023 19:03:25 +0700 Subject: [PATCH 23/30] delete duplications, add tests --- .../scala/aqua/lsp/LocationsInterpreter.scala | 8 +- .../src/test/scala/aqua/lsp/AquaLSPSpec.scala | 200 ++++++++++++++++++ .../main/scala/aqua/parser/lexer/Token.scala | 2 - .../rules/locations/LocationsState.scala | 7 + 4 files changed, 211 insertions(+), 6 deletions(-) create mode 100644 language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala b/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala index 33644fb4c..bba5feaca 100644 --- a/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala +++ b/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala @@ -14,7 +14,7 @@ class LocationsInterpreter[S[_], X](using type SX[A] = State[X, A] override def addToken(name: String, tokenInfo: ExprInfo[S]): State[X, Unit] = modify { st => - st.copy(tokens = (name, tokenInfo) +: st.tokens) + st.addToken(name, tokenInfo) } private def combineFieldName(name: String, field: String): String = name + "." 
+ field @@ -25,11 +25,11 @@ class LocationsInterpreter[S[_], X](using fields: List[(String, ExprInfo[S])] ): State[X, Unit] = { val allTokens = - ((name, token) +: fields.map { case (fieldName, info) => + (name, token) +: fields.map { case (fieldName, info) => combineFieldName(name, fieldName) -> info - }).toMap + } modify { st => - st.copy(tokens = st.tokens ++ allTokens) + st.addTokens(allTokens) } } diff --git a/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala b/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala new file mode 100644 index 000000000..543801cea --- /dev/null +++ b/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala @@ -0,0 +1,200 @@ +package aqua.lsp + +import aqua.compiler.{AquaCompilerConf, AquaError, AquaSources} +import aqua.parser.Parser +import aqua.parser.lift.Span +import aqua.parser.lift.Span.S +import aqua.raw.ConstantRaw +import aqua.types.* + +import cats.Id +import cats.data.* +import cats.instances.string.* +import org.scalatest.Inside +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +class AquaLSPSpec extends AnyFlatSpec with Matchers with Inside { + + extension (c: LspContext[Span.S]) { + + def checkLocations( + defStart: Int, + defEnd: Int, + useStart: Int, + useEnd: Int + ): Boolean = + c.locations.exists { case (useT, defT) => + val defSpan = defT.unit._1 + val useSpan = useT.unit._1 + defSpan.startIndex == defStart && defSpan.endIndex == defEnd && useSpan.startIndex == useStart && useSpan.endIndex == useEnd + } + + def checkTokenLoc( + checkName: String, + start: Int, + end: Int, + `type`: Type + ): Boolean = { + val res = c.tokens.exists { case (name, expr) => + val span = expr.token.unit._1 + name == checkName && span.startIndex == start && span.endIndex == end && expr.`type` == `type` + } + + /*println(tokens.filter(v => v._1 == checkName && v._2.`type` == `type`).map { + case (name, expr) => + val span = expr.token.unit._1 + println(s"$name(${span.startIndex}:${span.endIndex}) ${expr.`type`}") + })*/ + + res + } + } + + private def aquaSource(src: Map[String, String], imports: Map[String, String]) = { + new AquaSources[Id, String, String] { + + override def sources: Id[ValidatedNec[String, Chain[(String, String)]]] = + Validated.validNec(Chain.fromSeq(src.toSeq)) + + override def resolveImport(from: String, imp: String): Id[ValidatedNec[String, String]] = + Validated.validNec(imp) + + override def load(file: String): Id[ValidatedNec[String, String]] = + Validated.fromEither( + (imports ++ src) + .get(file) + .toRight(NonEmptyChain.one(s"Cannot load imported file $file")) + ) + } + } + + def compile( + src: Map[String, String], + imports: Map[String, String] = Map.empty + ): ValidatedNec[AquaError[String, String, S], Map[String, LspContext[S]]] = { + LSPCompiler + .compileToLsp[Id, String, String, Span.S]( + aquaSource(src, imports), + id => txt => Parser.parse(Parser.parserSchema)(txt), + AquaCompilerConf(ConstantRaw.defaultConstants(None)) + ) + } + + it should "return right tokens" in { + val src = Map( + "index.aqua" -> + """module Import + |import foo, str, num from "export2.aqua" + | + |import "../gen/OneMore.aqua" + | + |func foo_wrapper() -> string: + | z <- foo() + | if 1 == 1: + | a = "aaa" + | str(a) + | else: + | a = 123 + | num(a) + | OneMore z + | OneMore.more_call() + |""".stripMargin + ) + + val imports = Map( + "export2.aqua" -> + """module Export declares str, num, foo + | + |func str(a: string) -> string: + 
| <- a + | + |func num(a: u32) -> u32: + | <- a + | + |func foo() -> string: + | <- "I am MyFooBar foo" + | + |""".stripMargin, + "../gen/OneMore.aqua" -> + """ + |service OneMore: + | more_call() + | consume(s: string) + |""".stripMargin + ) + + val res = compile(src, imports).toOption.get.values.head + + val serviceType = ServiceType( + "OneMore", + NonEmptyMap.of( + ("more_call", ArrowType(NilType, NilType)), + ("consume", ArrowType(ProductType.labelled(("s", ScalarType.string) :: Nil), NilType)) + ) + ) + + println(res.locations.map { case (l, r) => + val lSpan = l.unit._1 + val rSpan = r.unit._1 + s"($l($lSpan):$r($rSpan))" + }) + + // inside `foo_wrapper` func + res.checkTokenLoc("z", 120, 121, ScalarType.string) shouldBe true + res.checkLocations(120, 121, 224, 225) shouldBe true + + res.checkTokenLoc("a", 152, 153, LiteralType.string) shouldBe true + res.checkLocations(152, 153, 172, 173) shouldBe true + res.checkTokenLoc("a", 191, 192, LiteralType.unsigned) shouldBe true + res.checkLocations(191, 192, 209, 210) shouldBe true + + // num usage + res.checkLocations(84, 87, 205, 208) shouldBe true + // str usage + res.checkLocations(43, 46, 168, 171) shouldBe true + + // this is tokens from imports, if we will use `FileSpan.F` file names will be different + // OneMore service + res.checkTokenLoc("OneMore", 9, 16, serviceType) shouldBe true + res.checkTokenLoc("OneMore.more_call", 20, 29, ArrowType(NilType, NilType)) shouldBe true + res.checkTokenLoc( + "OneMore.consume", + 34, + 41, + ArrowType(ProductType.labelled(("s", ScalarType.string) :: Nil), NilType) + ) shouldBe true + + // str function and argument + res.checkTokenLoc( + "str", + 43, + 46, + ArrowType( + ProductType.labelled(("a", ScalarType.string) :: Nil), + ProductType(ScalarType.string :: Nil) + ) + ) shouldBe true + res.checkTokenLoc("a", 47, 48, ScalarType.string) shouldBe true + + // num function and argument + res.checkTokenLoc( + "num", + 84, + 87, + ArrowType( + ProductType.labelled(("a", ScalarType.u32) :: Nil), + ProductType(ScalarType.u32 :: Nil) + ) + ) shouldBe true + res.checkTokenLoc("a", 88, 89, ScalarType.u32) shouldBe true + + // foo function + res.checkTokenLoc( + "foo", + 119, + 122, + ArrowType(NilType, ProductType(ScalarType.string :: Nil)) + ) shouldBe true + } +} diff --git a/parser/src/main/scala/aqua/parser/lexer/Token.scala b/parser/src/main/scala/aqua/parser/lexer/Token.scala index 82f1f841e..318e35bfe 100644 --- a/parser/src/main/scala/aqua/parser/lexer/Token.scala +++ b/parser/src/main/scala/aqua/parser/lexer/Token.scala @@ -1,7 +1,5 @@ package aqua.parser.lexer -import aqua.parser.lift.Span.S - import cats.data.NonEmptyList import cats.parse.{Accumulator0, Parser as P, Parser0 as P0} import cats.syntax.functor.* diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala index c2085fbc5..0d8154617 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala @@ -12,6 +12,13 @@ case class LocationsState[S[_]]( lazy val allLocations: List[(Token[S], Token[S])] = locations + // TODO: optimize distinction + def addTokens(newTokens: List[(String, ExprInfo[S])]): LocationsState[S] = + copy(tokens = (newTokens ++ tokens).distinct) + + def addToken(newToken: (String, ExprInfo[S])): LocationsState[S] = + copy(tokens = (newToken +: tokens).distinct) + def findTokenByName( name: String, 
token: Token[S] From ce241a88a5853c038e378cc5d73e7e18ea935ae1 Mon Sep 17 00:00:00 2001 From: DieMyst Date: Wed, 6 Dec 2023 15:07:18 +0700 Subject: [PATCH 24/30] all to list --- .../main/scala/aqua/lsp/ResultHelper.scala | 8 +-- .../scala/aqua/lsp/LocationsInterpreter.scala | 27 ++++------ .../src/main/scala/aqua/lsp/LspContext.scala | 31 ++++++----- .../main/scala/aqua/lsp/LspSemantics.scala | 5 +- .../src/test/scala/aqua/lsp/AquaLSPSpec.scala | 11 ++-- .../definitions/DefinitionsInterpreter.scala | 12 ++--- .../locations/DummyLocationsInterpreter.scala | 9 ++-- .../rules/locations/LocationsAlgebra.scala | 9 ++-- .../rules/locations/LocationsState.scala | 51 ++++++++++++++----- .../rules/names/NamesInterpreter.scala | 10 ++-- .../rules/types/TypesInterpreter.scala | 11 ++-- 11 files changed, 103 insertions(+), 81 deletions(-) diff --git a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala index a43ac8e71..50da17ada 100644 --- a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala +++ b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala @@ -9,7 +9,7 @@ import aqua.lsp.AquaLSP.logger import aqua.parser.lexer.{LiteralToken, Token} import aqua.parser.lift.{FileSpan, Span} import aqua.parser.{ArrowReturnError, BlockIndentError, LexerError, ParserError} -import aqua.semantics.rules.locations.ExprInfo +import aqua.semantics.rules.locations.DefinitionInfo import aqua.semantics.{HeaderError, RulesViolated, SemanticWarning, WrongAST} import cats.syntax.show.* @@ -81,7 +81,7 @@ object ResultHelper extends Logging { errors.toChain.toList.map(ErrorInfo.applyOp(0, 0, _, None)) } - private def tokensToJs(tokens: List[ExprInfo[FileSpan.F]]): js.Array[ExprInfoJs] = + private def tokensToJs(tokens: List[DefinitionInfo[FileSpan.F]]): js.Array[ExprInfoJs] = tokens.flatMap { ti => TokenLocation.fromSpan(ti.token.unit._1).map { tl => val typeName = ti.`type`.show @@ -126,9 +126,9 @@ object ResultHelper extends Logging { CompilationResult( errors.toJSArray, warnings.toJSArray, - locationsToJs(lsp.locations), + locationsToJs(lsp.variables.flatMap(v => v.allLocations)), importsToTokenImport(lsp.importTokens), - tokensToJs(lsp.tokens.map(_._2)) + tokensToJs(lsp.variables.map(_.definition)) ) } } diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala b/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala index bba5feaca..5964f671b 100644 --- a/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala +++ b/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala @@ -1,7 +1,7 @@ package aqua.lsp import aqua.parser.lexer.Token -import aqua.semantics.rules.locations.{ExprInfo, LocationsAlgebra, LocationsState} +import aqua.semantics.rules.locations.{DefinitionInfo, LocationsAlgebra, LocationsState} import cats.data.State import monocle.Lens @@ -13,23 +13,22 @@ class LocationsInterpreter[S[_], X](using type SX[A] = State[X, A] - override def addToken(name: String, tokenInfo: ExprInfo[S]): State[X, Unit] = modify { st => - st.addToken(name, tokenInfo) + override def addDefinition(definition: DefinitionInfo[S]): State[X, Unit] = modify { st => + st.addDefinition(definition) } private def combineFieldName(name: String, field: String): String = name + "." 
+ field - override def addTokenWithFields( - name: String, - token: ExprInfo[S], - fields: List[(String, ExprInfo[S])] + override def addDefinitionWithFields( + definition: DefinitionInfo[S], + fields: List[DefinitionInfo[S]] ): State[X, Unit] = { val allTokens = - (name, token) +: fields.map { case (fieldName, info) => - combineFieldName(name, fieldName) -> info + definition +: fields.map { fieldDef => + fieldDef.copy(name = combineFieldName(definition.name, fieldDef.name)) } modify { st => - st.addTokens(allTokens) + st.addDefinitions(allTokens) } } @@ -50,17 +49,13 @@ class LocationsInterpreter[S[_], X](using override def pointLocation(name: String, token: Token[S]): State[X, Unit] = { modify { st => - val newLoc = st.findTokenByName(name, token) - st.copy(locations = st.locations ++ newLoc.toList) + st.addLocation(name, token) } } def pointLocations(locations: List[(String, Token[S])]): State[X, Unit] = modify { st => - val newLocs = locations.flatMap { case (name, token) => - st.findTokenByName(name, token) - } - st.copy(locations = st.locations ++ newLocs) + st.addLocations(locations) } private def modify(f: LocationsState[S] => LocationsState[S]): SX[Unit] = diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala b/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala index 2b10eafe2..99d71d4b7 100644 --- a/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala +++ b/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala @@ -3,7 +3,7 @@ package aqua.lsp import aqua.parser.lexer.{LiteralToken, NamedTypeToken, Token} import aqua.raw.{RawContext, RawPart} import aqua.semantics.header.Picker -import aqua.semantics.rules.locations.ExprInfo +import aqua.semantics.rules.locations.VariableInfo import aqua.semantics.{SemanticError, SemanticWarning} import aqua.types.{ArrowType, Type} @@ -16,12 +16,13 @@ case class LspContext[S[_]]( abDefinitions: Map[String, NamedTypeToken[S]] = Map.empty[String, NamedTypeToken[S]], rootArrows: Map[String, ArrowType] = Map.empty[String, ArrowType], constants: Map[String, Type] = Map.empty[String, Type], - tokens: List[(String, ExprInfo[S])] = Nil, - locations: List[(Token[S], Token[S])] = Nil, + variables: List[VariableInfo[S]] = Nil, importTokens: List[LiteralToken[S]] = Nil, errors: List[SemanticError[S]] = Nil, warnings: List[SemanticWarning[S]] = Nil -) +) { + lazy val allLocations: List[(Token[S], Token[S])] = variables.flatMap(_.allLocations) +} object LspContext { @@ -34,9 +35,8 @@ object LspContext { abDefinitions = x.abDefinitions ++ y.abDefinitions, rootArrows = x.rootArrows ++ y.rootArrows, constants = x.constants ++ y.constants, - locations = x.locations ++ y.locations, importTokens = x.importTokens ++ y.importTokens, - tokens = x.tokens ++ y.tokens, + variables = x.variables ++ y.variables, errors = x.errors ++ y.errors, warnings = x.warnings ++ y.warnings ) @@ -92,7 +92,9 @@ object LspContext { val prefix = name + "." 
ctx.copy( raw = ctx.raw.setAbility(name, ctxAb.raw), - tokens = ctx.tokens ++ ctxAb.tokens.map(kv => (prefix + kv._1) -> kv._2) + variables = ctx.variables ++ ctxAb.variables.map(v => + v.copy(definition = v.definition.copy(name = prefix + v.definition.name)) + ) ) override def setModule( @@ -115,13 +117,16 @@ object LspContext { declared: Boolean ): Option[LspContext[S]] = // rename tokens from one context with prefix addition - val newTokens = rename.map { renameStr => - ctx.tokens.map { - case (tokenName, token) if tokenName.startsWith(name) => - tokenName.replaceFirst(name, renameStr) -> token + val newVariables = rename.map { renameStr => + ctx.variables.map { + case v if v.definition.name.startsWith(name) => + v.copy(definition = + v.definition.copy(name = v.definition.name.replaceFirst(v.definition.name, renameStr)) + ) + case kv => kv } - }.getOrElse(ctx.tokens) + }.getOrElse(ctx.variables) ctx.raw .pick(name, rename, declared) @@ -134,7 +139,7 @@ object LspContext { ctx.rootArrows.get(name).fold(Map.empty)(t => Map(rename.getOrElse(name) -> t)), constants = ctx.constants.get(name).fold(Map.empty)(t => Map(rename.getOrElse(name) -> t)), - tokens = newTokens + variables = newVariables ) ) diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/LspSemantics.scala b/language-server/language-server-api/src/main/scala/aqua/lsp/LspSemantics.scala index b412e7989..b83700235 100644 --- a/language-server/language-server-api/src/main/scala/aqua/lsp/LspSemantics.scala +++ b/language-server/language-server-api/src/main/scala/aqua/lsp/LspSemantics.scala @@ -49,7 +49,7 @@ class LspSemantics[S[_]] extends Semantics[S, LspContext[S]] { definitions = rawState.abilities.definitions ++ init.abDefinitions ), locations = rawState.locations.copy( - tokens = rawState.locations.tokens ++ init.tokens + variables = rawState.locations.variables ++ init.variables ) ) @@ -69,9 +69,8 @@ class LspSemantics[S[_]] extends Semantics[S, LspContext[S]] { rootArrows = state.names.rootArrows, constants = state.names.constants, abDefinitions = state.abilities.definitions, - locations = state.locations.allLocations, importTokens = importTokens, - tokens = state.locations.tokens, + variables = state.locations.variables, errors = state.errors.toList, warnings = state.warnings.toList ).pure[Result] diff --git a/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala b/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala index 543801cea..166fa0d82 100644 --- a/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala +++ b/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala @@ -5,6 +5,7 @@ import aqua.parser.Parser import aqua.parser.lift.Span import aqua.parser.lift.Span.S import aqua.raw.ConstantRaw +import aqua.semantics.rules.locations.VariableInfo import aqua.types.* import cats.Id @@ -24,7 +25,7 @@ class AquaLSPSpec extends AnyFlatSpec with Matchers with Inside { useStart: Int, useEnd: Int ): Boolean = - c.locations.exists { case (useT, defT) => + c.allLocations.exists { case (useT, defT) => val defSpan = defT.unit._1 val useSpan = useT.unit._1 defSpan.startIndex == defStart && defSpan.endIndex == defEnd && useSpan.startIndex == useStart && useSpan.endIndex == useEnd @@ -36,9 +37,9 @@ class AquaLSPSpec extends AnyFlatSpec with Matchers with Inside { end: Int, `type`: Type ): Boolean = { - val res = c.tokens.exists { case (name, expr) => - val span = expr.token.unit._1 - name == checkName && span.startIndex == 
start && span.endIndex == end && expr.`type` == `type` + val res = c.variables.exists { case VariableInfo(definition, _) => + val span = definition.token.unit._1 + definition.name == checkName && span.startIndex == start && span.endIndex == end && definition.`type` == `type` } /*println(tokens.filter(v => v._1 == checkName && v._2.`type` == `type`).map { @@ -134,7 +135,7 @@ class AquaLSPSpec extends AnyFlatSpec with Matchers with Inside { ) ) - println(res.locations.map { case (l, r) => + println(res.allLocations.map { case (l, r) => val lSpan = l.unit._1 val rSpan = r.unit._1 s"($l($lSpan):$r($rSpan))" diff --git a/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsInterpreter.scala index 72b63f3f7..8abf21730 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsInterpreter.scala @@ -2,26 +2,24 @@ package aqua.semantics.rules.definitions import aqua.parser.lexer.{Name, NamedTypeToken, Token} import aqua.semantics.rules.StackInterpreter -import aqua.semantics.rules.report.ReportAlgebra import aqua.semantics.rules.abilities.AbilitiesState -import aqua.semantics.rules.locations.{LocationsAlgebra, LocationsState, ExprInfo} +import aqua.semantics.rules.report.ReportAlgebra import aqua.semantics.rules.types.TypesState import aqua.types.{ArrowType, Type} + import cats.data.{NonEmptyList, NonEmptyMap, State} -import monocle.Lens -import monocle.macros.GenLens import cats.syntax.applicative.* import cats.syntax.apply.* import cats.syntax.flatMap.* import cats.syntax.functor.* import cats.syntax.option.* - +import monocle.Lens +import monocle.macros.GenLens import scala.collection.immutable.SortedMap class DefinitionsInterpreter[S[_], X](implicit lens: Lens[X, DefinitionsState[S]], - report: ReportAlgebra[S, State[X, *]], - locations: LocationsAlgebra[S, State[X, *]] + report: ReportAlgebra[S, State[X, *]] ) extends DefinitionsAlgebra[S, State[X, *]] { type SX[A] = State[X, A] diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/DummyLocationsInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/DummyLocationsInterpreter.scala index d038e0b8d..fee4c1af1 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/locations/DummyLocationsInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/locations/DummyLocationsInterpreter.scala @@ -10,12 +10,11 @@ import monocle.macros.GenLens class DummyLocationsInterpreter[S[_], X] extends LocationsAlgebra[S, State[X, *]] { - def addToken(name: String, tokenInfo: ExprInfo[S]): State[X, Unit] = State.pure(()) + def addDefinition(definition: DefinitionInfo[S]): State[X, Unit] = State.pure(()) - def addTokenWithFields( - name: String, - token: ExprInfo[S], - fields: List[(String, ExprInfo[S])] + def addDefinitionWithFields( + definition: DefinitionInfo[S], + fields: List[DefinitionInfo[S]] ): State[X, Unit] = State.pure(()) def pointFieldLocation(typeName: String, fieldName: String, token: Token[S]): State[X, Unit] = diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala index 247750729..d52b4c2ee 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala 
@@ -2,11 +2,14 @@ package aqua.semantics.rules.locations import aqua.parser.lexer.Token import aqua.types.Type -case class ExprInfo[S[_]](token: Token[S], `type`: Type) +case class DefinitionInfo[S[_]](name: String, token: Token[S], `type`: Type) +case class VariableInfo[S[_]](definition: DefinitionInfo[S], occurrences: List[Token[S]] = Nil) { + def allLocations: List[(Token[S], Token[S])] = occurrences.map(_ -> definition.token) +} trait LocationsAlgebra[S[_], Alg[_]] { - def addToken(name: String, tokenInfo: ExprInfo[S]): Alg[Unit] - def addTokenWithFields(name: String, token: ExprInfo[S], fields: List[(String, ExprInfo[S])]): Alg[Unit] + def addDefinition(definition: DefinitionInfo[S]): Alg[Unit] + def addDefinitionWithFields(definition: DefinitionInfo[S], fields: List[DefinitionInfo[S]]): Alg[Unit] def pointTokenWithFieldLocation(typeName: String, typeToken: Token[S], fieldName: String, token: Token[S]): Alg[Unit] def pointFieldLocation(typeName: String, fieldName: String, token: Token[S]): Alg[Unit] diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala index 0d8154617..5b5b8503f 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala @@ -4,26 +4,49 @@ import aqua.parser.lexer.Token import aqua.semantics.rules.types.TypesState import cats.kernel.Monoid +import scala.annotation.{tailrec, unused} +import scribe.Logging case class LocationsState[S[_]]( - tokens: List[(String, ExprInfo[S])] = Nil, - locations: List[(Token[S], Token[S])] = Nil -) { + variables: List[VariableInfo[S]] = Nil +) extends Logging { - lazy val allLocations: List[(Token[S], Token[S])] = locations + def addDefinitions(newDefinitions: List[DefinitionInfo[S]]): LocationsState[S] = { + copy(variables = newDefinitions.map(d => VariableInfo(d)) ++ variables) + } - // TODO: optimize distinction - def addTokens(newTokens: List[(String, ExprInfo[S])]): LocationsState[S] = - copy(tokens = (newTokens ++ tokens).distinct) + def addDefinition(newDef: DefinitionInfo[S]): LocationsState[S] = + copy(variables = VariableInfo(newDef) +: variables) - def addToken(newToken: (String, ExprInfo[S])): LocationsState[S] = - copy(tokens = (newToken +: tokens).distinct) - - def findTokenByName( + private def addOccurrenceToFirst( + vars: List[VariableInfo[S]], + name: String, + token: Token[S] + ): List[VariableInfo[S]] = vars match { + case Nil => + logger.error(s"Unexpected. 
Cannot add occurrence for $name") + Nil + case head :: tail => + if (head.definition.name == name) + head.copy(occurrences = token +: head.occurrences) :: tail + else + head :: addOccurrenceToFirst(tail, name, token) + } + + def addLocation( name: String, token: Token[S] - ): Option[(Token[S], Token[S])] = - tokens.find(_._1 == name).map(_._2).map(token -> _.token) + ): LocationsState[S] = { + copy(variables = addOccurrenceToFirst(variables, name, token)) + } + + def addLocations( + locations: List[(String, Token[S])] + ): LocationsState[S] = { + locations.foldLeft(this) { case (st, (name, token)) => + st.copy(variables = addOccurrenceToFirst(variables, name, token)) + } + } } object LocationsState { @@ -34,7 +57,7 @@ object LocationsState { override def combine(x: LocationsState[S], y: LocationsState[S]): LocationsState[S] = LocationsState( - tokens = x.tokens ++ y.tokens + variables = x.variables ++ y.variables ) } } diff --git a/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala index 76ee5a78a..1e1994d92 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala @@ -4,7 +4,7 @@ import aqua.errors.Errors.internalError import aqua.parser.lexer.{Name, Token} import aqua.semantics.Levenshtein import aqua.semantics.rules.StackInterpreter -import aqua.semantics.rules.locations.{LocationsAlgebra, ExprInfo} +import aqua.semantics.rules.locations.{LocationsAlgebra, DefinitionInfo} import aqua.semantics.rules.report.ReportAlgebra import aqua.types.{ArrowType, StreamType, Type} import cats.data.{OptionT, State} @@ -116,13 +116,13 @@ class NamesInterpreter[S[_], X](using case None => mapStackHeadM(report.error(name, "Cannot define a variable in the root scope").as(false))( fr => (fr.addName(name, `type`) -> true).pure - ) <* locations.addToken(name.value, ExprInfo(name, `type`)) + ) <* locations.addDefinition(DefinitionInfo(name.value, name, `type`)) } override def derive(name: Name[S], `type`: Type, derivedFrom: Set[String]): State[X, Boolean] = define(name, `type`).flatTap(defined => mapStackHead_(_.derived(name, derivedFrom)).whenA(defined) - ) <* locations.addToken(name.value, ExprInfo(name, `type`)) + ) <* locations.addDefinition(DefinitionInfo(name.value, name, `type`)) override def getDerivedFrom(fromNames: List[Set[String]]): State[X, List[Set[String]]] = mapStackHead(Nil)(frame => @@ -141,7 +141,7 @@ class NamesInterpreter[S[_], X](using constants = st.constants.updated(name.value, `type`) ) ).as(true) - }.flatTap(_ => locations.addToken(name.value, ExprInfo(name, `type`))) + }.flatTap(_ => locations.addDefinition(DefinitionInfo(name.value, name, `type`))) override def defineArrow(name: Name[S], arrowType: ArrowType, isRoot: Boolean): SX[Boolean] = readName(name.value).flatMap { @@ -166,7 +166,7 @@ class NamesInterpreter[S[_], X](using .error(name, "Cannot define a variable in the root scope") .as(false) )(fr => (fr.addArrow(name, arrowType) -> true).pure) - }.flatTap(_ => locations.addToken(name.value, ExprInfo[S](name, arrowType))) + }.flatTap(_ => locations.addDefinition(DefinitionInfo[S](name.value, name, arrowType))) override def streamsDefinedWithinScope(): SX[Map[String, StreamType]] = mapStackHead(Map.empty) { frame => diff --git a/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala 
b/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala index 787cec488..4f76de4d8 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala @@ -3,7 +3,7 @@ package aqua.semantics.rules.types import aqua.parser.lexer.* import aqua.raw.value.* import aqua.semantics.rules.StackInterpreter -import aqua.semantics.rules.locations.{ExprInfo, LocationsAlgebra} +import aqua.semantics.rules.locations.{DefinitionInfo, LocationsAlgebra} import aqua.semantics.rules.report.ReportAlgebra import aqua.semantics.rules.types.TypeResolution.TypeResolutionError import aqua.types.* @@ -141,10 +141,9 @@ class TypesInterpreter[S[_], X](using t: NamedType, fields: Map[String, (Name[S], Type)] ) = - locations.addTokenWithFields( - name.value, - ExprInfo[S](name, t), - fields.view.mapValues(ExprInfo[S].apply).toList + locations.addDefinitionWithFields( + DefinitionInfo[S](name.value, name, t), + fields.map { case (n, (t, ty)) => DefinitionInfo[S](n, t, ty) }.toList ) override def defineStructType( @@ -183,7 +182,7 @@ class TypesInterpreter[S[_], X](using case Some(_) => report.error(name, s"Type `${name.value}` was already defined").as(false) case None => modify(_.defineType(name, target)) - .productL(locations.addToken(name.value, ExprInfo(name.asName, target))) + .productL(locations.addDefinition(DefinitionInfo(name.value, name.asName, target))) .as(true) } From e76adc6607f4329c39d216b97430aa541d8891c0 Mon Sep 17 00:00:00 2001 From: DieMyst Date: Wed, 6 Dec 2023 16:03:58 +0700 Subject: [PATCH 25/30] token location --- .../main/scala/aqua/lsp/ResultHelper.scala | 9 +++++---- .../scala/aqua/lsp/LocationsInterpreter.scala | 10 ++++------ .../src/main/scala/aqua/lsp/LspContext.scala | 12 ++++++------ .../src/test/scala/aqua/lsp/AquaLSPSpec.scala | 7 +++---- .../aqua/semantics/expr/AbilitySem.scala | 2 +- .../aqua/semantics/expr/DataStructSem.scala | 2 +- .../definitions/DefinitionsAlgebra.scala | 2 +- .../definitions/DefinitionsInterpreter.scala | 14 ++------------ .../locations/DummyLocationsInterpreter.scala | 7 +------ .../rules/locations/LocationsAlgebra.scala | 19 ++++++++++++------- .../rules/locations/LocationsState.scala | 5 +---- .../rules/locations/VariableInfo.scala | 11 +++++++++++ 12 files changed, 48 insertions(+), 52 deletions(-) create mode 100644 semantics/src/main/scala/aqua/semantics/rules/locations/VariableInfo.scala diff --git a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala index 50da17ada..17c51b20f 100644 --- a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala +++ b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala @@ -6,10 +6,10 @@ import aqua.compiler.{AquaError, AquaWarning} import aqua.files.FileModuleId import aqua.io.AquaFileError import aqua.lsp.AquaLSP.logger -import aqua.parser.lexer.{LiteralToken, Token} +import aqua.parser.lexer.LiteralToken import aqua.parser.lift.{FileSpan, Span} import aqua.parser.{ArrowReturnError, BlockIndentError, LexerError, ParserError} -import aqua.semantics.rules.locations.DefinitionInfo +import aqua.semantics.rules.locations.{DefinitionInfo, TokenLocation as TokenLoc} import aqua.semantics.{HeaderError, RulesViolated, SemanticWarning, WrongAST} import cats.syntax.show.* @@ -90,9 +90,9 @@ object ResultHelper extends Logging { }.toJSArray 
private def locationsToJs( - locations: List[(Token[FileSpan.F], Token[FileSpan.F])] + locations: List[TokenLoc[FileSpan.F]] ): js.Array[TokenLink] = - locations.flatMap { case (from, to) => + locations.flatMap { case TokenLoc(from, to) => val fromOp = TokenLocation.fromSpan(from.unit._1) val toOp = TokenLocation.fromSpan(to.unit._1) @@ -117,6 +117,7 @@ object ResultHelper extends Logging { def lspToCompilationResult(lsp: LspContext[FileSpan.F]): CompilationResult = { val errors = lsp.errors.map(CompileError.apply).flatMap(errorToInfo) val warnings = lsp.warnings.map(CompileWarning.apply).flatMap(warningToInfo) + errors match case Nil => logger.debug("No errors on compilation.") diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala b/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala index 5964f671b..084d55c7d 100644 --- a/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala +++ b/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala @@ -2,7 +2,7 @@ package aqua.lsp import aqua.parser.lexer.Token import aqua.semantics.rules.locations.{DefinitionInfo, LocationsAlgebra, LocationsState} - +import aqua.types.AbilityType import cats.data.State import monocle.Lens import scribe.Logging @@ -17,15 +17,13 @@ class LocationsInterpreter[S[_], X](using st.addDefinition(definition) } - private def combineFieldName(name: String, field: String): String = name + "." + field - override def addDefinitionWithFields( definition: DefinitionInfo[S], fields: List[DefinitionInfo[S]] ): State[X, Unit] = { val allTokens = definition +: fields.map { fieldDef => - fieldDef.copy(name = combineFieldName(definition.name, fieldDef.name)) + fieldDef.copy(name = AbilityType.fullName(definition.name, fieldDef.name)) } modify { st => st.addDefinitions(allTokens) @@ -33,7 +31,7 @@ class LocationsInterpreter[S[_], X](using } def pointFieldLocation(typeName: String, fieldName: String, token: Token[S]): State[X, Unit] = - pointLocation(combineFieldName(typeName, fieldName), token) + pointLocation(AbilityType.fullName(typeName, fieldName), token) def pointTokenWithFieldLocation( typeName: String, @@ -43,7 +41,7 @@ class LocationsInterpreter[S[_], X](using ): State[X, Unit] = { for { _ <- pointLocation(typeName, typeToken) - _ <- pointLocation(combineFieldName(typeName, fieldName), token) + _ <- pointLocation(AbilityType.fullName(typeName, fieldName), token) } yield {} } diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala b/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala index 99d71d4b7..4115d1038 100644 --- a/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala +++ b/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala @@ -3,10 +3,9 @@ package aqua.lsp import aqua.parser.lexer.{LiteralToken, NamedTypeToken, Token} import aqua.raw.{RawContext, RawPart} import aqua.semantics.header.Picker -import aqua.semantics.rules.locations.VariableInfo +import aqua.semantics.rules.locations.{TokenLocation, VariableInfo} import aqua.semantics.{SemanticError, SemanticWarning} -import aqua.types.{ArrowType, Type} - +import aqua.types.{AbilityType, ArrowType, Type} import cats.syntax.monoid.* import cats.{Monoid, Semigroup} @@ -21,7 +20,7 @@ case class LspContext[S[_]]( errors: List[SemanticError[S]] = Nil, warnings: List[SemanticWarning[S]] = Nil ) { - lazy val allLocations: 
List[(Token[S], Token[S])] = variables.flatMap(_.allLocations) + lazy val allLocations: List[TokenLocation[S]] = variables.flatMap(_.allLocations) } object LspContext { @@ -89,11 +88,12 @@ object LspContext { override def declares(ctx: LspContext[S]): Set[String] = ctx.raw.declares override def setAbility(ctx: LspContext[S], name: String, ctxAb: LspContext[S]): LspContext[S] = - val prefix = name + "." ctx.copy( raw = ctx.raw.setAbility(name, ctxAb.raw), variables = ctx.variables ++ ctxAb.variables.map(v => - v.copy(definition = v.definition.copy(name = prefix + v.definition.name)) + v.copy(definition = + v.definition.copy(name = AbilityType.fullName(name, v.definition.name)) + ) ) ) diff --git a/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala b/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala index 166fa0d82..6ae327311 100644 --- a/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala +++ b/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala @@ -5,9 +5,8 @@ import aqua.parser.Parser import aqua.parser.lift.Span import aqua.parser.lift.Span.S import aqua.raw.ConstantRaw -import aqua.semantics.rules.locations.VariableInfo +import aqua.semantics.rules.locations.{TokenLocation, VariableInfo} import aqua.types.* - import cats.Id import cats.data.* import cats.instances.string.* @@ -25,7 +24,7 @@ class AquaLSPSpec extends AnyFlatSpec with Matchers with Inside { useStart: Int, useEnd: Int ): Boolean = - c.allLocations.exists { case (useT, defT) => + c.allLocations.exists { case TokenLocation(useT, defT) => val defSpan = defT.unit._1 val useSpan = useT.unit._1 defSpan.startIndex == defStart && defSpan.endIndex == defEnd && useSpan.startIndex == useStart && useSpan.endIndex == useEnd @@ -135,7 +134,7 @@ class AquaLSPSpec extends AnyFlatSpec with Matchers with Inside { ) ) - println(res.allLocations.map { case (l, r) => + println(res.allLocations.map { case TokenLocation(l, r) => val lSpan = l.unit._1 val rSpan = r.unit._1 s"($l($lSpan):$r($rSpan))" diff --git a/semantics/src/main/scala/aqua/semantics/expr/AbilitySem.scala b/semantics/src/main/scala/aqua/semantics/expr/AbilitySem.scala index e0798cfd2..a47f8b1f6 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/AbilitySem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/AbilitySem.scala @@ -28,7 +28,7 @@ class AbilitySem[S[_]](val expr: AbilityExpr[S]) extends AnyVal { ): Prog[Alg, Raw] = { Prog.after_( for { - defs <- D.purgeDefs(expr.name) + defs <- D.purgeDefs() fields = defs.view.mapValues(d => d.name -> d.`type`).toMap abilityType <- T.defineAbilityType(expr.name, fields) result = abilityType.map(st => TypeRaw(expr.name.value, st)) diff --git a/semantics/src/main/scala/aqua/semantics/expr/DataStructSem.scala b/semantics/src/main/scala/aqua/semantics/expr/DataStructSem.scala index 6d5cb54e2..535631653 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/DataStructSem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/DataStructSem.scala @@ -22,7 +22,7 @@ class DataStructSem[S[_]](val expr: DataStructExpr[S]) extends AnyVal { ): Prog[Alg, Raw] = Prog.after((_: Raw) => for { - defs <- D.purgeDefs(expr.name) + defs <- D.purgeDefs() fields = defs.view.mapValues(d => d.name -> d.`type`).toMap structType <- T.defineStructType(expr.name, fields) result = structType.map(st => TypeRaw(expr.name.value, st)) diff --git a/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsAlgebra.scala 
b/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsAlgebra.scala index 62a336e92..c44b99eda 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsAlgebra.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsAlgebra.scala @@ -9,7 +9,7 @@ import cats.data.{NonEmptyList, NonEmptyMap} trait DefinitionsAlgebra[S[_], Alg[_]] { def defineDef(name: Name[S], `type`: Type): Alg[Boolean] - def purgeDefs(token: NamedTypeToken[S]): Alg[Map[String, DefinitionsState.Def[S]]] + def purgeDefs(): Alg[Map[String, DefinitionsState.Def[S]]] def defineArrow(arrow: Name[S], `type`: ArrowType): Alg[Boolean] diff --git a/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsInterpreter.scala index 8abf21730..43e75ea02 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsInterpreter.scala @@ -1,21 +1,13 @@ package aqua.semantics.rules.definitions import aqua.parser.lexer.{Name, NamedTypeToken, Token} -import aqua.semantics.rules.StackInterpreter -import aqua.semantics.rules.abilities.AbilitiesState import aqua.semantics.rules.report.ReportAlgebra -import aqua.semantics.rules.types.TypesState import aqua.types.{ArrowType, Type} -import cats.data.{NonEmptyList, NonEmptyMap, State} -import cats.syntax.applicative.* -import cats.syntax.apply.* -import cats.syntax.flatMap.* +import cats.data.{NonEmptyList, State} import cats.syntax.functor.* import cats.syntax.option.* import monocle.Lens -import monocle.macros.GenLens -import scala.collection.immutable.SortedMap class DefinitionsInterpreter[S[_], X](implicit lens: Lens[X, DefinitionsState[S]], @@ -52,9 +44,7 @@ class DefinitionsInterpreter[S[_], X](implicit override def defineArrow(arrow: Name[S], `type`: ArrowType): SX[Boolean] = define(arrow, `type`, "arrow") - override def purgeDefs( - token: NamedTypeToken[S] - ): SX[Map[String, DefinitionsState.Def[S]]] = + override def purgeDefs(): SX[Map[String, DefinitionsState.Def[S]]] = getState.map(_.definitions).flatMap { defs => for { _ <- modify(_.copy(definitions = Map.empty)) diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/DummyLocationsInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/DummyLocationsInterpreter.scala index fee4c1af1..c49d72ed8 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/locations/DummyLocationsInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/locations/DummyLocationsInterpreter.scala @@ -1,12 +1,7 @@ package aqua.semantics.rules.locations import aqua.parser.lexer.Token -import aqua.semantics.rules.StackInterpreter -import aqua.semantics.rules.types.TypesState - -import cats.data.{NonEmptyList, NonEmptyMap, State} -import monocle.Lens -import monocle.macros.GenLens +import cats.data.State class DummyLocationsInterpreter[S[_], X] extends LocationsAlgebra[S, State[X, *]] { diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala index d52b4c2ee..59b4864dc 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala @@ -1,17 +1,22 @@ package aqua.semantics.rules.locations + import 
aqua.parser.lexer.Token import aqua.types.Type -case class DefinitionInfo[S[_]](name: String, token: Token[S], `type`: Type) -case class VariableInfo[S[_]](definition: DefinitionInfo[S], occurrences: List[Token[S]] = Nil) { - def allLocations: List[(Token[S], Token[S])] = occurrences.map(_ -> definition.token) -} - trait LocationsAlgebra[S[_], Alg[_]] { def addDefinition(definition: DefinitionInfo[S]): Alg[Unit] - def addDefinitionWithFields(definition: DefinitionInfo[S], fields: List[DefinitionInfo[S]]): Alg[Unit] - def pointTokenWithFieldLocation(typeName: String, typeToken: Token[S], fieldName: String, token: Token[S]): Alg[Unit] + def addDefinitionWithFields( + definition: DefinitionInfo[S], + fields: List[DefinitionInfo[S]] + ): Alg[Unit] + + def pointTokenWithFieldLocation( + typeName: String, + typeToken: Token[S], + fieldName: String, + token: Token[S] + ): Alg[Unit] def pointFieldLocation(typeName: String, fieldName: String, token: Token[S]): Alg[Unit] def pointLocation(name: String, token: Token[S]): Alg[Unit] def pointLocations(locations: List[(String, Token[S])]): Alg[Unit] diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala index 5b5b8503f..e11d592e0 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala @@ -1,10 +1,7 @@ package aqua.semantics.rules.locations import aqua.parser.lexer.Token -import aqua.semantics.rules.types.TypesState - import cats.kernel.Monoid -import scala.annotation.{tailrec, unused} import scribe.Logging case class LocationsState[S[_]]( @@ -26,7 +23,7 @@ case class LocationsState[S[_]]( case Nil => logger.error(s"Unexpected. 
Cannot add occurrence for $name")
         Nil
-      case head :: tail =>
+      case head :: tail =>
         if (head.definition.name == name)
           head.copy(occurrences = token +: head.occurrences) :: tail
         else
diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/VariableInfo.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/VariableInfo.scala
new file mode 100644
index 000000000..efca87c09
--- /dev/null
+++ b/semantics/src/main/scala/aqua/semantics/rules/locations/VariableInfo.scala
@@ -0,0 +1,11 @@
+package aqua.semantics.rules.locations
+
+import aqua.parser.lexer.Token
+import aqua.types.Type
+
+case class DefinitionInfo[S[_]](name: String, token: Token[S], `type`: Type)
+case class TokenLocation[S[_]](usage: Token[S], definition: Token[S])
+
+case class VariableInfo[S[_]](definition: DefinitionInfo[S], occurrences: List[Token[S]] = Nil) {
+  def allLocations: List[TokenLocation[S]] = occurrences.map(o => TokenLocation(o, definition.token))
+}

From 33725d19eeb38304f752a5b78779bb2e45eb4628 Mon Sep 17 00:00:00 2001
From: DieMyst
Date: Wed, 6 Dec 2023 16:07:50 +0700
Subject: [PATCH 26/30] pr fixes

---
 .../src/test/scala/aqua/lsp/AquaLSPSpec.scala | 4 ++--
 .../aqua/semantics/rules/locations/LocationsState.scala | 6 ++----
 2 files changed, 4 insertions(+), 6 deletions(-)

diff --git a/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala b/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala
index 6ae327311..b8807a98c 100644
--- a/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala
+++ b/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala
@@ -134,11 +134,11 @@ class AquaLSPSpec extends AnyFlatSpec with Matchers with Inside {
       )
     )
 
-    println(res.allLocations.map { case TokenLocation(l, r) =>
+    /*println(res.allLocations.map { case TokenLocation(l, r) =>
       val lSpan = l.unit._1
       val rSpan = r.unit._1
       s"($l($lSpan):$r($rSpan))"
-    })
+    })*/
 
     // inside `foo_wrapper` func
     res.checkTokenLoc("z", 120, 121, ScalarType.string) shouldBe true
diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala
index e11d592e0..0f3ff7710 100644
--- a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala
+++ b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala
@@ -8,9 +8,8 @@ case class LocationsState[S[_]](
   variables: List[VariableInfo[S]] = Nil
 ) extends Logging {
 
-  def addDefinitions(newDefinitions: List[DefinitionInfo[S]]): LocationsState[S] = {
+  def addDefinitions(newDefinitions: List[DefinitionInfo[S]]): LocationsState[S] =
     copy(variables = newDefinitions.map(d => VariableInfo(d)) ++ variables)
-  }
 
   def addDefinition(newDef: DefinitionInfo[S]): LocationsState[S] =
     copy(variables = VariableInfo(newDef) +: variables)
@@ -39,11 +38,10 @@ case class LocationsState[S[_]](
 
   def addLocations(
     locations: List[(String, Token[S])]
-  ): LocationsState[S] = {
+  ): LocationsState[S] =
     locations.foldLeft(this) { case (st, (name, token)) =>
       st.copy(variables = addOccurrenceToFirst(variables, name, token))
     }
-  }
 }
 
 object LocationsState {

From c541a7e9f4be6ad89adbeff88d280874aed48730 Mon Sep 17 00:00:00 2001
From: DieMyst
Date: Thu, 7 Dec 2023 17:20:35 +0700
Subject: [PATCH 27/30] pr fixes, rewrite tests

---
 .../main/scala/aqua/run/FuncCompiler.scala | 22 +-
 .../main/scala/aqua/lsp/ResultHelper.scala | 2 +-
 .../src/main/scala/aqua/lsp/LspContext.scala | 7 +
.../src/test/scala/aqua/lsp/AquaLSPSpec.scala | 263 ++++++++++++------ .../scala/aqua/tree/TreeNodeCompanion.scala | 8 +- parser/src/main/scala/aqua/parser/Ast.scala | 2 +- .../scala/aqua/parser/lexer/PropertyOp.scala | 2 +- .../scala/aqua/parser/lift/FileSpan.scala | 9 + .../expr/func/DeclareStreamSem.scala | 5 - .../aqua/semantics/rules/ValuesAlgebra.scala | 2 - .../abilities/AbilitiesInterpreter.scala | 8 +- .../rules/locations/LocationsState.scala | 19 +- .../main/scala/aqua/helpers/syntax/list.scala | 19 ++ .../aqua/{ => helpers}/syntax/optiont.scala | 0 .../scala/aqua/{ => helpers}/tree/Tree.scala | 10 +- 15 files changed, 233 insertions(+), 145 deletions(-) create mode 100644 utils/helpers/src/main/scala/aqua/helpers/syntax/list.scala rename utils/helpers/src/main/scala/aqua/{ => helpers}/syntax/optiont.scala (100%) rename utils/helpers/src/main/scala/aqua/{ => helpers}/tree/Tree.scala (79%) diff --git a/aqua-run/src/main/scala/aqua/run/FuncCompiler.scala b/aqua-run/src/main/scala/aqua/run/FuncCompiler.scala index 134e6e46d..167175499 100644 --- a/aqua-run/src/main/scala/aqua/run/FuncCompiler.scala +++ b/aqua-run/src/main/scala/aqua/run/FuncCompiler.scala @@ -1,34 +1,24 @@ package aqua.run -import aqua.Rendering.given -import aqua.compiler.{AquaCompiler, AquaCompilerConf, CompileResult, CompilerAPI} +import aqua.compiler.{AquaCompilerConf, CompileResult, CompilerAPI} import aqua.files.{AquaFileSources, FileModuleId} -import aqua.{AquaIO, SpanParser} -import aqua.io.{AquaFileError, AquaPath, PackagePath, Prelude} +import aqua.io.{AquaFileError, AquaPath, PackagePath} import aqua.model.transform.TransformConfig import aqua.model.{AquaContext, FuncArrow} import aqua.parser.lift.FileSpan -import aqua.run.CliFunc +import aqua.{AquaIO, SpanParser} -import cats.data.Validated.{invalidNec, validNec} -import cats.data.{Chain, NonEmptyList, Validated, ValidatedNec} -import cats.effect.IO +import cats.data.{Chain, ValidatedNec} import cats.effect.kernel.{Async, Clock} import cats.syntax.applicative.* +import cats.syntax.apply.* import cats.syntax.flatMap.* import cats.syntax.functor.* -import cats.syntax.monad.* -import cats.syntax.show.* -import cats.syntax.traverse.* import cats.syntax.option.* -import cats.syntax.either.* -import cats.syntax.validated.* -import cats.syntax.apply.* +import cats.syntax.traverse.* import fs2.io.file.{Files, Path} import scribe.Logging -import scala.concurrent.duration.Duration - class FuncCompiler[F[_]: Files: AquaIO: Async]( input: Option[AquaPath], imports: List[Path], diff --git a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala index 17c51b20f..3d59b3d2e 100644 --- a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala +++ b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala @@ -129,7 +129,7 @@ object ResultHelper extends Logging { warnings.toJSArray, locationsToJs(lsp.variables.flatMap(v => v.allLocations)), importsToTokenImport(lsp.importTokens), - tokensToJs(lsp.variables.map(_.definition)) + tokensToJs(lsp.allVariablesMerged.map(_.definition)) ) } } diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala b/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala index 4115d1038..7df8dca4a 100644 --- a/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala +++ 
b/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala @@ -21,6 +21,13 @@ case class LspContext[S[_]]( warnings: List[SemanticWarning[S]] = Nil ) { lazy val allLocations: List[TokenLocation[S]] = variables.flatMap(_.allLocations) + // there can be duplicates of variables with the same token + lazy val allVariablesMerged: List[VariableInfo[S]] = variables.foldLeft(Map.empty[Token[S], VariableInfo[S]]) { + case (acc, vi) => + acc.updated(vi.definition.token, acc.get(vi.definition.token).map { v => + v.copy(occurrences = v.occurrences ++ vi.occurrences) + }.getOrElse(vi)) + }.toList.map(_._2) } object LspContext { diff --git a/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala b/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala index b8807a98c..a529ca709 100644 --- a/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala +++ b/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala @@ -5,8 +5,9 @@ import aqua.parser.Parser import aqua.parser.lift.Span import aqua.parser.lift.Span.S import aqua.raw.ConstantRaw -import aqua.semantics.rules.locations.{TokenLocation, VariableInfo} +import aqua.semantics.rules.locations.{DefinitionInfo, TokenLocation, VariableInfo} import aqua.types.* + import cats.Id import cats.data.* import cats.instances.string.* @@ -16,38 +17,73 @@ import org.scalatest.matchers.should.Matchers class AquaLSPSpec extends AnyFlatSpec with Matchers with Inside { + private def getByPosition(code: String, str: String, position: Int): Option[(Int, Int)] = { + str.r.findAllMatchIn(code).toList.lift(position).map(r => (r.start, r.end)) + } + extension (c: LspContext[Span.S]) { def checkLocations( - defStart: Int, - defEnd: Int, - useStart: Int, - useEnd: Int - ): Boolean = - c.allLocations.exists { case TokenLocation(useT, defT) => - val defSpan = defT.unit._1 - val useSpan = useT.unit._1 - defSpan.startIndex == defStart && defSpan.endIndex == defEnd && useSpan.startIndex == useStart && useSpan.endIndex == useEnd + name: String, + defPosition: Int, + usePosition: Int, + defCode: String, + useCode: Option[String] = None, + fieldName: Option[String] = None + ): Boolean = { + (for { + defPos <- getByPosition(defCode, name, defPosition) + usePos <- getByPosition(useCode.getOrElse(defCode), fieldName.getOrElse(name), usePosition) + } yield { + val (defStart, defEnd) = defPos + val (useStart, useEnd) = usePos + c.allLocations.exists { case TokenLocation(useT, defT) => + val defSpan = defT.unit._1 + val useSpan = useT.unit._1 + defSpan.startIndex == defStart && defSpan.endIndex == defEnd && useSpan.startIndex == useStart && useSpan.endIndex == useEnd + } + }).getOrElse(false) + } + + def locationsToString(): List[String] = + c.allLocations.map { case TokenLocation(l, r) => + val lSpan = l.unit._1 + val rSpan = r.unit._1 + s"($l($lSpan):$r($rSpan))" } def checkTokenLoc( + code: String, checkName: String, - start: Int, - end: Int, - `type`: Type + position: Int, + `type`: Type, + // if name is combined + fullName: Option[String] = None, + printFiltered: Boolean = false ): Boolean = { - val res = c.variables.exists { case VariableInfo(definition, _) => - val span = definition.token.unit._1 - definition.name == checkName && span.startIndex == start && span.endIndex == end && definition.`type` == `type` - } - /*println(tokens.filter(v => v._1 == checkName && v._2.`type` == `type`).map { - case (name, expr) => - val span = expr.token.unit._1 - 
println(s"$name(${span.startIndex}:${span.endIndex}) ${expr.`type`}") - })*/ + getByPosition(code, checkName, position).exists { case (start, end) => + val res = c.allVariablesMerged.exists { case VariableInfo(definition, _) => + val span = definition.token.unit._1 + definition.name == fullName.getOrElse( + checkName + ) && span.startIndex == start && span.endIndex == end && definition.`type` == `type` + } + + if (printFiltered) + println( + c.allVariablesMerged + .map(_.definition) + .filter(v => v.name == fullName.getOrElse(checkName) && v.`type` == `type`) + .map { case DefinitionInfo(name, token, t) => + val span = token.unit._1 + s"$name(${span.startIndex}:${span.endIndex}) $t" + } + ) + + res + } - res } } @@ -79,49 +115,75 @@ class AquaLSPSpec extends AnyFlatSpec with Matchers with Inside { id => txt => Parser.parse(Parser.parserSchema)(txt), AquaCompilerConf(ConstantRaw.defaultConstants(None)) ) + .leftMap { errors => + println(errors) + errors + } } it should "return right tokens" in { + val main = + """module Import + |import foo, strFunc, num from "export2.aqua" + | + |import "../gen/OneMore.aqua" + | + |func foo_wrapper() -> string: + | fooResult <- foo() + | if 1 == 1: + | someVar = "aaa" + | strFunc(someVar) + | else: + | someVar = 123 + | num(someVar) + | OneMore fooResult + | OneMore.more_call() + | + |ability Ab: + | someField: u32 + | + |data Str: + | someField: string + | + |func useAbAndStruct{Ab}(): + | s = Str(someField = "asd") + | strFunc(s.someField) + | num(Ab.someField) + | + |""".stripMargin val src = Map( - "index.aqua" -> - """module Import - |import foo, str, num from "export2.aqua" - | - |import "../gen/OneMore.aqua" - | - |func foo_wrapper() -> string: - | z <- foo() - | if 1 == 1: - | a = "aaa" - | str(a) - | else: - | a = 123 - | num(a) - | OneMore z - | OneMore.more_call() - |""".stripMargin + "index.aqua" -> main ) + val firstImport = + """module Export declares strFunc, num, foo + | + |func absb() -> string: + | <- "ff" + | + |func strFunc(someVar: string) -> string: + | <- someVar + | + |func num(someVar: u32) -> u32: + | <- someVar + | + |func foo() -> string: + | <- "I am MyFooBar foo" + | + |""".stripMargin + + val secondImport = + """ + |service OneMore: + | more_call() + | consume(s: string) + |""".stripMargin + val imports = Map( "export2.aqua" -> - """module Export declares str, num, foo - | - |func str(a: string) -> string: - | <- a - | - |func num(a: u32) -> u32: - | <- a - | - |func foo() -> string: - | <- "I am MyFooBar foo" - | - |""".stripMargin, + firstImport, "../gen/OneMore.aqua" -> - """ - |service OneMore: - | more_call() - | consume(s: string) - |""".stripMargin + secondImport ) val res = compile(src, imports).toOption.get.values.head @@ -134,66 +196,85 @@ class AquaLSPSpec extends AnyFlatSpec with Matchers with Inside { ) ) - /*println(res.allLocations.map { case TokenLocation(l, r) => - val lSpan = l.unit._1 - val rSpan = r.unit._1 - s"($l($lSpan):$r($rSpan))" - })*/ - // inside `foo_wrapper` func - res.checkTokenLoc("z", 120, 121, ScalarType.string) shouldBe true - res.checkLocations(120, 121, 224, 225) shouldBe true + res.checkTokenLoc(main, "fooResult", 0, ScalarType.string) shouldBe true + res.checkLocations("fooResult", 0, 1, main) shouldBe true - res.checkTokenLoc("a", 152, 153, LiteralType.string) shouldBe true - res.checkLocations(152, 153, 172, 173) shouldBe true - res.checkTokenLoc("a", 191, 192, LiteralType.unsigned) shouldBe true - res.checkLocations(191, 192, 209, 210) shouldBe true + res.checkTokenLoc(main, 
"someVar", 0, LiteralType.string, None, true) shouldBe true + res.checkLocations("someVar", 0, 1, main) shouldBe true + res.checkTokenLoc(main, "someVar", 2, LiteralType.unsigned) shouldBe true + res.checkLocations("someVar", 2, 3, main) shouldBe true // num usage - res.checkLocations(84, 87, 205, 208) shouldBe true - // str usage - res.checkLocations(43, 46, 168, 171) shouldBe true + res.checkLocations("num", 1, 1, firstImport, Some(main)) shouldBe true + // strFunc usage + res.checkLocations("strFunc", 1, 1, firstImport, Some(main)) shouldBe true + res.checkLocations("strFunc", 1, 2, firstImport, Some(main)) shouldBe true + + // Str.field + res.checkTokenLoc(main, "someField", 1, ScalarType.string, Some("Str.someField")) shouldBe true + res.checkLocations("someField", 1, 3, main, None) shouldBe true + + // Ab.field + res.checkTokenLoc( + main, + "someField", + 0, + ScalarType.u32, + Some("Ab.someField"), + true + ) shouldBe true // this is tokens from imports, if we will use `FileSpan.F` file names will be different // OneMore service - res.checkTokenLoc("OneMore", 9, 16, serviceType) shouldBe true - res.checkTokenLoc("OneMore.more_call", 20, 29, ArrowType(NilType, NilType)) shouldBe true + res.checkTokenLoc(secondImport, "OneMore", 0, serviceType) shouldBe true res.checkTokenLoc( - "OneMore.consume", - 34, - 41, - ArrowType(ProductType.labelled(("s", ScalarType.string) :: Nil), NilType) + secondImport, + "more_call", + 0, + ArrowType(NilType, NilType), + Some("OneMore.more_call"), + true + ) shouldBe true + res.checkTokenLoc( + secondImport, + "consume", + 0, + ArrowType(ProductType.labelled(("s", ScalarType.string) :: Nil), NilType), + Some("OneMore.consume") ) shouldBe true - // str function and argument + // strFunc function and argument res.checkTokenLoc( - "str", - 43, - 46, + firstImport, + "strFunc", + 1, ArrowType( - ProductType.labelled(("a", ScalarType.string) :: Nil), + ProductType.labelled(("someVar", ScalarType.string) :: Nil), ProductType(ScalarType.string :: Nil) - ) + ), + None, + true ) shouldBe true - res.checkTokenLoc("a", 47, 48, ScalarType.string) shouldBe true + res.checkTokenLoc(firstImport, "someVar", 0, ScalarType.string) shouldBe true // num function and argument res.checkTokenLoc( + firstImport, "num", - 84, - 87, + 1, ArrowType( - ProductType.labelled(("a", ScalarType.u32) :: Nil), + ProductType.labelled(("someVar", ScalarType.u32) :: Nil), ProductType(ScalarType.u32 :: Nil) ) ) shouldBe true - res.checkTokenLoc("a", 88, 89, ScalarType.u32) shouldBe true + res.checkTokenLoc(firstImport, "someVar", 2, ScalarType.u32, None, true) shouldBe true // foo function res.checkTokenLoc( + firstImport, "foo", - 119, - 122, + 1, ArrowType(NilType, ProductType(ScalarType.string :: Nil)) ) shouldBe true } diff --git a/model/tree/src/main/scala/aqua/tree/TreeNodeCompanion.scala b/model/tree/src/main/scala/aqua/tree/TreeNodeCompanion.scala index 241c926ae..7bc42214c 100644 --- a/model/tree/src/main/scala/aqua/tree/TreeNodeCompanion.scala +++ b/model/tree/src/main/scala/aqua/tree/TreeNodeCompanion.scala @@ -1,16 +1,14 @@ package aqua.tree +import aqua.helpers.tree.Tree + import cats.Show import cats.data.Chain import cats.free.Cofree - -import cats.syntax.show.* import cats.syntax.apply.* - +import cats.syntax.show.* import scala.annotation.tailrec -import aqua.helpers.Tree - trait TreeNodeCompanion[T <: TreeNode[T]] { given showTreeLabel: Show[T] diff --git a/parser/src/main/scala/aqua/parser/Ast.scala b/parser/src/main/scala/aqua/parser/Ast.scala index e392c793b..37deead00 
100644 --- a/parser/src/main/scala/aqua/parser/Ast.scala +++ b/parser/src/main/scala/aqua/parser/Ast.scala @@ -1,10 +1,10 @@ package aqua.parser +import aqua.helpers.tree.Tree import aqua.parser.expr.* import aqua.parser.head.{HeadExpr, HeaderExpr} import aqua.parser.lift.{LiftParser, Span} import aqua.parser.lift.LiftParser.* -import aqua.helpers.Tree import cats.data.{Chain, Validated, ValidatedNec} import cats.syntax.flatMap.* diff --git a/parser/src/main/scala/aqua/parser/lexer/PropertyOp.scala b/parser/src/main/scala/aqua/parser/lexer/PropertyOp.scala index 1e63f2668..162b61586 100644 --- a/parser/src/main/scala/aqua/parser/lexer/PropertyOp.scala +++ b/parser/src/main/scala/aqua/parser/lexer/PropertyOp.scala @@ -61,7 +61,7 @@ case class IntoCopy[F[_]: Comonad]( object PropertyOp { private val parseField: P[PropertyOp[Span.S]] = - (`.` *> anyName).lift.map(IntoField(_)) + `.` *> anyName.lift.map(IntoField(_)) val parseArrow: P[PropertyOp[Span.S]] = (`.` *> CallArrowToken.callBraces).map { case CallBraces(name, abilities, args) => diff --git a/parser/src/main/scala/aqua/parser/lift/FileSpan.scala b/parser/src/main/scala/aqua/parser/lift/FileSpan.scala index e1a2b0c83..abd103244 100644 --- a/parser/src/main/scala/aqua/parser/lift/FileSpan.scala +++ b/parser/src/main/scala/aqua/parser/lift/FileSpan.scala @@ -17,6 +17,15 @@ case class FileSpan(name: String, locationMap: Eval[LocationMap], span: Span) { */ def focus(ctx: Int): Option[FileSpan.Focus] = span.focus(locationMap.value, ctx).map(FileSpan.Focus(name, locationMap, ctx, _)) + + override def hashCode(): Int = (name, span).hashCode() + + override def equals(obj: Any): Boolean = { + obj match { + case FileSpan(n, _, s) => n == name && s == span + case _ => false + } + } } object FileSpan { diff --git a/semantics/src/main/scala/aqua/semantics/expr/func/DeclareStreamSem.scala b/semantics/src/main/scala/aqua/semantics/expr/func/DeclareStreamSem.scala index f14ae67dc..d8e18ed4c 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/func/DeclareStreamSem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/func/DeclareStreamSem.scala @@ -8,14 +8,9 @@ import aqua.raw.value.VarRaw import aqua.semantics.Prog import aqua.semantics.rules.names.NamesAlgebra import aqua.semantics.rules.types.TypesAlgebra -import aqua.types.* import cats.Monad -import cats.data.Chain import cats.data.OptionT -import cats.syntax.applicative.* -import cats.syntax.flatMap.* -import cats.syntax.functor.* class DeclareStreamSem[S[_]](val expr: DeclareStreamExpr[S]) { diff --git a/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala b/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala index 9ec2b3aad..4ddded3d7 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala @@ -1,9 +1,7 @@ package aqua.semantics.rules -import aqua.errors.Errors.internalError import aqua.helpers.syntax.optiont.* import aqua.parser.lexer.* -import aqua.parser.lexer.InfixToken.value import aqua.parser.lexer.InfixToken.{BoolOp, CmpOp, EqOp, MathOp, Op as InfOp} import aqua.parser.lexer.PrefixToken.Op as PrefOp import aqua.raw.value.* diff --git a/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala index a73adc335..0f3ee8fa4 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala +++ 
b/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala @@ -1,22 +1,20 @@ package aqua.semantics.rules.abilities -import aqua.parser.lexer.{Name, NamedTypeToken, Token, ValueToken} +import aqua.parser.lexer.{Name, NamedTypeToken, Token} +import aqua.raw.RawContext import aqua.raw.value.ValueRaw -import aqua.raw.{RawContext, ServiceRaw} import aqua.semantics.Levenshtein import aqua.semantics.rules.locations.LocationsAlgebra import aqua.semantics.rules.mangler.ManglerAlgebra import aqua.semantics.rules.report.ReportAlgebra import aqua.semantics.rules.{StackInterpreter, abilities} -import aqua.types.{ArrowType, ServiceType} +import aqua.types.ArrowType import cats.data.{NonEmptyMap, State} import cats.syntax.applicative.* import cats.syntax.apply.* -import cats.syntax.foldable.* import cats.syntax.functor.* import cats.syntax.option.* -import cats.syntax.traverse.* import monocle.Lens import monocle.macros.GenLens diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala index 0f3ff7710..dd11b98ed 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala @@ -1,6 +1,8 @@ package aqua.semantics.rules.locations +import aqua.helpers.syntax.list.* import aqua.parser.lexer.Token + import cats.kernel.Monoid import scribe.Logging @@ -18,29 +20,24 @@ case class LocationsState[S[_]]( vars: List[VariableInfo[S]], name: String, token: Token[S] - ): List[VariableInfo[S]] = vars match { - case Nil => + ): List[VariableInfo[S]] = { + if (!vars.exists(_.definition.name == name)) logger.error(s"Unexpected. Cannot add occurrence for $name") - Nil - case head :: tail => - if (head.definition.name == name) - head.copy(occurrences = token +: head.occurrences) :: tail - else - head :: addOccurrenceToFirst(tail, name, token) + + vars.updateFirst(_.definition.name == name, v => v.copy(occurrences = token +: v.occurrences)) } def addLocation( name: String, token: Token[S] - ): LocationsState[S] = { + ): LocationsState[S] = copy(variables = addOccurrenceToFirst(variables, name, token)) - } def addLocations( locations: List[(String, Token[S])] ): LocationsState[S] = locations.foldLeft(this) { case (st, (name, token)) => - st.copy(variables = addOccurrenceToFirst(variables, name, token)) + st.addLocation(name, token) } } diff --git a/utils/helpers/src/main/scala/aqua/helpers/syntax/list.scala b/utils/helpers/src/main/scala/aqua/helpers/syntax/list.scala new file mode 100644 index 000000000..b5dce1072 --- /dev/null +++ b/utils/helpers/src/main/scala/aqua/helpers/syntax/list.scala @@ -0,0 +1,19 @@ +package aqua.helpers.syntax + +import scala.annotation.tailrec + +object list { + extension[A] (l: List[A]) { + def updateFirst[B >: A](p: A => Boolean, f: A => B): List[B] = { + @tailrec + def update(left: List[B], right: List[A]): List[B] = + right match { + case a :: tail if p(a) => left.reverse ::: f(a) :: tail + case a :: tail => update(a :: left, tail) + case Nil => left.reverse + } + + update(Nil, l) + } + } +} diff --git a/utils/helpers/src/main/scala/aqua/syntax/optiont.scala b/utils/helpers/src/main/scala/aqua/helpers/syntax/optiont.scala similarity index 100% rename from utils/helpers/src/main/scala/aqua/syntax/optiont.scala rename to utils/helpers/src/main/scala/aqua/helpers/syntax/optiont.scala diff --git a/utils/helpers/src/main/scala/aqua/tree/Tree.scala 
b/utils/helpers/src/main/scala/aqua/helpers/tree/Tree.scala
similarity index 79%
rename from utils/helpers/src/main/scala/aqua/tree/Tree.scala
rename to utils/helpers/src/main/scala/aqua/helpers/tree/Tree.scala
index b958e14b4..3ed2c68c6 100644
--- a/utils/helpers/src/main/scala/aqua/tree/Tree.scala
+++ b/utils/helpers/src/main/scala/aqua/helpers/tree/Tree.scala
@@ -1,13 +1,9 @@
-package aqua.helpers
+package aqua.helpers.tree
 
-import cats.data.Chain
 import cats.free.Cofree
-import cats.Traverse
-import cats.Show
-import cats.Eval
-import cats.syntax.show.*
-import cats.syntax.traverse.*
 import cats.syntax.foldable.*
+import cats.syntax.show.*
+import cats.{Eval, Show, Traverse}
 
 object Tree {
 

From 00d4645cc6e269d4ede1ed99a5697786306967fd Mon Sep 17 00:00:00 2001
From: DieMyst
Date: Thu, 7 Dec 2023 18:02:14 +0700
Subject: [PATCH 28/30] pr fixes, rewrite tests, try to fix test compilation

---
 integration-tests/package.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/integration-tests/package.json b/integration-tests/package.json
index cbb6536c5..a02ae6d37 100644
--- a/integration-tests/package.json
+++ b/integration-tests/package.json
@@ -22,7 +22,7 @@
     "pubsub": "node -r ts-node/register src/pubsub.ts",
     "exec": "npm run compile-aqua && npm run prettify-compiled && node -r ts-node/register src/index.ts",
     "run": "node -r ts-node/register src/index.ts",
-    "compile-aqua": "ts-node ./src/compile.ts",
+    "compile-aqua": "node --loader ts-node/esm ./src/compile.ts",
     "compile-aqua:air": "aqua -i ./aqua/ -o ./compiled-air -a",
     "prettify-compiled": "prettier --write src/compiled",
     "prettify": "prettier --write src",

From 718cfe88b4770ba3dd5dadf3016369cfcf4f54f1 Mon Sep 17 00:00:00 2001
From: DieMyst
Date: Thu, 7 Dec 2023 18:44:27 +0700
Subject: [PATCH 29/30] fix doubled definitions

---
 aqua-src/antithesis.aqua | 10 ++--------
 .../semantics/rules/names/NamesInterpreter.scala | 14 +++++++-------
 2 files changed, 9 insertions(+), 15 deletions(-)

diff --git a/aqua-src/antithesis.aqua b/aqua-src/antithesis.aqua
index 66da671de..04fd94067 100644
--- a/aqua-src/antithesis.aqua
+++ b/aqua-src/antithesis.aqua
@@ -1,9 +1,3 @@
-func arr(strs: []string) -> []string:
+func arr() -> string:
     n = "str"
-    arr = [n]
-    <- arr
-
-func ppp() -> []u32:
-    n = 123
-    arr = [123]
-    <- arr
\ No newline at end of file
+    <- n
\ No newline at end of file
diff --git a/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala
index 1e1994d92..d47cd85e4 100644
--- a/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala
+++ b/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala
@@ -4,14 +4,12 @@ import aqua.errors.Errors.internalError
 import aqua.parser.lexer.{Name, Token}
 import aqua.semantics.Levenshtein
 import aqua.semantics.rules.StackInterpreter
-import aqua.semantics.rules.locations.{LocationsAlgebra, DefinitionInfo}
+import aqua.semantics.rules.locations.{DefinitionInfo, LocationsAlgebra}
 import aqua.semantics.rules.report.ReportAlgebra
 import aqua.types.{ArrowType, StreamType, Type}
+
 import cats.data.{OptionT, State}
 import cats.syntax.all.*
-import cats.syntax.applicative.*
-import cats.syntax.flatMap.*
-import cats.syntax.functor.*
 import monocle.Lens
 import monocle.macros.GenLens
 
@@ -122,7 +120,7 @@ class NamesInterpreter[S[_], X](using
   override def derive(name: Name[S], `type`: Type, derivedFrom: Set[String]): State[X, Boolean] =
     define(name, `type`).flatTap(defined =>
mapStackHead_(_.derived(name, derivedFrom)).whenA(defined) - ) <* locations.addDefinition(DefinitionInfo(name.value, name, `type`)) + ) override def getDerivedFrom(fromNames: List[Set[String]]): State[X, List[Set[String]]] = mapStackHead(Nil)(frame => @@ -165,8 +163,10 @@ class NamesInterpreter[S[_], X](using report .error(name, "Cannot define a variable in the root scope") .as(false) - )(fr => (fr.addArrow(name, arrowType) -> true).pure) - }.flatTap(_ => locations.addDefinition(DefinitionInfo[S](name.value, name, arrowType))) + )(fr => (fr.addArrow(name, arrowType) -> true).pure).flatTap(_ => + locations.addDefinition(DefinitionInfo[S](name.value, name, arrowType)) + ) + } override def streamsDefinedWithinScope(): SX[Map[String, StreamType]] = mapStackHead(Map.empty) { frame => From e54507745c54b2c00f35da516da1664e88c33a01 Mon Sep 17 00:00:00 2001 From: DieMyst Date: Thu, 7 Dec 2023 18:52:08 +0700 Subject: [PATCH 30/30] don't delete possible duplications --- .../.js/src/main/scala/aqua/lsp/ResultHelper.scala | 2 +- .../src/main/scala/aqua/lsp/LspContext.scala | 7 ------- .../src/test/scala/aqua/lsp/AquaLSPSpec.scala | 4 ++-- 3 files changed, 3 insertions(+), 10 deletions(-) diff --git a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala index 3d59b3d2e..17c51b20f 100644 --- a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala +++ b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala @@ -129,7 +129,7 @@ object ResultHelper extends Logging { warnings.toJSArray, locationsToJs(lsp.variables.flatMap(v => v.allLocations)), importsToTokenImport(lsp.importTokens), - tokensToJs(lsp.allVariablesMerged.map(_.definition)) + tokensToJs(lsp.variables.map(_.definition)) ) } } diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala b/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala index 7df8dca4a..4115d1038 100644 --- a/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala +++ b/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala @@ -21,13 +21,6 @@ case class LspContext[S[_]]( warnings: List[SemanticWarning[S]] = Nil ) { lazy val allLocations: List[TokenLocation[S]] = variables.flatMap(_.allLocations) - // there can be duplicates of variables with the same token - lazy val allVariablesMerged: List[VariableInfo[S]] = variables.foldLeft(Map.empty[Token[S], VariableInfo[S]]) { - case (acc, vi) => - acc.updated(vi.definition.token, acc.get(vi.definition.token).map { v => - v.copy(occurrences = v.occurrences ++ vi.occurrences) - }.getOrElse(vi)) - }.toList.map(_._2) } object LspContext { diff --git a/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala b/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala index a529ca709..cbb0d0757 100644 --- a/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala +++ b/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala @@ -63,7 +63,7 @@ class AquaLSPSpec extends AnyFlatSpec with Matchers with Inside { ): Boolean = { getByPosition(code, checkName, position).exists { case (start, end) => - val res = c.allVariablesMerged.exists { case VariableInfo(definition, _) => + val res = c.variables.exists { case VariableInfo(definition, _) => val span = definition.token.unit._1 definition.name == 
fullName.getOrElse( checkName @@ -72,7 +72,7 @@ class AquaLSPSpec extends AnyFlatSpec with Matchers with Inside { if (printFiltered) println( - c.allVariablesMerged + c.variables .map(_.definition) .filter(v => v.name == fullName.getOrElse(checkName) && v.`type` == `type`) .map { case DefinitionInfo(name, token, t) =>