LNG-96 object creation (#592)
DieMyst committed Nov 30, 2022
1 parent a51c421 commit 63433f2
Showing 23 changed files with 402 additions and 116 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/e2e.yml
@@ -38,7 +38,7 @@ jobs:

aqua-playground:
needs: aqua
- uses: fluencelabs/aqua-playground/.github/workflows/tests.yml@update-e2e
+ uses: fluencelabs/aqua-playground/.github/workflows/tests.yml@master
with:
aqua-version: "${{ needs.aqua.outputs.aqua-version }}"

2 changes: 1 addition & 1 deletion build.sbt
@@ -17,7 +17,7 @@ val scribeV = "3.7.1"
name := "aqua-hll"

val commons = Seq(
baseAquaVersion := "0.7.7",
baseAquaVersion := "0.8.0",
version := baseAquaVersion.value + "-" + sys.env.getOrElse("BUILD_NUMBER", "SNAPSHOT"),
scalaVersion := dottyVersion,
libraryDependencies ++= Seq(
4 changes: 3 additions & 1 deletion cli/.js/src/main/scala/aqua/json/JsonEncoder.scala
@@ -112,7 +112,9 @@ object JsonEncoder {
.sequence
.map { fields =>
// HACK: JSON can have empty object and it is possible if there is only optional fields
- val fs = if (fields.isEmpty) List(("some_random_field_that_does_not_even_exists", BottomType)) else fields
+ val fs =
+ if (fields.isEmpty) List(("some_random_field_that_does_not_even_exists", BottomType))
+ else fields
StructType("", NonEmptyMap.fromMap(SortedMap(fs: _*)).get)
}

4 changes: 2 additions & 2 deletions cli/.js/src/main/scala/aqua/remote/DistOpts.scala
@@ -96,9 +96,9 @@ object DistOpts extends Logging {
}
val addBlueprintType = StructType(
"AddBlueprint",
- NonEmptyMap(
+ NonEmptyMap.of(
("name", ScalarType.string),
- SortedMap(("dependencies", ArrayType(ScalarType.string)))
+ ("dependencies", ArrayType(ScalarType.string))
)
)
val addBlueprintRequestVar =
@@ -0,0 +1,63 @@
package aqua.model.inline

import aqua.model.{
CallModel,
CallServiceModel,
LiteralModel,
OpModel,
SeqModel,
ValueModel,
VarModel
}
import aqua.model.inline.raw.RawInliner
import cats.data.Chain
import aqua.model.inline.state.{Arrows, Exports, Mangler}
import aqua.raw.value.{MakeStructRaw, LiteralRaw}
import cats.data.{NonEmptyMap, State}
import aqua.model.inline.Inline
import aqua.model.inline.RawValueInliner.{unfold, valueToModel}
import aqua.types.ScalarType
import cats.syntax.traverse.*
import cats.syntax.monoid.*
import cats.syntax.functor.*
import cats.syntax.flatMap.*
import cats.syntax.apply.*

object MakeStructRawInliner extends RawInliner[MakeStructRaw] {

private def createObj(fields: NonEmptyMap[String, ValueModel], result: VarModel): OpModel.Tree = {
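// Interleave each quoted field name with its value: ["field1", v1, "field2", v2, ...]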
val args = fields.toSortedMap.toList.flatMap { case (name, value) =>
LiteralModel.fromRaw(LiteralRaw.quote(name)) :: value :: Nil
}
CallServiceModel(
LiteralModel("\"json\"", ScalarType.string),
"obj",
CallModel(
args,
CallModel.Export(result.name, result.`type`) :: Nil
)
).leaf
}

override def apply[S: Mangler: Exports: Arrows](
raw: MakeStructRaw,
propertiesAllowed: Boolean
): State[S, (ValueModel, Inline)] = {
for {
name <- Mangler[S].findAndForbidName(raw.structType.name + "_obj")
foldedFields <- raw.fields.nonEmptyTraverse(unfold(_))
} yield {
val varModel = VarModel(name, raw.baseType)
val valsInline = foldedFields.toSortedMap.values.map(_._2).fold(Inline.empty)(_ |+| _)
val fields = foldedFields.map(_._1)
val objCreation = createObj(fields, varModel)
(
varModel,
Inline(
valsInline.flattenValues,
Chain.one(SeqModel.wrap((valsInline.predo :+ objCreation).toList: _*))
)
)
}
}
}
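
A rough usage sketch (not part of the diff; the `Pair` struct and its two string fields are hypothetical): the inliner reduces a struct literal such as `Pair(first = "a", second = "b")` to a single call to the "json" service's "obj" function, with quoted field names interleaved with field values and the result exported into a fresh variable.

import aqua.model.{CallModel, CallServiceModel, LiteralModel, OpModel}
import aqua.raw.value.LiteralRaw
import aqua.types.{ScalarType, StructType}
import cats.data.NonEmptyMap

val pairType = StructType(
  "Pair",
  NonEmptyMap.of(
    ("first", ScalarType.string),
    ("second", ScalarType.string)
  )
)

// Roughly the tree createObj builds for `Pair(first = "a", second = "b")`
val expectedObjCall: OpModel.Tree = CallServiceModel(
  LiteralModel("\"json\"", ScalarType.string),
  "obj",
  CallModel(
    LiteralModel.fromRaw(LiteralRaw.quote("first")) :: LiteralModel.fromRaw(LiteralRaw.quote("a")) ::
      LiteralModel.fromRaw(LiteralRaw.quote("second")) :: LiteralModel.fromRaw(LiteralRaw.quote("b")) :: Nil,
    CallModel.Export("Pair_obj", pairType) :: Nil
  )
).leaf
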
@@ -42,6 +42,9 @@ object RawValueInliner extends Logging {
case cr: CollectionRaw =>
CollectionRawInliner(cr, propertiesAllowed)

case dr: MakeStructRaw =>
MakeStructRawInliner(dr, propertiesAllowed)

case cr: CallArrowRaw =>
CallArrowRawInliner(cr, propertiesAllowed)

@@ -42,12 +42,11 @@ class ArrowInlinerSpec extends AnyFlatSpec with Matchers {
}

/*
func stream-callback(cb: []string -> ()):
records: *string
cb(records)
func stream-callback(cb: []string -> ()):
records: *string
cb(records)
*/
"arrow inliner" should "pass stream to callback properly" in {

val streamType = StreamType(ScalarType.string)
val streamVar = VarRaw("records", streamType)
val streamModel = VarModel("records", StreamType(ScalarType.string))
@@ -125,14 +124,14 @@ class ArrowInlinerSpec extends AnyFlatSpec with Matchers {
)
) should be(true)

} /*
func stream-callback(cb: string -> ()):
records: *string
cb(records!)
*/
}

// TODO: unignore and fix after stream restrictions will be implemented
ignore /*"arrow inliner"*/ should "pass stream to callback properly, holding property" in {
/*
func stream-callback(cb: string -> ()):
records: *string
cb(records!)
*/
ignore /*"arrow inliner"*/ should "pass stream with gate to callback properly" in {
val streamType = StreamType(ScalarType.string)
val streamVar = VarRaw("records", streamType)
val streamVarLambda =
Expand Down Expand Up @@ -221,17 +220,17 @@ class ArrowInlinerSpec extends AnyFlatSpec with Matchers {
}

/*
service TestService("test-service"):
get_records() -> []string
service TestService("test-service"):
get_records() -> []string
func inner(inner-records: *[]string):
inner-records <- TestService.get_records()
func inner(inner-records: *[]string):
inner-records <- TestService.get_records()
func retrieve_records() -> [][]string:
records: *[]string
-- 'inner-records' argument in `inner` should be renamed as `records` in resulted AIR
append_records(records)
<- records
func retrieve_records() -> [][]string:
records: *[]string
-- 'inner-records' argument in `inner` should be renamed as `records` in resulted AIR
append_records(records)
<- records
*/
"arrow inliner" should "work with streams as arguments" in {

@@ -412,7 +411,6 @@ class ArrowInlinerSpec extends AnyFlatSpec with Matchers {
CallModel(ValueModel.fromRaw(flattenObject) :: Nil, Nil)
).leaf
)

)
) should be(true)

@@ -558,7 +556,8 @@ class ArrowInlinerSpec extends AnyFlatSpec with Matchers {
.leaf
)

- val foldOp = ForTag(iVar.name, array, Some(ForTag.WaitMode)).wrap(inFold, NextTag(iVar.name).leaf)
+ val foldOp =
+ ForTag(iVar.name, array, Some(ForTag.WaitMode)).wrap(inFold, NextTag(iVar.name).leaf)

val model: OpModel.Tree = ArrowInliner
.callArrow[InliningState](
@@ -375,8 +375,8 @@ class RawValueInlinerSpec extends AnyFlatSpec with Matchers {
.value
._2

// println(resVal)
// println(resTree)
// println(resVal)
// println(resTree)
}

"raw value inliner" should "desugarize a recursive lambda value" in {
18 changes: 17 additions & 1 deletion model/raw/src/main/scala/aqua/raw/value/ValueRaw.scala
@@ -55,7 +55,9 @@ case class ApplyPropertyRaw(value: ValueRaw, property: PropertyRaw) extends Valu
override def renameVars(map: Map[String, String]): ValueRaw =
ApplyPropertyRaw(value.renameVars(map), property.renameVars(map))

- override def map(f: ValueRaw => ValueRaw): ValueRaw = f(ApplyPropertyRaw(f(value), property.map(f)))
+ override def map(f: ValueRaw => ValueRaw): ValueRaw = f(
+ ApplyPropertyRaw(f(value), property.map(f))
+ )

override def toString: String = s"$value.$property"

@@ -185,6 +187,20 @@ case class CollectionRaw(values: NonEmptyList[ValueRaw], boxType: BoxType) exten
copy(values = values.map(_.renameVars(map)))
}

case class MakeStructRaw(fields: NonEmptyMap[String, ValueRaw], structType: StructType) extends ValueRaw {

override def baseType: Type = structType

override def map(f: ValueRaw => ValueRaw): ValueRaw = f(copy(fields = fields.map(f)))

override def varNames: Set[String] = {
fields.toSortedMap.values.flatMap(_.varNames).toSet
}

override def renameVars(map: Map[String, String]): ValueRaw =
copy(fields = fields.map(_.renameVars(map)))
}
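
For illustration (a sketch, not in the diff; the `Point` struct and its field are made up): varNames collects the variables referenced by the field values, and renameVars rewrites them inside the fields.

import aqua.raw.value.{MakeStructRaw, VarRaw}
import aqua.types.{ScalarType, StructType}
import cats.data.NonEmptyMap

val point = MakeStructRaw(
  NonEmptyMap.of(("x", VarRaw("a", ScalarType.string))),
  StructType("Point", NonEmptyMap.of(("x", ScalarType.string)))
)
point.varNames                    // Set("a")
point.renameVars(Map("a" -> "b")) // the "x" field now references variable "b"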

case class CallArrowRaw(
// TODO: ability should hold a type, not name
ability: Option[String],
2 changes: 1 addition & 1 deletion parser/src/main/scala/aqua/parser/head/ModuleExpr.scala
@@ -42,7 +42,7 @@ object ModuleExpr extends HeaderExpr.Leaf {
nameOrAbList.map(Left(_)) | `star`.lift.map(Token.lift(_)).map(Right(_))

override val p: Parser[ModuleExpr[Span.S]] =
- (`module` *> ` ` *> Ability.dotted ~
+ ((`module` | `aqua-word`) *> ` ` *> Ability.dotted ~
(` declares ` *> nameOrAbListOrAll).?).map {
case (name, None) =>
ModuleExpr(name, None, Nil, Nil)
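
A quick parsing sketch (using cats-parse's parseAll on the parser above): the module header now accepts the `aqua` keyword as an alternative to `module`, so both spellings produce a ModuleExpr.

import aqua.parser.head.ModuleExpr

ModuleExpr.p.parseAll("module MyModule declares *") // Right(ModuleExpr(...))
ModuleExpr.p.parseAll("aqua MyModule declares *")   // Right(ModuleExpr(...))
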
2 changes: 2 additions & 0 deletions parser/src/main/scala/aqua/parser/lexer/Name.scala
@@ -19,6 +19,8 @@ case class Name[F[_]: Comonad](name: F[String]) extends Token[F] {
def rename(newName: String): Name[F] = copy(name.as(newName))

def value: String = name.extract

override def toString() = value
}

object Name {
15 changes: 8 additions & 7 deletions parser/src/main/scala/aqua/parser/lexer/PropertyOp.scala
@@ -25,14 +25,15 @@ case class IntoField[F[_]: Comonad](name: F[String]) extends PropertyOp[F] {
override def mapK[K[_]: Comonad](fk: F ~> K): PropertyOp[K] = copy(fk(name))

def value: String = name.extract

override def toString: String = name.extract
}

- case class IntoIndex[F[_]: Comonad](token: Token[F], idx: Option[ValueToken[F]])
+ case class IntoIndex[F[_]: Comonad](point: F[Unit], idx: Option[ValueToken[F]])
extends PropertyOp[F] {
- override def as[T](v: T): F[T] = token.as(v)
+ override def as[T](v: T): F[T] = point.as(v)

- override def mapK[K[_]: Comonad](fk: F ~> K): IntoIndex[K] =
- copy(token.mapK(fk), idx.map(_.mapK(fk)))
+ override def mapK[K[_]: Comonad](fk: F ~> K): IntoIndex[K] = copy(fk(point), idx.map(_.mapK(fk)))
}

object PropertyOp {
@@ -42,10 +43,10 @@ object PropertyOp {

private val parseIdx: P[PropertyOp[Span.S]] =
(P.defer(
- (ValueToken.`value`.between(`[`, `]`) | (exclamation *> ValueToken.num))
- .map(v => IntoIndex(v, Some(v)))
+ (ValueToken.`value`.between(`[`, `]`).lift | (exclamation *> ValueToken.num).lift)
+ .map(v => IntoIndex(v.map(_.unit), Some(v._2)))
.backtrack
- ) | exclamation.lift.map(e => IntoIndex(Token.lift[Span.S, Unit](e), None))).flatMap { ii =>
+ ) | exclamation.lift.map(e => IntoIndex(e, None))).flatMap { ii =>
ii.idx match {
case Some(LiteralToken(_, lt)) if lt == LiteralType.signed =>
P.fail.withContext("Collection indexes must be non-negative")
2 changes: 2 additions & 0 deletions parser/src/main/scala/aqua/parser/lexer/Token.scala
@@ -3,6 +3,7 @@ package aqua.parser.lexer
import cats.data.NonEmptyList
import cats.parse.{Accumulator0, Parser as P, Parser0 as P0}
import cats.{~>, Comonad, Functor}
import cats.syntax.functor.*

trait Token[F[_]] {
def as[T](v: T): F[T]
@@ -34,6 +35,7 @@ object Token {
val `data`: P[Unit] = P.string("data")
val `import`: P[Unit] = P.string("import")
val `module`: P[Unit] = P.string("module")
val `aqua-word`: P[Unit] = P.string("aqua")
val `declares`: P[Unit] = P.string("declares")
val ` declares ` : P[Unit] = `declares`.surroundedBy(` `)
val `declare`: P[Unit] = P.string("declare")
2 changes: 2 additions & 0 deletions parser/src/main/scala/aqua/parser/lexer/TypeToken.scala
@@ -69,6 +69,8 @@ case class CustomTypeToken[F[_]: Comonad](name: F[String]) extends DataTypeToken
override def mapK[K[_]: Comonad](fk: F ~> K): CustomTypeToken[K] = copy(fk(name))

def value: String = name.extract

override def toString: String = name.extract
}

object CustomTypeToken {
35 changes: 33 additions & 2 deletions parser/src/main/scala/aqua/parser/lexer/ValueToken.scala
@@ -11,15 +11,16 @@ import cats.parse.{Numbers, Parser as P, Parser0 as P0}
import cats.syntax.comonad.*
import cats.syntax.functor.*
import cats.{~>, Comonad, Functor}
- import cats.data.NonEmptyList
+ import cats.data.{NonEmptyList, NonEmptyMap}
import aqua.parser.lift.Span
import aqua.parser.lift.Span.{P0ToSpan, PToSpan, S}

sealed trait ValueToken[F[_]] extends Token[F] {
def mapK[K[_]: Comonad](fk: F ~> K): ValueToken[K]
}

- case class VarToken[F[_]](name: Name[F], property: List[PropertyOp[F]] = Nil) extends ValueToken[F] {
+ case class VarToken[F[_]](name: Name[F], property: List[PropertyOp[F]] = Nil)
+ extends ValueToken[F] {
override def as[T](v: T): F[T] = name.as(v)

def mapK[K[_]: Comonad](fk: F ~> K): VarToken[K] = copy(name.mapK(fk), property.map(_.mapK(fk)))
@@ -47,6 +48,8 @@ case class CollectionToken[F[_]: Comonad](
override def as[T](v: T): F[T] = point.as(v)

def mode: CollectionToken.Mode = point.extract

override def toString: String = s"CollectionToken(${point.extract}, $values)"
}

object CollectionToken {
@@ -91,6 +94,33 @@ object CallArrowToken {
}
}

case class StructValueToken[F[_]: Comonad](
typeName: CustomTypeToken[F],
fields: NonEmptyMap[String, ValueToken[F]]
) extends ValueToken[F] {

override def mapK[K[_]: Comonad](fk: F ~> K): StructValueToken[K] =
copy(typeName.mapK(fk), fields.map(_.mapK(fk)))

override def as[T](v: T): F[T] = typeName.as(v)
}

object StructValueToken {

val dataValue: P[StructValueToken[Span.S]] =
(`Class`.lift
~ comma(
((`name` <* (` `.?.with1 *> `=` *> ` `.?)).with1 ~ ValueToken.`value`).surroundedBy(`/s*`)
)
.between(` `.?.with1 *> `(` <* `/s*`, `/s*` *> `)`))
.withContext(
"Missing braces '()' after the struct type"
)
.map { case (dn, args) =>
StructValueToken(CustomTypeToken(dn), NonEmptyMap.of(args.head, args.tail: _*))
}
}
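
A minimal parsing sketch (the `Point` type and its fields are hypothetical): the new struct-literal syntax is a capitalized type name followed by `field = value` pairs in parentheses.

import aqua.parser.lexer.StructValueToken

// Parses into StructValueToken(CustomTypeToken("Point"), fields "x" -> 1 and "y" -> 2)
StructValueToken.dataValue.parseAll("Point(x = 1, y = 2)")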

// Two values as operands, with an infix between them
case class InfixToken[F[_]: Comonad](
left: ValueToken[F],
@@ -166,6 +196,7 @@ object InfixToken {
P.defer(
CollectionToken.collection
) ::
P.defer(StructValueToken.dataValue).backtrack ::
P.defer(CallArrowToken.callArrow).backtrack ::
P.defer(brackets(InfixToken.mathExpr)) ::
varProperty ::