Disable Scala3 significant indentation (zio#357)
guizmaii committed Sep 26, 2023
1 parent 5f8eaa6 commit 00535ae
Showing 127 changed files with 2,539 additions and 1,415 deletions.
6 changes: 5 additions & 1 deletion build.sbt
@@ -1,5 +1,7 @@
import com.jsuereth.sbtpgp.PgpKeys.publishSigned

+Global / onChangedBuildSource := ReloadOnSourceChanges
+
inThisBuild(
List(
organization := "io.getquill",
@@ -326,6 +328,8 @@ lazy val basicSettings = Seq(
// //Tests.Argument(TestFrameworks.ScalaTest, "-h", "testresults")
// ),
scalacOptions ++= Seq(
"-language:implicitConversions", "-explain"
"-language:implicitConversions", "-explain",
// See https://docs.scala-lang.org/scala3/guides/migration/tooling-syntax-rewriting.html
"-no-indent"
)
)
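
Note: -no-indent tells the Scala 3 compiler to flag indentation-based "optional braces" syntax (and, combined with -rewrite, to convert it, per the linked syntax-rewriting guide), which is why every file in this commit migrates from colon-and-end style to explicit braces. A minimal sketch of the two styles, illustrative only and not part of this commit:

// Significant-indentation style: flagged once -no-indent is enabled.
object Greeter:
  def greet(name: String): String =
    s"Hello, $name"
end Greeter

// Equivalent brace-delimited style: the form this commit adopts throughout.
object Greeter {
  def greet(name: String): String =
    s"Hello, $name"
} // end Greeter

Running the compiler with both -rewrite and -no-indent performs this conversion automatically; the "// end ..." comments below appear to preserve the old end markers.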
@@ -11,15 +11,16 @@ import caliban.Value

case class ProductArgs[T](keyValues: Map[String, String])

-object CalibanIntegration:
+object CalibanIntegration {

-def quillColumns(field: Field) =
+def quillColumns(field: Field) = {
def recurseFetchFields(field: Field): List[Field] =
if (Types.innerType(field.fieldType).kind == __TypeKind.OBJECT)
field.fields.flatMap(recurseFetchFields(_))
else
List(field)
field.fields.flatMap(recurseFetchFields(_)).map(_.name)
+}

def flattenToPairs(key: String, value: InputValue): List[(String, String)] =
value match {
@@ -63,4 +64,4 @@ object CalibanIntegration:
def resolve(value: ProductArgs[T]): Step[Any] = Step.NullStep
}

-end CalibanIntegration
+} // end CalibanIntegration
@@ -12,9 +12,9 @@ import io.getquill.CalibanIntegration._
class CalibanIntegrationNestedSpec extends CalibanSpec {
import Ctx._

-object Nested:
+object Nested {
import NestedSchema._
-object Dao:
+object Dao {
def personAddress(columns: List[String], filters: Map[String, String]) =
Ctx.run {
query[PersonT].leftJoin(query[AddressT]).on((p, a) => p.id == a.ownerId)
@@ -30,6 +30,8 @@ class CalibanIntegrationNestedSpec extends CalibanSpec {
println(s"ERROR $e")
ZIO.unit
})
+}
+}

case class Queries(
personAddressNested: Field => (ProductArgs[NestedSchema.PersonAddressNested] => Task[List[NestedSchema.PersonAddressNested]])
@@ -12,9 +12,9 @@ import io.getquill.CalibanIntegration._
class CalibanIntegrationSpec extends CalibanSpec {
import Ctx._

-object Flat:
+object Flat {
import FlatSchema._
-object Dao:
+object Dao {
def personAddress(columns: List[String], filters: Map[String, String]): ZIO[Any, Throwable, List[PersonAddress]] =
Ctx.run {
query[PersonT].leftJoin(query[AddressT]).on((p, a) => p.id == a.ownerId)
@@ -26,6 +26,8 @@ class CalibanIntegrationSpec extends CalibanSpec {
println(s"Results: $list for columns: $columns")
ZIO.unit
})
+}
+}

case class Queries(
personAddressFlat: Field => (ProductArgs[FlatSchema.PersonAddress] => Task[List[FlatSchema.PersonAddress]]),
7 changes: 4 additions & 3 deletions quill-caliban/src/test/scala/io/getquill/CalibanSpec.scala
@@ -33,13 +33,14 @@ trait CalibanSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll {

def api: GraphQL[Any]

-extension [A](qzio: ZIO[Any, Throwable, A])
+extension [A](qzio: ZIO[Any, Throwable, A]) {
def unsafeRunSync(): A =
zio.Unsafe.unsafe { implicit unsafe =>
zio.Runtime.default.unsafe.run(qzio).getOrThrow()
}
+}

-def unsafeRunQuery(queryString: String) =
+def unsafeRunQuery(queryString: String) = {
val output =
(for {
interpreter <- api.interpreter
@@ -54,5 +55,5 @@ trait CalibanSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll {
fail(s"GraphQL Validation Failures: ${output.errors}")
else
output.data.toString
-end unsafeRunQuery
+} // end unsafeRunQuery
}
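
Aside: the unsafeRunSync() extension above is the standard ZIO 2 idiom for running an effect to completion at the edge of synchronous test code (Unsafe.unsafe supplies the capability that Runtime#unsafe.run requires, and getOrThrow() rethrows failures). A hypothetical call site, not part of this diff:

// Illustrative only: evaluates the effect on the default runtime and rethrows failures.
val greeting: String = zio.ZIO.succeed("hello").unsafeRunSync()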
13 changes: 8 additions & 5 deletions quill-caliban/src/test/scala/io/getquill/Schema.scala
@@ -1,10 +1,10 @@
package io.getquill

-object FlatSchema:
+object FlatSchema {
case class PersonT(id: Int, first: String, last: String, age: Int)
case class AddressT(ownerId: Int, street: String)
case class PersonAddress(id: Int, first: String, last: String, age: Int, street: Option[String])
-object ExampleData:
+object ExampleData {
val people =
List(
PersonT(1, "One", "A", 44),
@@ -22,14 +22,16 @@ object FlatSchema:
PersonAddress(2, "Two", "B", 55, Some("123 St")),
PersonAddress(3, "Three", "C", 66, None),
)
+}
+}

-object NestedSchema:
+object NestedSchema {
case class Name(first: String, last: String)
case class PersonT(id: Int, name: Name, age: Int)
case class AddressT(ownerId: Int, street: String)
// Needs to be named differently from Flat.PersonAddress___ since Caliban infers from this class & name must be different
case class PersonAddressNested(id: Int, name: Name, age: Int, street: Option[String])
-object ExampleData:
+object ExampleData {
val people =
List(
PersonT(1, Name("One", "A"), 44),
@@ -46,4 +48,5 @@ object NestedSchema:
PersonAddressNested(1, Name("One", "A"), 44, Some("123 St")),
PersonAddressNested(2, Name("Two", "B"), 55, Some("123 St")),
PersonAddressNested(3, Name("Three", "C"), 66, None),
-)
+) }
+}
@@ -24,7 +24,7 @@ import io.getquill
import io.getquill.FlatSchema._


-object Dao:
+object Dao {
case class PersonAddressPlanQuery(plan: String, pa: List[PersonAddress])
private val logger = ContextLogger(classOf[Dao.type])

@@ -44,12 +44,13 @@ object Dao:
inline def plan(inline columns: List[String], inline filters: Map[String, String]) =
quote { sql"EXPLAIN ${q(columns, filters)}".pure.as[Query[String]] }

-def personAddress(columns: List[String], filters: Map[String, String]) =
+def personAddress(columns: List[String], filters: Map[String, String]) = {
println(s"Getting columns: $columns")
run(q(columns, filters)).implicitDS.mapError(e => {
logger.underlying.error("personAddress query failed", e)
e
})
+}

def personAddressPlan(columns: List[String], filters: Map[String, String]) =
run(plan(columns, filters), OuterSelectWrap.Never).map(_.mkString("\n")).implicitDS.mapError(e => {
@@ -63,9 +64,9 @@ object Dao:
_ <- run(liftQuery(ExampleData.people).foreach(row => query[PersonT].insertValue(row)))
_ <- run(liftQuery(ExampleData.addresses).foreach(row => query[AddressT].insertValue(row)))
} yield ()).implicitDS
-end Dao
+} // end Dao

-object CalibanExample extends zio.ZIOAppDefault:
+object CalibanExample extends zio.ZIOAppDefault {

case class Queries(
personAddress: Field => (ProductArgs[PersonAddress] => Task[List[PersonAddress]]),
@@ -106,4 +107,4 @@ object CalibanExample extends zio.ZIOAppDefault:
override def run =
myApp.exitCode

-end CalibanExample
+} // end CalibanExample
@@ -23,7 +23,7 @@ import io.getquill.util.ContextLogger
import io.getquill.NestedSchema._


-object DaoNested:
+object DaoNested {
case class PersonAddressPlanQuery(plan: String, pa: List[PersonAddressNested])
private val logger = ContextLogger(classOf[DaoNested.type])

@@ -43,12 +43,13 @@ object DaoNested:
inline def plan(inline columns: List[String], inline filters: Map[String, String]) =
quote { sql"EXPLAIN ${q(columns, filters)}".pure.as[Query[String]] }

-def personAddress(columns: List[String], filters: Map[String, String]) =
+def personAddress(columns: List[String], filters: Map[String, String]) = {
println(s"Getting columns: $columns")
run(q(columns, filters)).implicitDS.mapError(e => {
logger.underlying.error("personAddress query failed", e)
e
})
+}

def personAddressPlan(columns: List[String], filters: Map[String, String]) =
run(plan(columns, filters), OuterSelectWrap.Never).map(_.mkString("\n")).implicitDS.mapError(e => {
@@ -62,9 +63,9 @@ object DaoNested:
_ <- run(liftQuery(ExampleData.people).foreach(row => query[PersonT].insertValue(row)))
_ <- run(liftQuery(ExampleData.addresses).foreach(row => query[AddressT].insertValue(row)))
} yield ()).implicitDS
-end DaoNested
+} // end DaoNested

-object CalibanExampleNested extends zio.ZIOAppDefault:
+object CalibanExampleNested extends zio.ZIOAppDefault {
private val logger = ContextLogger(classOf[CalibanExampleNested.type])

case class Queries(
@@ -107,4 +108,4 @@ object CalibanExampleNested extends zio.ZIOAppDefault:
override def run =
myApp.exitCode

-end CalibanExampleNested
+} // end CalibanExampleNested
10 changes: 6 additions & 4 deletions quill-cassandra/src/main/scala/io/getquill/UdtMetaDsl.scala
@@ -20,16 +20,16 @@ trait UdtMeta[T <: Udt] {
def alias(col: String): Option[String]
}

-object UdtMeta:
+object UdtMeta {
import scala.quoted.*
-def build[T <: Udt: Type](using Quotes): Expr[UdtMeta[T]] =
+def build[T <: Udt: Type](using Quotes): Expr[UdtMeta[T]] = {
import quotes.reflect.*
if (TypeRepr.of[T] =:= TypeRepr.of[Udt])
// TODO quill.trace.types 'summoning' level should enable this
//println("Cannot derive schema for the base Udt (print the stack trace too)")
'{ ??? }
else
-Expr.summon[UdtMeta[T]] match
+Expr.summon[UdtMeta[T]] match {
// if there is an implicit meta
case Some(meta) => meta
// def apply[T <: Udt: Type](path: Expr[String], columns: Expr[Seq[T => (Any, String)]])(using Quotes): Expr[UdtMeta[T]] = {
@@ -38,4 +38,6 @@ object UdtMeta:
// TODO quill.trace.types 'summoning' level should enable this
//println(s"Dsl not found. Making one with the type name: ${typeName}")
UdtMetaDslMacro[T](Expr(typeName), Expr.ofList(Seq()))
-end UdtMeta
+}
+}
+} // end UdtMeta
@@ -18,11 +18,12 @@ object UdtMetaDslMacro {
import quotes.reflect._

val columnsList =
-columns match
+columns match {
case '{ Nil } => Nil
case '{ List() } => Nil
case Varargs(cols) => cols
case _ => report.throwError(s"Invalid UdtMeta columns list: ${Format.Expr(columns)}", columns)
+}

// Do we need to do asTerm.underlyingArgument.asExpr to the terms here? As far as I understand,
// it is not a good idea to splice trees back in that have been underlyingArgumented (check conversations with Stucki)
@@ -12,13 +12,15 @@ trait CollectionDecoders extends EncodingDsl with CassandraRowContext {
this: Decoders =>

// TODO Remove variable b and put directly
-implicit def listDecoder[T, Cas](implicit mapper: CassandraMapper[Cas, T, MapperSide.Decode], ct: ClassTag[Cas]): Decoder[List[T]] =
+implicit def listDecoder[T, Cas](implicit mapper: CassandraMapper[Cas, T, MapperSide.Decode], ct: ClassTag[Cas]): Decoder[List[T]] = {
val b: BaseDecoder[List[T]] = (index, row, session) => row.getList[Cas](index, asClassOf[Cas]).asScala.map(row => mapper.f(row, session)).toList
decoder(b)
+}

-implicit def setDecoder[T, Cas](implicit mapper: CassandraMapper[Cas, T, MapperSide.Decode], ct: ClassTag[Cas]): Decoder[Set[T]] =
+implicit def setDecoder[T, Cas](implicit mapper: CassandraMapper[Cas, T, MapperSide.Decode], ct: ClassTag[Cas]): Decoder[Set[T]] = {
val b: BaseDecoder[Set[T]] = (index, row, session) => row.getSet[Cas](index, asClassOf[Cas]).asScala.map(row => mapper.f(row, session)).toSet
decoder(b)
+}

implicit def mapDecoder[K, V, KCas, VCas](
implicit
@@ -13,17 +13,21 @@ import com.datastax.oss.driver.api.core.data.UdtValue
import com.datastax.oss.driver.api.core.cql.Row
import com.datastax.oss.driver.api.core.cql.BoundStatement

-trait CassandraEncoderMaker[Encoder[_], T]:
+trait CassandraEncoderMaker[Encoder[_], T] {
def apply(e: (Int, T, BoundStatement, UdtValueLookup) => BoundStatement): Encoder[T]
+}

-trait CassandraDecoderMaker[Decoder[_], T]:
+trait CassandraDecoderMaker[Decoder[_], T] {
def apply(e: (Int, Row, UdtValueLookup) => T): Decoder[T]
+}

-trait CassandraEncodeMapperMaker[Encoder[_], T]:
+trait CassandraEncodeMapperMaker[Encoder[_], T] {
def apply(f: (T, UdtValueLookup) => UdtValue): CassandraMapper[T, UdtValue, MapperSide.Encode]
+}

-trait CassandraDecodeMapperMaker[Encoder[_], T]:
+trait CassandraDecodeMapperMaker[Encoder[_], T] {
def apply(f: (UdtValue, UdtValueLookup) => T): CassandraMapper[UdtValue, T, MapperSide.Decode]
+}

trait Encoders
extends CassandraRowContext
@@ -4,28 +4,33 @@ import scala.deriving._
import scala.quoted._
import io.getquill.util.Format

-object MirrorFields:
+object MirrorFields {
import io.getquill.metaprog.TypeExtensions._

-private def recurseCollect[Fields: Type, Types: Type](fieldsTup: Type[Fields], typesTup: Type[Types])(using Quotes): List[(String, Type[_])] =
+private def recurseCollect[Fields: Type, Types: Type](fieldsTup: Type[Fields], typesTup: Type[Types])(using Quotes): List[(String, Type[_])] = {
import quotes.reflect._
-(fieldsTup, typesTup) match
+(fieldsTup, typesTup) match {
case ('[field *: fields], '[tpe *: types]) =>
val fieldValue = Type.of[field].constValue
(fieldValue, Type.of[tpe]) :: recurseCollect[fields, types](Type.of[fields], Type.of[types])
case (_, '[EmptyTuple]) => Nil
case _ => report.throwError("Cannot Derive Product during Type Flattening of Expression:\n" + typesTup)
+}
+}

-def of[T: Type](using Quotes): (Expr[Mirror.ProductOf[T]], List[(String, Type[_])]) =
+def of[T: Type](using Quotes): (Expr[Mirror.ProductOf[T]], List[(String, Type[_])]) = {
import quotes.reflect._
-Expr.summon[Mirror.Of[T]] match
+Expr.summon[Mirror.Of[T]] match {
case Some(ev) =>
-ev match
+ev match {
case '{ $m: Mirror.ProductOf[T] { type MirroredElemLabels = elementLabels; type MirroredElemTypes = elementTypes }} =>
(m, recurseCollect[elementLabels, elementTypes](Type.of[elementLabels], Type.of[elementTypes]))
case '{ $m: Mirror.SumOf[T] { type MirroredElemLabels = elementLabels; type MirroredElemTypes = elementTypes }} =>
report.throwError(s"The detected type of ${Format.TypeOf[T]} is a Sum (i.e. Enum or Sealed trait hiearchy. Only Product-type (i.e. Case-Class) UDTs are supported.")
+}
case None =>
val traces = Thread.currentThread.getStackTrace.take(50).map(" " + _.toString).mkString("\n")
report.throwError(s"Could not detect mirror for: ${Format.TypeOf[T]}")
-end MirrorFields
+}
+}
+} // end MirrorFields
(Diffs for the remaining changed files were not loaded on this page.)
