Switch from abstract val to def in traits
RustedBones committed Aug 23, 2023
1 parent 993ad92 commit 8054da3
Showing 10 changed files with 51 additions and 55 deletions.
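The rationale for preferring abstract `def` over abstract `val` in traits (standard Scala guidance; the commit message itself doesn't spell it out): `def` is the most general abstract member, so implementors may satisfy it with a `def`, `val`, or `lazy val`, whereas an abstract `val` forces eager field storage in every implementation and invites trait initialization-order bugs. A minimal sketch with hypothetical names, not code from this commit:

// Illustrative only; HasSchema, Eager, and Deferred are made-up names.
trait HasSchema {
  def schema: String // abstract def: evaluation strategy left to the implementor
}

// A val still satisfies the def, so concrete classes need not change:
final class Eager extends HasSchema {
  override val schema: String = "eager"
}

// A lazy val defers the computation until first access, which an
// abstract val could not express:
final class Deferred extends HasSchema {
  override lazy val schema: String = "computed on first use"
}

This is also why the diff below only rewrites abstract declarations and the `override val`s of those members; concrete members such as `override val default` in ProtobufType.scala can stay `val`s, since a `val` legally overrides a `def`.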
2 changes: 1 addition & 1 deletion avro/src/main/scala/magnolify/avro/AvroType.scala
@@ -34,7 +34,7 @@ import scala.jdk.CollectionConverters._
 import scala.reflect.ClassTag
 
 sealed trait AvroType[T] extends Converter[T, GenericRecord, GenericRecord] {
-  val schema: Schema
+  def schema: Schema
   def apply(r: GenericRecord): T = from(r)
   def apply(t: T): GenericRecord = to(t)
 }
6 changes: 3 additions & 3 deletions bigquery/src/main/scala/magnolify/bigquery/TableRowType.scala
@@ -34,9 +34,9 @@ class description(description: String) extends StaticAnnotation with Serializabl
 }
 
 sealed trait TableRowType[T] extends Converter[T, TableRow, TableRow] {
-  val schema: TableSchema
-  val description: String
-  val selectedFields: Seq[String]
+  def schema: TableSchema
+  def description: String
+  def selectedFields: Seq[String]
 
   def apply(v: TableRow): T = from(v)
   def apply(v: T): TableRow = to(v)
10 changes: 5 additions & 5 deletions bigtable/src/main/scala/magnolify/bigtable/BigtableType.scala
@@ -113,7 +113,7 @@ object BigtableField {
   sealed trait Record[T] extends BigtableField[T]
 
   sealed trait Primitive[T] extends BigtableField[T] {
-    val size: Option[Int]
+    def size: Option[Int]
     def fromByteString(v: ByteString): T
     def toByteString(v: T): ByteString
 
@@ -187,7 +187,7 @@ object BigtableField {
   class FromWord[T] {
     def apply[U](f: T => U)(g: U => T)(implicit btf: Primitive[T]): Primitive[U] =
       new Primitive[U] {
-        override val size: Option[Int] = btf.size
+        override def size: Option[Int] = btf.size
         def fromByteString(v: ByteString): U = f(btf.fromByteString(v))
         def toByteString(v: U): ByteString = btf.toByteString(g(v))
       }
@@ -196,7 +196,7 @@
   private def primitive[T](
     capacity: Int
   )(f: ByteBuffer => T)(g: (ByteBuffer, T) => ByteBuffer): Primitive[T] = new Primitive[T] {
-    override val size: Option[Int] = Some(capacity)
+    override def size: Option[Int] = Some(capacity)
     override def fromByteString(v: ByteString): T = f(v.asReadOnlyByteBuffer())
     override def toByteString(v: T): ByteString = {
      val bb = ByteBuffer.allocate(capacity)
@@ -224,7 +224,7 @@ object BigtableField {
   }
 
   implicit val btfByteString: Primitive[ByteString] = new Primitive[ByteString] {
-    override val size: Option[Int] = None
+    override def size: Option[Int] = None
     override def fromByteString(v: ByteString): ByteString = v
     override def toByteString(v: ByteString): ByteString = v
   }
@@ -268,7 +268,7 @@ object BigtableField {
     fc: FactoryCompat[T, C[T]]
   ): Primitive[C[T]] =
     new Primitive[C[T]] {
-      override val size: Option[Int] = None
+      override def size: Option[Int] = None
 
      override def fromByteString(v: ByteString): C[T] = {
        val buf = v.asReadOnlyByteBuffer()
2 changes: 1 addition & 1 deletion datastore/src/main/scala/magnolify/datastore/EntityType.scala
@@ -87,7 +87,7 @@ object KeyField {
 }
 
 sealed trait EntityField[T] extends Serializable {
-  val keyField: KeyField[T]
+  def keyField: KeyField[T]
   def from(v: Value)(cm: CaseMapper): T
   def to(v: T)(cm: CaseMapper): Value.Builder
 }
3 changes: 0 additions & 3 deletions neo4j/src/main/scala/magnolify/neo4j/ValueType.scala
@@ -50,10 +50,7 @@ object ValueType {
 }
 
 sealed trait ValueField[T] extends Serializable {
-  self =>
-
   def from(v: Value)(cm: CaseMapper): T
-
   def to(v: T)(cm: CaseMapper): Value
 }
 
49 changes: 24 additions & 25 deletions parquet/src/main/scala/magnolify/parquet/ParquetField.scala
@@ -42,14 +42,14 @@ sealed trait ParquetField[T] extends Serializable {
   def schema(cm: CaseMapper): Type =
     schemaCache.getOrElseUpdate(cm.uuid, buildSchema(cm))
 
-  val hasAvroArray: Boolean = false
+  def hasAvroArray: Boolean = false
   def fieldDocs(cm: CaseMapper): Map[String, String]
   def typeDoc: Option[String]
 
-  protected val isGroup: Boolean = false
+  protected def isGroup: Boolean = false
   protected def isEmpty(v: T): Boolean
   def write(c: RecordConsumer, v: T)(cm: CaseMapper): Unit
-  def newConverter: TypeConverter[T]
+  def newConverter(): TypeConverter[T]
 
   protected def writeGroup(c: RecordConsumer, v: T)(cm: CaseMapper): Unit = {
     if (isGroup) {
@@ -64,8 +64,7 @@ sealed trait ParquetField[T] extends Serializable {
 
 object ParquetField {
   sealed trait Record[T] extends ParquetField[T] {
-    override protected val isGroup: Boolean = true
-
+    override protected def isGroup: Boolean = true
     override protected def isEmpty(v: T): Boolean = false
   }
 
@@ -81,15 +80,15 @@
         override protected def isEmpty(v: T): Boolean = tc.isEmpty(p.dereference(v))
         override def write(c: RecordConsumer, v: T)(cm: CaseMapper): Unit =
           tc.writeGroup(c, p.dereference(v))(cm)
-        override def newConverter: TypeConverter[T] = {
-          val buffered = tc.newConverter
+        override def newConverter(): TypeConverter[T] = {
+          val buffered = tc.newConverter()
             .asInstanceOf[TypeConverter.Buffered[p.PType]]
           new TypeConverter.Delegate[p.PType, T](buffered) {
             override def get: T = inner.get(b => caseClass.construct(_ => b.head))
           }
         }
         override def fieldDocs(cm: CaseMapper): Map[String, String] = Map.empty
-        override val typeDoc: Option[String] = None
+        override def typeDoc: Option[String] = None
       }
     } else {
       new Record[T] {
@@ -137,9 +136,9 @@ object ParquetField {
         }
       }
 
-      override def newConverter: TypeConverter[T] =
+      override def newConverter(): TypeConverter[T] =
        new GroupConverter with TypeConverter.Buffered[T] {
-          private val fieldConverters = caseClass.parameters.map(_.typeclass.newConverter)
+          private val fieldConverters = caseClass.parameters.map(_.typeclass.newConverter())
 
          override def isPrimitive: Boolean = false
 
@@ -189,8 +188,8 @@ object ParquetField {
      new Primitive[U] {
        override def buildSchema(cm: CaseMapper): Type = pf.schema(cm)
        override def write(c: RecordConsumer, v: U)(cm: CaseMapper): Unit = pf.write(c, g(v))(cm)
-        override def newConverter: TypeConverter[U] =
-          pf.newConverter.asInstanceOf[TypeConverter.Primitive[T]].map(f)
+        override def newConverter(): TypeConverter[U] =
+          pf.newConverter().asInstanceOf[TypeConverter.Primitive[T]].map(f)
        override type ParquetT = pf.ParquetT
      }
   }
@@ -200,7 +199,7 @@
   sealed trait Primitive[T] extends ParquetField[T] {
     override protected def isEmpty(v: T): Boolean = false
     override def fieldDocs(cm: CaseMapper): Map[String, String] = Map.empty
-    override val typeDoc: Option[String] = None
+    override def typeDoc: Option[String] = None
     type ParquetT <: Comparable[ParquetT]
   }
 
@@ -213,7 +212,7 @@
    new Primitive[T] {
      override def buildSchema(cm: CaseMapper): Type = Schema.primitive(ptn, lta)
      override def write(c: RecordConsumer, v: T)(cm: CaseMapper): Unit = f(c)(v)
-      override def newConverter: TypeConverter[T] = g
+      override def newConverter(): TypeConverter[T] = g
      override type ParquetT = UnderlyingT
    }
 
@@ -284,13 +283,13 @@ object ParquetField {
 
      override def fieldDocs(cm: CaseMapper): Map[String, String] = t.fieldDocs(cm)
 
-      override val typeDoc: Option[String] = None
+      override def typeDoc: Option[String] = None
 
      override def write(c: RecordConsumer, v: Option[T])(cm: CaseMapper): Unit =
        v.foreach(t.writeGroup(c, _)(cm))
 
-      override def newConverter: TypeConverter[Option[T]] = {
-        val buffered = t.newConverter
+      override def newConverter(): TypeConverter[Option[T]] = {
+        val buffered = t.newConverter()
          .asInstanceOf[TypeConverter.Buffered[T]]
          .withRepetition(Repetition.OPTIONAL)
        new TypeConverter.Delegate[T, Option[T]](buffered) {
@@ -325,7 +324,7 @@ object ParquetField {
        }
      }
 
-      override protected val isGroup: Boolean = hasAvroArray
+      override protected def isGroup: Boolean = hasAvroArray
      override protected def isEmpty(v: C[T]): Boolean = v.isEmpty
 
      override def write(c: RecordConsumer, v: C[T])(cm: CaseMapper): Unit =
@@ -337,8 +336,8 @@ object ParquetField {
          v.foreach(t.writeGroup(c, _)(cm))
        }
 
-      override def newConverter: TypeConverter[C[T]] = {
-        val buffered = t.newConverter
+      override def newConverter(): TypeConverter[C[T]] = {
+        val buffered = t.newConverter()
          .asInstanceOf[TypeConverter.Buffered[T]]
          .withRepetition(Repetition.REPEATED)
        val arrayConverter = new TypeConverter.Delegate[T, C[T]](buffered) {
@@ -362,7 +361,7 @@
 
      override def fieldDocs(cm: CaseMapper): Map[String, String] = t.fieldDocs(cm)
 
-      override val typeDoc: Option[String] = None
+      override def typeDoc: Option[String] = None
    }
  }
 
@@ -375,8 +374,8 @@
    def apply[U](f: T => U)(g: U => T)(implicit pf: Primitive[T]): Primitive[U] = new Primitive[U] {
      override def buildSchema(cm: CaseMapper): Type = Schema.setLogicalType(pf.schema(cm), lta)
      override def write(c: RecordConsumer, v: U)(cm: CaseMapper): Unit = pf.write(c, g(v))(cm)
-      override def newConverter: TypeConverter[U] =
-        pf.newConverter.asInstanceOf[TypeConverter.Primitive[T]].map(f)
+      override def newConverter(): TypeConverter[U] =
+        pf.newConverter().asInstanceOf[TypeConverter.Primitive[T]].map(f)
 
      override type ParquetT = pf.ParquetT
    }
@@ -418,7 +417,7 @@ object ParquetField {
      override def write(c: RecordConsumer, v: BigDecimal)(cm: CaseMapper): Unit =
        c.addBinary(Binary.fromConstantByteArray(Decimal.toFixed(v, precision, scale, length)))
 
-      override def newConverter: TypeConverter[BigDecimal] = TypeConverter.newByteArray.map { ba =>
+      override def newConverter(): TypeConverter[BigDecimal] = TypeConverter.newByteArray.map { ba =>
        Decimal.fromBytes(ba, precision, scale)
      }
 
@@ -453,7 +452,7 @@ object ParquetField {
        )
      )
 
-      override def newConverter: TypeConverter[UUID] = TypeConverter.newByteArray.map { ba =>
+      override def newConverter(): TypeConverter[UUID] = TypeConverter.newByteArray.map { ba =>
        val bb = ByteBuffer.wrap(ba)
        val h = bb.getLong
        val l = bb.getLong
8 changes: 4 additions & 4 deletions parquet/src/main/scala/magnolify/parquet/ParquetType.scala
@@ -51,7 +51,7 @@ sealed trait ParquetType[T] extends Serializable {
 
   def schema: MessageType
   def avroSchema: AvroSchema
-  val avroCompat: Boolean
+  def avroCompat: Boolean
 
   def setupInput(job: Job): Unit = {
     job.setInputFormatClass(classOf[ParquetInputFormat[T]])
@@ -71,8 +71,8 @@ sealed trait ParquetType[T] extends Serializable {
   def readBuilder(file: InputFile): ReadBuilder[T] = new ReadBuilder(file, readSupport)
   def writeBuilder(file: OutputFile): WriteBuilder[T] = new WriteBuilder(file, writeSupport)
 
-  def write(c: RecordConsumer, v: T): Unit = ()
-  def newConverter: TypeConverter[T] = null
+  def write(c: RecordConsumer, v: T): Unit
+  def newConverter(): TypeConverter[T]
 }
 
 object ParquetType {
@@ -97,7 +97,7 @@ object ParquetType {
        override val avroCompat: Boolean =
          pa == ParquetArray.AvroCompat.avroCompat || f.hasAvroArray
        override def write(c: RecordConsumer, v: T): Unit = r.write(c, v)(cm)
-        override def newConverter: TypeConverter[T] = r.newConverter
+        override def newConverter(): TypeConverter[T] = r.newConverter()
      }
    case _ =>
      throw new IllegalArgumentException(s"ParquetType can only be created from Record. Got $f")
14 changes: 7 additions & 7 deletions protobuf/src/main/scala/magnolify/protobuf/ProtobufType.scala
@@ -101,8 +101,8 @@ sealed trait ProtobufField[T] extends Serializable {
   type FromT
   type ToT
 
-  val hasOptional: Boolean
-  val default: Option[T]
+  def hasOptional: Boolean
+  def default: Option[T]
 
   def checkDefaults(descriptor: Descriptor)(cm: CaseMapper): Unit = ()
 
@@ -119,7 +119,7 @@ object ProtobufField {
   }
 
   sealed trait Record[T] extends Aux[T, Message, Message] {
-    override val default: Option[T] = None
+    override def default: Option[T] = None
   }
 
   // ////////////////////////////////////////////////
@@ -133,7 +133,7 @@ object ProtobufField {
      new ProtobufField[T] {
        override type FromT = tc.FromT
        override type ToT = tc.ToT
-        override val hasOptional: Boolean = tc.hasOptional
+        override def hasOptional: Boolean = tc.hasOptional
        override val default: Option[T] = tc.default.map(x => caseClass.construct(_ => x))
        override def from(v: FromT)(cm: CaseMapper): T = caseClass.construct(_ => tc.from(v)(cm))
        override def to(v: T, b: Message.Builder)(cm: CaseMapper): ToT =
@@ -243,7 +243,7 @@ object ProtobufField {
 
   private def aux[T, From, To](_default: T)(f: From => T)(g: T => To): ProtobufField[T] =
     new Aux[T, From, To] {
-      override val hasOptional: Boolean = false
+      override def hasOptional: Boolean = false
      override val default: Option[T] = Some(_default)
      override def from(v: FromT)(cm: CaseMapper): T = f(v)
      override def to(v: T, b: Message.Builder)(cm: CaseMapper): ToT = g(v)
@@ -282,7 +282,7 @@ object ProtobufField {
 
   implicit def pfOption[T](implicit f: ProtobufField[T]): ProtobufField[Option[T]] =
     new Aux[Option[T], f.FromT, f.ToT] {
-      override val hasOptional: Boolean = true
+      override def hasOptional: Boolean = true
      override val default: Option[Option[T]] = f.default match {
        case Some(v) => Some(Some(v))
        case None => None
@@ -306,7 +306,7 @@ object ProtobufField {
     fc: FactoryCompat[T, C[T]]
   ): ProtobufField[C[T]] =
     new Aux[C[T], ju.List[f.FromT], ju.List[f.ToT]] {
-      override val hasOptional: Boolean = false
+      override def hasOptional: Boolean = false
      override val default: Option[C[T]] = Some(fc.newBuilder.result())
      override def from(v: ju.List[f.FromT])(cm: CaseMapper): C[T] = {
        val b = fc.newBuilder
10 changes: 5 additions & 5 deletions shared/src/main/scala/magnolify/shared/EnumType.scala
@@ -23,11 +23,11 @@ import scala.reflect.macros._
 import scala.annotation.{implicitNotFound, nowarn}
 
 sealed trait EnumType[T] extends Serializable { self =>
-  val name: String
-  val namespace: String
-  val values: List[String]
-  val valueSet: Set[String]
-  val annotations: List[Any]
+  def name: String
+  def namespace: String
+  def values: List[String]
+  def valueSet: Set[String]
+  def annotations: List[Any]
   def from(v: String): T
   def to(v: T): String
 
2 changes: 1 addition & 1 deletion tensorflow/src/main/scala/magnolify/tensorflow/ExampleType.scala
@@ -34,7 +34,7 @@ class doc(msg: String) extends StaticAnnotation with Serializable {
 }
 
 sealed trait ExampleType[T] extends Converter[T, Example, Example.Builder] {
-  val schema: Schema
+  def schema: Schema
   def apply(v: Example): T = from(v)
   def apply(v: T): Example = to(v).build()
 }
