
Commit: correct function names

fix compilation & upgrade dependencies
tribbloid committed Feb 10, 2024
1 parent dc6d80c commit 97fefd6
Showing 7 changed files with 18 additions and 97 deletions.
@@ -1,6 +1,5 @@
 package com.tribbloids.spookystuff.utils.data

-import ai.acyclic.prover.commons.same.EqualBy
 import com.tribbloids.spookystuff.relay.RootTagged
 import com.tribbloids.spookystuff.relay.xml.Xml
 import com.tribbloids.spookystuff.utils.{CommonUtils, TreeThrowable}
@@ -15,7 +14,7 @@ import scala.util.Try
 /**
   * entity-(with)-attribute-value
   */
-trait EAVLike extends HasEagerInnerObjects with EqualBy with RootTagged with Serializable {
+trait EAVLike extends HasEagerInnerObjects with RootTagged with Serializable {

   def internal: collection.Map[String, Any]

@@ -92,13 +91,6 @@ trait EAVLike extends HasEagerInnerObjects with EqualBy with RootTagged with Ser
     }
   }

-  @transient protected lazy val sortEvidence: Seq[String] = {
-
-    val result = KVs.declared.map(v => v._2.map(_.toString).orNull)
-    result
-  }
-  override def samenessDelegatedTo: Any = sortEvidence
-
   @transient lazy val asProperties: Properties = {
     val properties = new Properties()

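Note: the deleted sortEvidence / samenessDelegatedTo pair delegated equality to a key derived from the declared attribute values, which is the behaviour lost by dropping EqualBy from EAVLike. A minimal sketch of that delegation pattern, assuming a hypothetical SamenessDelegate trait rather than the real EqualBy API:

// hypothetical stand-in for EqualBy: equals/hashCode are computed from a delegate key
trait SamenessDelegate {

  protected def samenessDelegatedTo: Any

  override def hashCode: Int = samenessDelegatedTo.##

  override def equals(other: Any): Boolean = other match {
    case v: SamenessDelegate => this.samenessDelegatedTo == v.samenessDelegatedTo
    case _                   => false
  }
}

// two records whose delegate keys are equal compare equal, regardless of map insertion order
class Record(val kvs: Map[String, Any]) extends SamenessDelegate {
  override protected lazy val samenessDelegatedTo: Any = kvs.toSeq.sortBy(_._1)
}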
@@ -28,22 +28,6 @@ trait EAVSystem {
     }

     implicit def relay: Relay[^] = _Relay()
-
-    /**
-      * will determine ordering by the following evidences, in descending precedence:
-      *   - values of the defined attributes, in the order of definition
-      */
-    private lazy val _defaultOrdering: Ordering[_ <: EAV] = {
-      import Ordering.Implicits._
-
-      Ordering.by { v: EAV =>
-        v.sortEvidence
-      }
-    }
-
-    implicit def defaultOrdering[T <: EAV]: Ordering[T] = {
-      _defaultOrdering.asInstanceOf[Ordering[T]]
-    }
   }

   type ^ <: EAV
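The block removed above turned each EAV's sortEvidence into an implicit Ordering. A self-contained sketch of that mechanism, with a hypothetical Row standing in for EAV:

import scala.math.Ordering.Implicits._ // supplies an element-wise Ordering[Seq[String]]

object DefaultOrderingSketch {

  // hypothetical stand-in for an EAV carrying its sort key
  case class Row(sortEvidence: Seq[String])

  // lexicographic ordering over the evidence values, mirroring the removed _defaultOrdering
  implicit val rowOrdering: Ordering[Row] = Ordering.by((v: Row) => v.sortEvidence)

  def main(args: Array[String]): Unit = {
    val sorted = Seq(Row(Seq("b")), Row(Seq("a", "z"))).sorted
    println(sorted) // List(Row(List(a, z)), Row(List(b)))
  }
}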
@@ -172,12 +156,12 @@ trait EAVSystem {
         (m.getParameterTypes.length == 0) &&
           DSLUtils.isSerializable(m.getReturnType)
       }
-    val commonGetters = _methods
+    val commonGetters: Array[(String, Method)] = _methods
       .filter { m =>
         m.getName.startsWith("get")
       }
       .map(v => v.getName.stripPrefix("get") -> v)
-    val booleanGetters = _methods
+    val booleanGetters: Array[(String, Method)] = _methods
       .filter { m =>
         m.getName.startsWith("is")
       }
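For reference, the commonGetters / booleanGetters above (now explicitly typed) come from a reflective scan of zero-argument get* / is* methods. A minimal standalone sketch of that discovery step; the Example class is illustrative and the project's extra serializability check on return types is omitted:

import java.lang.reflect.Method

object GetterScanSketch {

  class Example {
    def getName: String = "demo"
    def isDir: Boolean = false
    def compute(x: Int): Int = x // ignored: takes a parameter
  }

  def main(args: Array[String]): Unit = {
    // only parameterless methods are considered
    val methods: Array[Method] = classOf[Example].getMethods
      .filter(_.getParameterTypes.length == 0)

    // zero-arg getters, keyed by the property name they expose
    val commonGetters: Array[(String, Method)] = methods
      .filter(_.getName.startsWith("get"))
      .map(m => m.getName.stripPrefix("get") -> m)

    val booleanGetters: Array[(String, Method)] = methods
      .filter(_.getName.startsWith("is"))
      .map(m => m.getName.stripPrefix("is") -> m)

    println(commonGetters.map(_._1).toSeq) // includes Name (plus Class, inherited from java.lang.Object)
    println(booleanGetters.map(_._1).toSeq) // includes Dir
  }
}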
@@ -96,7 +96,9 @@ abstract class Resource extends LocalCleanable {
       .groupBy(_.lookup("Type").toString)

     val childMaps = grouped.view.mapValues { vs =>
-      vs.sorted
+      val sorted = vs.sortBy(_.sortEvidence)
+
+      sorted
         .map { md =>
           md.internal
         }
@@ -21,5 +21,11 @@ object ResourceMetadata extends EAVSystem {
     case object StatusCode extends Attr[Int](List("status-code"))

     case object `isDir` extends Attr[Boolean]()
+
+    @transient lazy val sortEvidence: Option[String] = {
+
+      val result = URI.get
+      result
+    }
   }
 }
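The new sortEvidence member exposes the URI attribute as an optional sort key, and Resource.scala above now sorts children with it explicitly instead of relying on the removed implicit ordering. A sketch of how sorting on an Option[String] key behaves, with Meta as a hypothetical stand-in for a metadata EAV:

object SortEvidenceSketch {

  // hypothetical stand-in for a ResourceMetadata row whose URI attribute may be undefined
  case class Meta(uri: Option[String]) {
    @transient lazy val sortEvidence: Option[String] = uri
  }

  def main(args: Array[String]): Unit = {
    val vs = Seq(Meta(Some("file:/b")), Meta(None), Meta(Some("file:/a")))

    // scala.math.Ordering supplies Ordering[Option[String]]: None sorts before any Some
    val sorted = vs.sortBy(_.sortEvidence)

    println(sorted.map(_.sortEvidence)) // List(None, Some(file:/a), Some(file:/b))
  }
}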
@@ -1,14 +1,12 @@
 package com.tribbloids.spookystuff.metrics

-import com.tribbloids.spookystuff.utils.CommonUtils
 import com.tribbloids.spookystuff.relay.TreeIR
+import com.tribbloids.spookystuff.utils.CommonUtils
 import org.apache.spark.ml.dsl.utils.refl.ReflectionUtils
 import org.apache.spark.util.AccumulatorV2

-import java.lang.reflect.Modifier
 import scala.collection.mutable
 import scala.language.implicitConversions
-import scala.util.Try

 /**
   * Created by peng on 03/10/15.
@@ -84,59 +82,5 @@ abstract class AbstractMetrics extends MetricLike {

 object AbstractMetrics {

-  // case object Empty extends AbstractMetrics
-
-  // TODO: useless at the moment
-  abstract class HasExtraMembers extends AbstractMetrics {
-
-    def initialise(): Unit = {
-
-      // lazy members has to be initialised before shipping
-      extraMembers
-    }
-
-    final protected def writeReplace(): Any = {
-      initialise()
-      this
-    }
-
-    @transient private lazy val extraMembers: List[(String, MetricLike)] = {
-      val methods = this.getClass.getMethods.toList
-        .filter { method =>
-          val parameterMatch = method.getParameterCount == 0
-          val returnTypeMatch = classOf[MetricLike].isAssignableFrom(method.getReturnType)
-
-          returnTypeMatch && parameterMatch
-        }
-        .sortBy(_.getName)
-
-      val publicMethods = methods.filter { method =>
-        val mod = method.getModifiers
-        !method.getName.startsWith("copy") && Modifier.isPublic(mod) && !Modifier.isStatic(mod)
-      }
-
-      val extra = publicMethods.flatMap { method =>
-        Try {
-          val value = method.invoke(this).asInstanceOf[MetricLike]
-          if (value == null)
-            throw new UnsupportedOperationException(s"member `${method.getName}` has not been initialised")
-
-          if (value.eq(this) || value == null) {
-            None
-          } else {
-            Some(method.getName -> value)
-          }
-        }.toOption.toSeq.flatten
-      }
-
-      extra
-    }
-
-    override protected def _symbol2children: List[(String, Any)] = {
-
-      super._symbol2children ++ extraMembers
-    }
-  }
-
   implicit def asView(v: AbstractMetrics): v.View.type = v.View
 }
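The removed HasExtraMembers relied on writeReplace to force its lazy members to initialise on the sending side before Java serialization ships the object. A minimal sketch of just that idiom, with illustrative names:

import java.io.{ByteArrayOutputStream, ObjectOutputStream}

object EagerBeforeShippingSketch {

  class Payload extends Serializable {

    // state that should be computed before the object is serialized
    lazy val members: List[String] = List("a", "b")

    // called by Java serialization: touching the lazy val forces initialisation,
    // then the same object is written out, carrying the computed value with it
    protected def writeReplace(): Any = {
      members
      this
    }
  }

  def main(args: Array[String]): Unit = {
    val out = new ObjectOutputStream(new ByteArrayOutputStream())
    out.writeObject(new Payload) // writeReplace runs here, initialising `members` first
    out.close()
  }
}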
@@ -1,12 +1,12 @@
 package com.tribbloids.spookystuff.metrics

-import com.tribbloids.spookystuff.metrics.MetricsSuite.{DummyMetrics, DummyMetrics_HasMembers, DummyTreeMetrics}
+import com.tribbloids.spookystuff.metrics.MetricsSpec.{DummyMetrics, DummyTreeMetrics}
 import com.tribbloids.spookystuff.testutils.{BaseSpec, TestHelper}
 import com.tribbloids.spookystuff.relay.io.Encoder
 import org.apache.spark.sql.execution.streaming.EventTimeStatsAccum
 import org.apache.spark.util.{DoubleAccumulator, LongAccumulator}

-object MetricsSuite {
+object MetricsSpec {

   case class DummyMetrics(
     v1: Acc[LongAccumulator] = "v1" -> 0L,
@@ -17,21 +17,14 @@ object MetricsSuite {
     v3: Acc[EventTimeStatsAccum] = "v3" -> 2L,
     sub: DummyMetrics = DummyMetrics()
   ) extends AbstractMetrics
-
-  case class DummyMetrics_HasMembers() extends AbstractMetrics.HasExtraMembers {
-
-    lazy val v1: Acc[LongAccumulator] = "v1" -> 0L
-    lazy val v2: Acc[DoubleAccumulator] = "v2" -> 1.0
-
-  }
 }

-class MetricsSuite extends BaseSpec {
+class MetricsSpec extends BaseSpec {

   TestHelper.TestSC

   it("can be converted to JSON") {
-    Seq(DummyMetrics(), DummyMetrics_HasMembers()).foreach { v =>
+    Seq(DummyMetrics()).foreach { v =>
       val m = v.View
       m.toTreeIR
         .toJSON()
prover-commons: 2 changes (1 addition & 1 deletion)
Submodule prover-commons updated 21 files:
+1 −1 buildSrc
+10 −10 gradlew.bat
+2 −0 module/core/src/main/scala/ai/acyclic/prover/commons/__Glossary.scala
+2 −2 module/core/src/main/scala/ai/acyclic/prover/commons/function/api/FnLike.scala
+0 −1 module/core/src/main/scala/ai/acyclic/prover/commons/function/api/HasPolyLike.scala
+64 −17 module/core/src/main/scala/ai/acyclic/prover/commons/graph/Engine.scala
+6 −6 module/core/src/main/scala/ai/acyclic/prover/commons/graph/NodeK.scala
+1 −1 module/core/src/main/scala/ai/acyclic/prover/commons/graph/local/Local.scala
+4 −4 module/core/src/main/scala/ai/acyclic/prover/commons/graph/viz/LinkedHierarchy.scala
+1 −1 module/core/src/test/scala/ai/acyclic/prover/commons/function/PreDefSpec.scala
+28 −24 module/core/src/test/scala/ai/acyclic/prover/commons/graph/GraphFixture.scala
+3 −3 module/core/src/test/scala/ai/acyclic/prover/commons/graph/TreeFixture.scala
+5 −4 module/core/src/test/scala/ai/acyclic/prover/commons/graph/local/ops/AnyGraphBinarySpec.scala
+6 −5 module/core/src/test/scala/ai/acyclic/prover/commons/graph/local/ops/AnyGraphUnarySpec.scala
+4 −4 module/core/src/test/scala/ai/acyclic/prover/commons/graph/viz/FlowSpec.scala
+5 −5 module/core/src/test/scala/ai/acyclic/prover/commons/graph/viz/LinkedHierarchySpec.scala
+4 −4 module/meta2/src/main/scala/ai/acyclic/prover/commons/meta2/ProductDiscovery.scala
+5 −10 module/meta2/src/main/scala/ai/acyclic/prover/commons/viz/TypeOfMixin.scala
+25 −10 notebook/src/test/scala/ai/acyclic/prover/commons/notebook/ForwardPlot.scala
+2 −2 notebook/src/test/scala/ai/acyclic/prover/commons/notebook/PathToDOT.scala
+1 −1 notebook/src/test/scala/ai/acyclic/prover/commons/notebook/SplainPluginVersions.scala
