Commit 040b09e8 authored by Arthur Bit-Monnot's avatar Arthur Bit-Monnot

Remove outdated code.

parent 68903ebb
package dahu.maps
/** A total function from `Domain` to `CoDomain`, contravariant in its input
  * and covariant in its output (a SAM type: only `apply` is abstract). */
trait Arrow[-Domain, +CoDomain] {

  /** Evaluates this arrow on the given input. */
  def apply(v: Domain): CoDomain

  /** Sequential composition: first this arrow, then `next` on its result. */
  def andThen[Res](next: Arrow[CoDomain, Res]): Arrow[Domain, Res] =
    ComposedArrow(this, next)

  /** View of this arrow as a plain Scala function. */
  def asScalaFunction: Domain => CoDomain = apply(_)
}
object Arrow {

  /** Wraps a plain Scala function as an [[Arrow]]. */
  def lift[A, B](f: A => B): Arrow[A, B] = FunctionArrow[A, B](f)
}
/** Arrow backed directly by a Scala function. */
final case class FunctionArrow[In, Res](f: In => Res) extends Arrow[In, Res] {
  override def apply(in: In): Res = f(in)
}
/** Arrow obtained by running `ab` and feeding its result into `bc`. */
final case class ComposedArrow[A, B, C](ab: Arrow[A, B], bc: Arrow[B, C]) extends Arrow[A, C] {
  override def apply(a: A): C = {
    val intermediate = ab(a)
    bc(intermediate)
  }
}
package dahu.maps
import scala.collection.mutable
trait PartialArrow[A, B] extends Arrow[A, Option[B]] {}
/** Function-like object whose result for a given input may change over time
  * (e.g. because it reads from a mutable store). */
trait MutableArrow[A, B] {
  def apply(a: A): B
}
/** Mutable store of key/value bindings; lookups return None for unbound keys. */
class MutableInputs[A, B] {

  // Backing mutable map holding the current bindings.
  val known: mutable.Map[A, B] = mutable.Map()

  /** Binds `a` to `b`, replacing any previous binding. */
  def update(a: A, b: B): Unit = known(a) = b

  /** Removes any binding for `a`; no-op if `a` is unbound. */
  def unset(a: A): Unit = { known.remove(a); () }

  /** Current binding for `a`, if any. */
  def apply(a: A): Option[B] = known.get(a)
}
/** Partial mutable function: looks up `a` in `inputs` and, when bound, applies `arrow`. */
class ArrowFromInputs[A, B, C](val inputs: MutableInputs[A, B], val arrow: B ==> C)
    extends MutableArrow[A, Option[C]] {
  override def apply(a: A): Option[C] =
    inputs(a) match {
      case Some(b) => Some(arrow(b))
      case None    => None
    }
}
package dahu.maps
import dahu.maps.memoization.{ArrayCache, Cache}
import scala.reflect.ClassTag
/** Type class witnessing that all instances of T can be enumerated. */
trait TypeInstances[@specialized(Int) T] {

  /** All instances of T in no particular order. */
  def enumerate: Array[T]
}
/** Enumerable opaque wrapper around a subset of Int, with (un)wrapping both for
  * single values and for values inside an arbitrary container F. */
trait OpaqueIntSubset[@specialized(Int) T] extends TypeInstances[T] {

  /** Underlying Int of a T. */
  def unwrap(a: T): Int

  /** T for an underlying Int (no validity check visible here). */
  def wrap(i: Int): T

  /** Casts a container of Int into a container of T. */
  def subst[F[_]](fi: F[Int]): F[T]

  /** Casts a container of T into a container of Int. */
  def unsubst[F[_]](fa: F[T]): F[Int]

  /** Instance with the smallest underlying Int; requires a non-empty enumeration. */
  def first: T = wrap(unsubst(enumerate).min)

  /** Instance with the largest underlying Int; requires a non-empty enumeration. */
  def last: T = wrap(unsubst(enumerate).max)

  /** Array-backed cache keyed by the underlying Int range [first, last]. */
  def newCache[B](implicit classTag: ClassTag[B]): Cache[T, B] = new ArrayCache[T, B](this)
}
package dahu.maps.memoization
import dahu.maps.OpaqueIntSubset
import scala.reflect.ClassTag
/** Minimal cache interface: lookup, lookup-with-default, and lookup-or-insert. */
trait Cache[A, B] {

  /** Cached value for `a`, if any. */
  def get(a: A): Option[B]

  /** Cached value for `a`, or `b` if absent (without storing it — see ArrayCache). */
  def getOrElse(a: A, b: => B): B

  /** Cached value for `a`; if absent, evaluates `b`, stores it and returns it. */
  def getOrElseUpdate(a: A, b: => B): B
}
/** Cache backed by a flat array, for keys whose underlying Ints lie in the
  * contiguous range [first, last] given by `ev`.
  *
  * FIXES vs original:
  *  - `getOrElseUpdate` evaluated its by-name argument `b` twice (once when
  *    storing, once when returning), re-running any side effects and possibly
  *    caching a value different from the one returned; it also set the mask
  *    before evaluating `b`, leaving a corrupt entry if `b` threw. It now
  *    evaluates `b` exactly once, before mutating any state.
  *  - `get` now uses the `address` helper consistently instead of inlining
  *    `unwrap(a) - offset`, and computes the address once per call.
  */
class ArrayCache[A, B](ev: OpaqueIntSubset[A])(implicit tag: ClassTag[B]) extends Cache[A, B] {
  import ev._

  // Smallest underlying Int; key `a` is stored at slot unwrap(a) - offset.
  private val offset: Int = unwrap(ev.first)
  private val size: Int = 1 + unwrap(last) - unwrap(first)
  private def address(a: A): Int = unwrap(a) - offset

  // memory(i) holds a cached value iff mask(i) is true.
  val memory: Array[B] = new Array[B](size)
  val mask: Array[Boolean] = Array.fill[Boolean](size)(false)

  override def get(a: A): Option[B] = {
    val i = address(a)
    if(mask(i))
      Some(memory(i))
    else
      None
  }

  override def getOrElse(a: A, b: => B): B = get(a).getOrElse(b)

  override def getOrElseUpdate(a: A, b: => B): B = {
    val i = address(a)
    if(mask(i)) {
      memory(i)
    } else {
      val value = b // evaluate the by-name argument exactly once
      memory(i) = value
      mask(i) = true
      value
    }
  }
}
package dahu.maps.memoization
import dahu.maps.{==>, OpaqueIntSubset, TypeInstances}
import scala.reflect.ClassTag
object Memo {

  /** Total memoized function: every legal input was precomputed at construction. */
  trait Memo[A, B] {
    def apply(a: A): B
  }

  /** Precomputes `f` over `values`; `apply` fails for inputs outside that set. */
  def memoize[A, B](values: Set[A], f: A => B): Memo[A, B] =
    new Memo[A, B] {
      val m: Map[A, B] = values.iterator.map(x => x -> f(x)).toMap
      override def apply(a: A): B = m(a)
    }

  /** Precomputes `f` over every enumerable instance of A. */
  def memoize[A, B](f: A => B)(implicit instances: TypeInstances[A]): Memo[A, B] = new Memo[A, B] {
    val m: Map[A, B] = instances.enumerate.iterator.map(x => x -> f(x)).toMap
    override def apply(a: A): B = m(a)
  }

  /** Memo backed by a flat array, for key types whose underlying Ints span the
    * contiguous range [first, last]. All values are computed eagerly. */
  class ArrayMemo[A, B](f: A ==> B)(implicit ev: OpaqueIntSubset[A], tag: ClassTag[B])
      extends Memo[A, B] {
    import ev._

    // Key `a` lives at slot unwrap(a) - offset.
    private val offset: Int = unwrap(ev.first)
    private val inputs: Array[A] = ev.enumerate

    // Eagerly fill one slot per enumerated input.
    val memory: Array[B] = {
      val arr = new Array[B](1 + unwrap(last) - unwrap(first))
      var i = 0
      while(i < inputs.length) {
        arr(unwrap(inputs(i)) - offset) = f(inputs(i))
        i += 1
      }
      arr
    }

    override def apply(a: A): B = memory(unwrap(a) - offset)
  }
}
package dahu
/** Provides the infix alias so `A ==> B` can be written for `Arrow[A, B]`. */
package object arrows {
  type ==>[A, B] = Arrow[A, B]
}
package dahu.maps
import cats.Functor
import dahu.maps.memoization.Cache
package object recursion {

  /** F-algebra: collapses one layer of F into an X. */
  type Algebra[F[_], X] = F[X] ==> X

  /** F-coalgebra: expands an X into one layer of F. */
  type Coalgebra[F[_], X] = X ==> F[X]

  /** Unfolds with `coalgebra` and refolds with `algebra` in a single pass.
    * NOTE(review): not stack-safe — recursion depth equals the depth of the
    * unfolded structure. */
  def hylo[A, B, F[_]](coalgebra: Coalgebra[F, A], algebra: Algebra[F, B])(
      implicit F: Functor[F]): A ==> B = {
    def recurse(a: A): B =
      algebra(F.map(coalgebra(a))(recurse))
    Arrow.lift(recurse)
  }

  /** Same as [[hylo]], but records and reuses intermediate results in `cache`. */
  def memoizedHylo[A, B, F[_]](coalgebra: Coalgebra[F, A],
                               algebra: Algebra[F, B],
                               cache: Cache[A, B])(implicit F: Functor[F]): A ==> B = {
    def recurse(a: A): B =
      cache.getOrElseUpdate(a, algebra(F.map(coalgebra(a))(recurse)))
    Arrow.lift(recurse)
  }
}
package dahu.ast
import dahu.maps.recursion.Algebra
import dahu.maps.{==>, recursion, OpaqueIntSubset}
import dahu.recursion.{ExprF, InputF}
import dahu.recursion.Types._
import scala.reflect.ClassTag
/** Flat table of expressions: each node of an expression tree is stored under an
  * opaque integer ID, and the tree is recovered through `coalgebra`. */
trait ASTable {

  /** Opaque type representing the IDs of the expression in this table.
    * Backed by Int; all conversions below are identities. */
  val EId: IndexLabelImpl = new IndexLabelImpl {
    type T = Int
    override def fromInt(s: Int): T = s
    override def toInt(lbl: T): Int = lbl
    override def fromIntF[F[_]](fs: F[Int]): F[T] = fs
    override def toIntF[F[_]](fs: F[T]): F[Int] = fs
  }
  type EId = EId.T

  /** IDs of variables: a sub-labelling of EId (every VarId is also an EId). */
  val VarId: SubIndexLabelImpl[EId] = new SubIndexLabelImpl[EId] {
    type T = EId
    override def fromInt(s: Int): EId = EId.fromInt(s)
    override def toInt(lbl: T): Int = EId.toInt(lbl)
    override def toIntF[F[_]](fs: F[EId]): F[Int] = EId.toIntF(fs)
    override def fromIntF[F[_]](fs: F[Int]): F[EId] = EId.fromIntF(fs)
    override def wrap(s: EId): T = s
    override def unwrap(lbl: T): EId = lbl
    override def subst[F[_]](fs: F[EId]): F[T] = fs
    override def unsubst[F[_]](fs: F[T]): F[EId] = fs
  }
  type VarId = VarId.T

  type Expr = ExprF[EId]
  type Variable = InputF[EId]

  /** ID of the root expression of the table. */
  def root: EId

  /** Maps an ID to its (one-layer) expression node. */
  def coalgebra: EId ==> Expr // equivalent to Coalgebra[ExprF, EId]

  // NOTE(review): the cast assumes every VarId points at an InputF node; in
  // Ops.toTable, variableIds only enumerates indices holding a Variable.
  def variableCoalgebra: VarId ==> Variable =
    id => coalgebra(VarId.unwrap(id)).asInstanceOf[Variable]

  /** All expression IDs of this table. */
  def ids: OpaqueIntSubset[EId]

  /** The subset of IDs denoting variables (InputF nodes). */
  def variableIds: OpaqueIntSubset[VarId]

  /** Folds the table from a given ID using `algebra`. */
  def hylo[X](algebra: Algebra[ExprF, X]): EId ==> X = recursion.hylo(coalgebra, algebra)

  /** Like [[hylo]] but caches per-ID results in an array cache over `ids`. */
  def memoizedHylo[X](algebra: Algebra[ExprF, X])(implicit classTag: ClassTag[X]): EId ==> X =
    recursion.memoizedHylo(coalgebra, algebra, ids.newCache[X])
}
package dahu.ast
import cats.Functor
import dahu.maps._
import dahu.maps.memoization.Cache
import dahu.maps.recursion.Algebra
import dahu.recursion._
import spire.ClassTag
import scala.collection.mutable
object Ops {

  /** Encodes an expression tree as a flat [[ASTable]] of nodes indexed by EId. */
  val toTable: dahu.expr.Expr[Any] ==> ASTable = Arrow.lift((x: dahu.expr.Expr[Any]) => {
    val (headInt, tableInt) = Algebras.encodeAsPair(x)
    new ASTable {
      // NOTE(review): cast assumes encodeAsPair returns a Vector[ExprF[Int]]
      // indexed by node ID — confirm against Algebras.encodeAsPair.
      private val vec: Array[ExprF[EId]] = tableInt.asInstanceOf[Vector[Expr]].toArray
      override val root: EId = EId(headInt)
      // Node lookup is a plain array access on the decoded table.
      override val coalgebra: EId ==> Expr = Arrow.lift(x => vec(EId.toInt(x)))
      // All indices of the table are valid expression IDs.
      override val ids: OpaqueIntSubset[EId] = new OpaqueIntSubset[EId] {
        override def wrap(i: Int): EId = EId.fromInt(i)
        override def unwrap(a: EId): Int = EId.toInt(a)
        override def subst[F[_]](fi: F[Int]): F[EId] = EId.fromIntF(fi)
        override def unsubst[F[_]](fa: F[EId]): F[Int] = EId.toIntF(fa)
        override val enumerate: Array[EId] = subst(tableInt.indices.toArray)
      }
      // Variable IDs are exactly the indices holding an InputF node.
      override val variableIds: OpaqueIntSubset[VarId] = new OpaqueIntSubset[VarId] {
        override def wrap(i: Int): VarId = VarId.fromInt(i)
        override def unwrap(a: VarId): Int = VarId.toInt(a)
        override def subst[F[_]](fi: F[Int]): F[VarId] = VarId.fromIntF(fi)
        override def unsubst[F[_]](fa: F[VarId]): F[Int] = VarId.toIntF(fa)
        override val enumerate: Array[VarId] =
          subst(tableInt.indices.filter(vec(_).isInstanceOf[Variable]).toArray)
      }
    }
  })

  /** Type carried by an expression node. */
  def extractType[X]: ExprF[X] ==> Type = Arrow.lift(_.typ)

  /** Type of the node designated by an ID. */
  def types(ast: ASTable): ast.EId ==> Type = ast.coalgebra.andThen(extractType)

  /** Recursive evaluator: constants evaluate to themselves, inputs are read from
    * `inputs`, computations apply their function to evaluated arguments.
    * NOTE(review): match looks non-exhaustive over ExprF — confirm these are the
    * only node kinds; not stack-safe for deep expressions. */
  def evaluator(ast: ASTable)(inputs: ast.Variable ==> Value): ast.EId ==> Value = {
    def go(i: ast.EId): Value = {
      ast.coalgebra(i) match {
        case CstF(v, _) => v
        case x @ InputF(_, _) => inputs(x)
        case ComputationF(f, args, _) => Value(f.compute(args.map(go)))
      }
    }
    Arrow.lift(go)
  }

  /** One-layer version of [[evaluator]], suitable for use with hylo:
    * arguments are assumed already evaluated. Same exhaustiveness caveat. */
  def evalAlgebra(ast: ASTable)(inputs: ast.Variable ==> Value): Algebra[ExprF, Value] =
    Arrow.lift {
      case CstF(v, _) => v
      case x @ InputF(_, _) => inputs(x)
      case ComputationF(f, args, _) => Value(f.compute(args))
    }
}
package dahu.cerberdo
import cats.{~>, Id}
import dahu.expr._
import dahu.recursion.InputF
import shapeless.{Generic, HNil}
/** Experimental encoding of a small planning problem as expressions.
  *
  * FIX vs original: `Domain.actions` was written `for(i <- ...) { ... Action(...) }`
  * with no `yield`, so the `for` returned Unit and every constructed Action was
  * discarded. Adding `yield` makes `actions` the intended List[Action]. */
object Planning {
  // solution structure
  type Timepoint = Int
  type Duration = Int

  class CSP
  class Timeline[+V] {}
  class Token[V](start: Timepoint, duration: Duration, end: Timepoint, tl: Timeline[V], value: V)
  class Observation[V](time: Int, tl: Timeline[V], value: V)
  class FilledTimeline[V](tl: Timeline[V], tokens: Seq[Token[V]], observations: Set[Observation[V]])
  class Frame(timelines: Set[Timeline[Any]], csp: CSP)

  // planning structure
  object Domain {
    sealed abstract class Item
    object Item {
      object a extends Item
      object b extends Item
      val allInstances = List(a, b)
    }
    sealed abstract class Loc
    object Loc {
      case object la extends Loc
      case object lb extends Loc
      case object processed extends Loc
      val allInstances = List(la, lb, processed)
    }

    // One "at" timeline per item.
    val atTimelines: Map[Item, Timeline[Loc]] =
      Item.allInstances.map(i => i -> new Timeline[Loc]).toMap

    /** Token over a state variable `sv`: holds `value` over [s, e] when `present`. */
    case class Token[+V](s: Expr[Int],
                         d: Expr[Int],
                         e: Expr[Int],
                         present: Expr[Boolean],
                         sv: Timeline[V],
                         value: Expr[V],
                         isEffect: Boolean)

    // NOTE(review): unused in the visible code; kept for source compatibility.
    var tpId = 0

    def intVar(name: String): Expr[Int] = Input[Int](name)
    def tp(name: String): Expr[Int] = intVar(name)

    /** Builds a token with fresh start/duration/end variables named after `name`. */
    def token[V](name: String,
                 present: Expr[Boolean],
                 sv: Timeline[V],
                 value: Expr[V],
                 isEffect: Boolean): Token[V] =
      Token(tp(s"start($name)"),
            intVar(s"duration($name)"),
            tp(s"end($name)"),
            present,
            sv,
            value,
            isEffect)

    def effectToken[V](name: String, present: Expr[Boolean], sv: Timeline[V], value: Expr[V]) =
      token(name, present, sv, value, isEffect = true)

    def condToken[V](name: String, present: Expr[Boolean], sv: Timeline[V], value: Expr[V]) =
      token(name, present, sv, value, isEffect = false)

    case class Action(name: String,
                      present: Expr[Boolean],
                      tokens: Seq[Token[Any]],
                      constraints: Expr[Boolean])

    // action process(Item i),
    // FIX: `yield` was missing, so `actions` was Unit and the Actions were lost.
    val actions: List[Action] = for (i <- Item.allInstances) yield {
      val baseName = s"pick_$i"
      val present = Input[Boolean](s"pick_$i")
      val from = Input[Loc](s"pick_from_$i")
      val sv = atTimelines(i)
      val condition = condToken(baseName + "_from", present, sv, from)
      val effect = effectToken(baseName + "_to", present, sv, Cst(Loc.processed))
      import dahu.expr.dsl._
      // The condition must end exactly when the effect starts.
      val constraints = condition.e === effect.s
      Action(baseName, present, List(condition, effect), constraints)
    }

    // Initial situation: a at la, b at lb.
    val initState: List[Token[Loc]] = List(
      effectToken(s"init_a", Cst(true), atTimelines(Item.a), Cst(Loc.la)),
      effectToken(s"init_b", Cst(true), atTimelines(Item.b), Cst(Loc.lb))
    )

    // Goal: a has been processed.
    val goalState: List[Token[Loc]] = List(
      condToken(s"processed_a", Cst(true), atTimelines(Item.a), Cst(Loc.processed))
    )
  }

  /** Scratch experiments with higher-kinded record encodings. */
  object Structs extends App {
    case class IntervalF[F[_]](start: F[Int], duration: F[Int], end: F[Int], present: F[Boolean])
    type Interval = IntervalF[Id]
    type IntervalExpr = Product[IntervalF, IntervalF[Expr]]
    // type IntervalExpr = Product[IntervalF]
    case class TokenF[F[_], V](itv: F[Interval],
                               timeline: F[Timeline[V]],
                               value: F[V],
                               isEffect: F[Boolean])
    type Token[V] = TokenF[Id, V]
    class TokenExpr[V](override val itv: IntervalExpr,
                       override val timeline: Cst[Timeline[V]],
                       override val value: Expr[V],
                       override val isEffect: Cst[Boolean])
        extends TokenF[Expr, V](itv, timeline, value, isEffect)

    def interval: IntervalExpr =
      Product[IntervalF, IntervalF[Expr]](IntervalF[Expr](Cst(1), Cst(2), Cst(3), Cst(true)))

    // val x = Product(IntervalF[Expr](Cst(1), Cst(1), Cst(2), Cst(true)))
    import scala.reflect.runtime.universe._
    val x = weakTypeTag[IntervalF[Id]]
    println(x.toString())

    // case class Arr[V](members: Seq[Expr[V]]) extends Struct with Expr[Seq[V]] { // Expr[Seq[V]]
    //   override def nested: Seq[Expr[Any]] = members
    // }
    // abstract class TokF[F[_]](start: F[Int], duration: F[Int], end: F[Int], present: F[Boolean])
    // type Tok = TokF[Id]
    // type TokE = TokF[Expr]
    // case class TokExpr(start: Expr[Int],
    //                    duration: Expr[Int],
    //                    end: Expr[Int],
    //                    present: Expr[Boolean])
    //     extends Struct
    //     with Expr[Tok] {
    //   def nested = Seq(start, duration, end, present)
    // }
    // case class ActionF[F[+ _]](name: String, start: F[Int], end: F[Int], tokens: Arr[Tok])
    //
    // type Action = ActionF[Id]
  }
}
package dahu.dataframe
import dahu.dataframe.Column.ColumnImpl
import shapeless.HList
/** Read-only view of a column of values. */
trait ColumnView[V] {

  /** Number of rows in the column. */
  def size: Int

  /** All values of the column. */
  def values: Iterable[V]

  /** Value stored at the given row. */
  def valueAt(row: Int): V
}
/**
  * Column view that is independent of the container.
  *
  * @tparam V Field type
  * @tparam D Type of the data structure containing the column
  */
trait Column[V, D] extends ColumnView[V] {

  /** Returns a copy of the data structure in which this column has `value` at `row`. */
  def updated(row: Int, value: V): D
}
object Column {

  /** Column implementation that delegates every operation to a WithColumn instance. */
  class ColumnImpl[K, V, D](df: D, wi: WithColumn[K, V, D]) extends Column[V, D] {
    override def size: Int = wi.size(df)
    override def values: Iterable[V] = wi.values(df)
    override def valueAt(row: Int): V = wi.valueAt(df, row)
    override def updated(row: Int, value: V): D = wi.updated(df, row, value)
  }

  /** Builds a column view of `df` for the key `k`. */
  def from[K, V, D](df: D, k: K)(implicit wi: WithColumn[K, V, D]): Column[V, D] =
    new ColumnImpl[K, V, D](df, wi)
}
/**
  * Column view that adds access to the raw content.
  *
  * @tparam V Type of the fields
  * @tparam F Container type.
  * @tparam D Type of the data structure containing the column
  */
trait ColumnF[V, F[_], D] extends Column[V, D] {

  /** Raw container backing this column. */
  def content: F[V]

  /** Returns a copy of the data structure in which this column's content is `values`. */
  def swapped(values: F[V]): D
}
object ColumnF {

  /** Column implementation backed by a WithColumn.Aux instance, which also
    * exposes and swaps the raw container. */
  class ColumnFImpl[K, V, F[_], D](df: D, wi: WithColumn.Aux[K, V, F, D])
      extends ColumnImpl(df, wi)
      with ColumnF[V, F, D] {
    override def swapped(values: F[V]): D = wi.swapped(df, values)
    override def content: F[V] = wi.columnContent(df)
  }

  /** Builds a container-aware column view of `df` for the key `k`. */
  def from[K, V, F[_], D](df: D, k: K)(implicit wi: WithColumn.Aux[K, V, F, D]): ColumnF[V, F, D] =
    new ColumnFImpl[K, V, F, D](df, wi)
}
/** Column supporting reverse lookup: from a value to its row index. */
trait IndexedColumn[V, MD <: HList] {

  /** Row index of `v`, if present. */
  def id(v: V): Option[Int]

  /** True iff `v` appears in the column. */
  def contains(v: V): Boolean

  /** Row index of `v`; behavior for absent values is implementation-defined. */
  def idUnsafe(v: V): Int
}
object IndexedColumn {

  /** Builds a reverse-lookup view of column `k` of `df`, delegating to the
    * WithIndex type-class instance. */
  def from[K, V, MD <: HList](df: DF[MD], k: K)(
      implicit wi: WithIndex[K, V, MD]): IndexedColumn[V, MD] =
    new IndexedColumn[V, MD] {
      override def contains(v: V): Boolean = wi.contains(df, v)
      override def id(v: V): Option[Int] = wi.id(df, v)
      override def idUnsafe(v: V): Int = wi.idUnsafe(df, v)
    }
}
package dahu.dataframe
/**
  * Marker trait that provides extension methods for subclasses.
  *
  * It is also used as a restriction for adding containers to dataframes,
  * mainly to avoid programming mistakes.
  */
trait ColumnContainer
object ColumnContainer {

  /** Extension methods available on every ColumnContainer (allocation-free value class). */
  implicit class ColumnContainerOps[D <: ColumnContainer](val d: D) extends AnyVal {

    /** Basic column view for key `k`; the container type is not required. */
    def apply[K, V](k: K)(implicit wi: WithColumn[K, V, D]): Column[V, D] =
      Column.from[K, V, D](d, k)

    /** Container-aware column view for key `k`. */
    def column[K, V, F[_]](k: K)(implicit wi: WithColumn.Aux[K, V, F, D]): ColumnF[V, F, D] =
      ColumnF.from[K, V, F, D](d, k)
  }
}
package dahu.dataframe
import dahu.dataframe.errors.ColumnsOfDifferentSizes
import dahu.dataframe.metadata._
import dahu.dataframe.vector.{IndexedVector, Vec}
import shapeless.{::, HList, HNil}
case class DF[MD <: HList](cols: Vector[_])
object DF {

  /** Dataframe with no columns. */
  def empty: DF[HNil] =
    new DF[HNil](Vector())

  /** Extension methods on DF (allocation-free value class). */
  implicit class DFOps[M <: HList](val df: DF[M]) extends AnyVal {

    /** Raw container of the column identified by key `k`.
      * The cast is justified by the metadata list M tracking each column's type. */
    def raw[K, CM, V, F[_]](k: K)(implicit meta: ColumnMeta.Aux[K, M, CM],
                                  value: Value.Aux[CM, V],
                                  container: Container.Aux[CM, F],
                                  index: ReverseIndexOfKey[K, M]): F[V] = {
      df.cols(index()).asInstanceOf[F[V]]
    }

    /** Appends a new column under `key`.
      * FIX vs original: removed a dead local (`val colMeta` was constructed and
      * never used).
      * @throws ColumnsOfDifferentSizes if the dataframe already has a row count
      *         and the new column's size differs from it. */
    def withColumn[K0, V0, F0[_]](key: K0, values: F0[V0])(
        implicit size: RowNumber[M],
        vec: Vec[F0]): DF[ColumMetadata[K0, V0, F0] :: M] = {
      size(df) match {
        case Some(x) if x != vec.size(values) =>
          throw ColumnsOfDifferentSizes(s"New column $key has size ${vec.size(values)} != $x")
        case _ => // no row count yet, or sizes match
      }
      type CM = ColumMetadata[K0, V0, F0]
      new DF[CM :: M](df.cols :+ values)
    }

    /** Appends an opaque column container (restricted to ColumnContainer subtypes). */
    def withContainer[A <: ColumnContainer](container: A): DF[A :: M] =
      DF[A :: M](df.cols :+ container)

    /** Base method to retrieve a column. The container type of the column is not required,
      * which restricts the operations that can be made on it.
      * Look at {{{column()}}} for a less restricted implementation. */
    def apply[K, V](k: K)(implicit wi: WithColumn[K, V, DF[M]]): Column[V, DF[M]] =
      Column.from(df, k)

    /** Way to retrieve a more feature-full implementation of a column, but
      * one that requires the container type. */
    def column[K, V, F[_]](k: K)(
        implicit wi: WithColumn.Aux[K, V, F, DF[M]]): ColumnF[V, F, DF[M]] =
      ColumnF.from(df, k)

    /** Position of column `k` in the underlying cols vector. */
    def indexOf[K](implicit ev: ReverseIndexOfKey[K, M]): Int = ev()

    /** Replaces column `k`'s plain Vector content with an IndexedVector that
      * also maps each value back to its row index, enabling reverse lookup. */
    def indexed[K, V0, PrevCM, MOut <: HList](k: K)(
        implicit prev: ColumnMeta.Aux[K, M, PrevCM],
        value: Value.Aux[PrevCM, V0],
        container: Container.Aux[PrevCM, Vector],
        swapped: Swapped.Aux[K, ColumMetadata[K, V0, IndexedVector], M, MOut],
        index: ReverseIndexOfKey[K, M]): DF[MOut] = {
      val v: Vector[V0] = raw(k)
      // NOTE(review): duplicate values keep only the last index in the map.
      val map: Map[V0, Int] = v.zipWithIndex.toMap
      val col = IndexedVector[V0](v, map)
      type CM = ColumMetadata[K, V0, IndexedVector]
      val meta: CM = new ColumMetadata[K, V0, IndexedVector] {}
      swapped(df, meta, col)
    }
  }
}
package dahu.dataframe
import shapeless._
import shapeless.ops.hlist.{Length, ToTraversable}
import shapeless.ops.nat.ToInt
trait MetaData[H <: HList] {
type Fields <: HList
val width: Int
def toList(l: Fields): List[Any]