Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Submit feedback
Sign in / Register
Toggle navigation
S
SMarTplan
Project
Project
Details
Activity
Releases
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
sage
SMarTplan
Commits
040b09e8
Commit
040b09e8
authored
May 09, 2018
by
Arthur Bit-Monnot
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
Remove outdated code.
parent
68903ebb
Changes
46
Hide whitespace changes
Inline
Side-by-side
Showing
46 changed files
with
0 additions
and
2417 deletions
+0
-2417
Arrow.scala
dahu-staging/src/main/scala/dahu/arrows/Arrow.scala
+0
-23
MutableArrow.scala
dahu-staging/src/main/scala/dahu/arrows/MutableArrow.scala
+0
-26
TypeInstances.scala
dahu-staging/src/main/scala/dahu/arrows/TypeInstances.scala
+0
-25
Cache.scala
...taging/src/main/scala/dahu/arrows/memoization/Cache.scala
+0
-40
Memo.scala
...staging/src/main/scala/dahu/arrows/memoization/Memo.scala
+0
-41
package.scala
dahu-staging/src/main/scala/dahu/arrows/package.scala
+0
-7
package.scala
...taging/src/main/scala/dahu/arrows/recursion/package.scala
+0
-26
ASTable.scala
dahu-staging/src/main/scala/dahu/ast/ASTable.scala
+0
-53
Ops.scala
dahu-staging/src/main/scala/dahu/ast/Ops.scala
+0
-66
Planning.scala
dahu-staging/src/main/scala/dahu/cerberdo/Planning.scala
+0
-148
Column.scala
dahu-staging/src/main/scala/dahu/dataframe/Column.scala
+0
-78
ColumnContainer.scala
...aging/src/main/scala/dahu/dataframe/ColumnContainer.scala
+0
-21
DF.scala
dahu-staging/src/main/scala/dahu/dataframe/DF.scala
+0
-68
MetaData.scala
dahu-staging/src/main/scala/dahu/dataframe/MetaData.scala
+0
-29
RowNumber.scala
dahu-staging/src/main/scala/dahu/dataframe/RowNumber.scala
+0
-39
WithColumn.scala
dahu-staging/src/main/scala/dahu/dataframe/WithColumn.scala
+0
-73
WithIndex.scala
dahu-staging/src/main/scala/dahu/dataframe/WithIndex.scala
+0
-39
Exceptions.scala
...ing/src/main/scala/dahu/dataframe/errors/Exceptions.scala
+0
-18
FrameMeta.scala
...ng/src/main/scala/dahu/dataframe/metadata/FrameMeta.scala
+0
-39
ReverseIndexOfKey.scala
...ain/scala/dahu/dataframe/metadata/ReverseIndexOfKey.scala
+0
-26
Swapped.scala
...ging/src/main/scala/dahu/dataframe/metadata/Swapped.scala
+0
-52
column.scala
...aging/src/main/scala/dahu/dataframe/metadata/column.scala
+0
-47
ReverseIndexOf.scala
.../src/main/scala/dahu/dataframe/utils/ReverseIndexOf.scala
+0
-51
Index.scala
...-staging/src/main/scala/dahu/dataframe/vector/Index.scala
+0
-14
IndexedVector.scala
.../src/main/scala/dahu/dataframe/vector/IndexedVector.scala
+0
-50
NullableVec.scala
...ng/src/main/scala/dahu/dataframe/vector/NullableVec.scala
+0
-43
Vec.scala
dahu-staging/src/main/scala/dahu/dataframe/vector/Vec.scala
+0
-29
NullableArray.scala
...n/scala/dahu/dataframe/vector/mutable/NullableArray.scala
+0
-35
package.scala
...taging/src/main/scala/dahu/dataframe/vector/package.scala
+0
-3
Math.scala
dahu-staging/src/main/scala/dahu/expr/Math.scala
+0
-50
AST.scala
dahu-staging/src/main/scala/dahu/interpreter/ast/AST.scala
+0
-98
Domain.scala
...ging/src/main/scala/dahu/interpreter/domains/Domain.scala
+0
-54
ExecutingPrg.scala
...rc/main/scala/dahu/interpreter/forward/ExecutingPrg.scala
+0
-126
package.scala
...ing/src/main/scala/dahu/interpreter/forward/package.scala
+0
-59
package.scala
dahu-staging/src/main/scala/dahu/interpreter/package.scala
+0
-62
Exploration.scala
dahu-staging/src/main/scala/dahu/planning/Exploration.scala
+0
-63
ExprF.scala
dahu-staging/src/main/scala/dahu/recursion/ExprF.scala
+0
-73
Algebras.scala
dahu-staging/src/test/scala/dahu/cerbero/Algebras.scala
+0
-42
Planning.scala
dahu-staging/src/test/scala/dahu/cerbero/Planning.scala
+0
-157
MiniCSP.scala
dahu-staging/src/test/scala/dahu/constraints/MiniCSP.scala
+0
-45
DataFrameTest.scala
...staging/src/test/scala/dahu/dataframe/DataFrameTest.scala
+0
-123
MetadataTest.scala
...src/test/scala/dahu/dataframe/metadata/MetadataTest.scala
+0
-43
Keys.scala
dahu-staging/src/test/scala/dahu/dataframe/utils/Keys.scala
+0
-21
UtilsTests.scala
...ging/src/test/scala/dahu/dataframe/utils/UtilsTests.scala
+0
-22
Arrows.scala
dahu-staging/src/test/scala/dahu/exploration/Arrows.scala
+0
-70
rs.scala
dahu-staging/src/test/scala/dahu/exploration/rs.scala
+0
-100
No files found.
dahu-staging/src/main/scala/dahu/arrows/Arrow.scala
deleted
100644 → 0
View file @
68903ebb
package dahu.maps

/** A first-class function from `Domain` to `CoDomain`.
  *
  * Contravariant in its input and covariant in its output, like `Function1`.
  */
trait Arrow[-Domain, +CoDomain] {

  /** Applies this arrow to the given input. */
  def apply(v: Domain): CoDomain

  /** Sequential composition: runs `this`, then feeds the result to `next`. */
  def andThen[Res](next: Arrow[CoDomain, Res]): Arrow[Domain, Res] =
    ComposedArrow(this, next)

  /** View of this arrow as a plain Scala function. */
  def asScalaFunction: Domain => CoDomain = this.apply _
}

object Arrow {

  /** Wraps a plain Scala function into an [[Arrow]]. */
  def lift[A, B](f: A => B): Arrow[A, B] = FunctionArrow(f)
}

/** Arrow backed directly by a Scala function. */
final case class FunctionArrow[In, Res](f: In => Res) extends Arrow[In, Res] {
  override def apply(v1: In): Res = f.apply(v1)
}

/** Arrow obtained by composing `ab` with `bc` (runs `ab` first). */
final case class ComposedArrow[A, B, C](ab: Arrow[A, B], bc: Arrow[B, C]) extends Arrow[A, C] {
  override def apply(v1: A): C = bc.apply(ab.apply(v1))
}
dahu-staging/src/main/scala/dahu/arrows/MutableArrow.scala
deleted
100644 → 0
View file @
68903ebb
package
dahu.maps
import
scala.collection.mutable
/** An arrow that may not yield a result for every input: applying it
  * produces `Option[B]` rather than a bare `B`. */
trait PartialArrow[A, B] extends Arrow[A, Option[B]] {}
/** Function-like mapping whose results may change over time as underlying
  * mutable state evolves (contrast with the immutable `Arrow`). */
trait MutableArrow[A, B] {

  /** Returns the value currently associated with `a`. */
  def apply(a: A): B
}
/** Mutable store of key/value bindings, usable as the input layer of a
  * `MutableArrow`. Lookups return `None` for keys that were never bound
  * (or have been unset). */
class MutableInputs[A, B] {

  /** Backing mutable map holding the currently-known bindings. */
  val known: mutable.Map[A, B] = mutable.Map()

  /** Binds `b` to `a`, replacing any previous binding. */
  def update(a: A, b: B): Unit = {
    known(a) = b
  }

  /** Removes the binding for `a`, if any. */
  def unset(a: A): Unit = {
    known.remove(a)
    ()
  }

  /** Current binding for `a`, or `None` if absent. */
  def apply(a: A): Option[B] = known.get(a)
}
/** Partial mutable arrow built by composing a mutable input store with a pure
  * arrow: `a` is first looked up in `inputs`, and the bound value (if any) is
  * passed through `arrow`. */
class ArrowFromInputs[A, B, C](val inputs: MutableInputs[A, B], val arrow: B ==> C)
    extends MutableArrow[A, Option[C]] {

  /** `None` when `a` is unbound, otherwise the arrow's image of the binding. */
  override def apply(a: A): Option[C] =
    inputs(a) match {
      case Some(b) => Some(arrow(b))
      case None    => None
    }
}
dahu-staging/src/main/scala/dahu/arrows/TypeInstances.scala
deleted
100644 → 0
View file @
68903ebb
package
dahu.maps
import
dahu.maps.memoization.
{
ArrayCache
,
Cache
}
import
scala.reflect.ClassTag
/** Type class exposing every instance of `T`. Specialized on `Int` to avoid
  * boxing for the common integer-backed case. */
trait TypeInstances[@specialized(Int) T] {

  /** All instances of T in no particular order. */
  def enumerate: Array[T]
}
/** Enumerable type whose values are opaquely backed by a bounded range of
  * `Int`s. Provides wrapping/unwrapping to the underlying integers as well as
  * zero-cost substitution of the representation under any type constructor. */
trait OpaqueIntSubset[@specialized(Int) T] extends TypeInstances[T] {

  /** Instance with the smallest underlying integer. */
  def first: T = {
    val underlying = unsubst(enumerate)
    wrap(underlying.min)
  }

  /** Instance with the largest underlying integer. */
  def last: T = {
    val underlying = unsubst(enumerate)
    wrap(underlying.max)
  }

  /** Underlying integer of `a`. */
  def unwrap(a: T): Int

  /** Wraps a raw integer into `T` (no validity check is performed). */
  def wrap(i: Int): T

  /** Re-labels a container of `Int` as a container of `T`. */
  def subst[F[_]](fi: F[Int]): F[T]

  /** Re-labels a container of `T` back to a container of `Int`. */
  def unsubst[F[_]](fa: F[T]): F[Int]

  /** Fresh array-backed cache sized to this subset's integer range. */
  def newCache[B](implicit classTag: ClassTag[B]): Cache[T, B] =
    new ArrayCache[T, B](this)
}
dahu-staging/src/main/scala/dahu/arrows/memoization/Cache.scala
deleted
100644 → 0
View file @
68903ebb
package
dahu.maps.memoization
import
dahu.maps.OpaqueIntSubset
import
scala.reflect.ClassTag
/** Read/update cache from keys `A` to values `B`. */
trait Cache[A, B] {

  /** Cached value for `a`, if present. */
  def get(a: A): Option[B]

  /** Cached value for `a`, or `b` (lazily evaluated) without caching it. */
  def getOrElse(a: A, b: => B): B

  /** Cached value for `a`; otherwise evaluates `b`, stores it and returns it. */
  def getOrElseUpdate(a: A, b: => B): B
}
/** Array-backed [[Cache]] for key types that are opaque wrappers around a
  * bounded range of integers. Storage is a flat array indexed by
  * `unwrap(key) - offset`, with a parallel boolean mask recording which slots
  * actually hold a value.
  *
  * @param ev  evidence giving the integer range and (un)wrapping of `A`
  * @param tag required to allocate the `Array[B]`
  */
class ArrayCache[A, B](ev: OpaqueIntSubset[A])(implicit tag: ClassTag[B]) extends Cache[A, B] {
  import ev._

  // Lowest underlying integer; subtracted from keys so slots start at 0.
  private val offset: Int = unwrap(ev.first)
  // Number of slots spanning [first, last] inclusive.
  private val size: Int = 1 + unwrap(last) - unwrap(first)

  /** Flat-array slot of key `a`. */
  private def address(a: A): Int = unwrap(a) - offset

  val memory: Array[B] = new Array[B](size)
  val mask: Array[Boolean] = Array.fill[Boolean](size)(false)

  override def get(a: A): Option[B] =
    if (mask(address(a)))
      Some(memory(address(a)))
    else
      None

  override def getOrElse(a: A, b: => B): B = get(a).getOrElse(b)

  override def getOrElseUpdate(a: A, b: => B): B =
    if (mask(address(a))) {
      memory(address(a))
    } else {
      // Evaluate the by-name argument exactly once: the previous version
      // wrote `b` to memory and then returned a SECOND evaluation of `b`,
      // so a side-effecting or non-deterministic `b` ran twice and the
      // returned value could differ from the stored one. The mask is now set
      // only after `b` succeeds, so a throwing computation no longer leaves
      // a masked-but-empty slot behind.
      val value = b
      memory(address(a)) = value
      mask(address(a)) = true
      value
    }
}
dahu-staging/src/main/scala/dahu/arrows/memoization/Memo.scala
deleted
100644 → 0
View file @
68903ebb
package
dahu.maps.memoization
import
dahu.maps.
{==>,
OpaqueIntSubset
,
TypeInstances
}
import
scala.reflect.ClassTag
object Memo {

  /** Total memoized function from `A` to `B`. */
  trait Memo[A, B] {
    def apply(a: A): B
  }

  /** Eagerly tabulates `f` over the explicit key set `values`.
    * Applying the memo to a key outside `values` throws
    * `NoSuchElementException`. */
  def memoize[A, B](values: Set[A], f: A => B): Memo[A, B] =
    new Memo[A, B] {
      val table: Map[A, B] = values.iterator.map(key => key -> f(key)).toMap
      override def apply(a: A): B = table(a)
    }

  /** Eagerly tabulates `f` over every instance of `A`, as enumerated by the
    * implicit `TypeInstances` evidence. */
  def memoize[A, B](f: A => B)(implicit instances: TypeInstances[A]): Memo[A, B] =
    new Memo[A, B] {
      val table: Map[A, B] = instances.enumerate.iterator.map(key => key -> f(key)).toMap
      override def apply(a: A): B = table(a)
    }

  /** Memo backed by a flat array, for key types known to wrap a bounded
    * integer range. Every entry is computed eagerly at construction time. */
  class ArrayMemo[A, B](f: A ==> B)(implicit ev: OpaqueIntSubset[A], tag: ClassTag[B])
      extends Memo[A, B] {
    import ev._

    // Lowest underlying integer; slot index is the key's integer minus this.
    private val offset: Int = unwrap(ev.first)
    private val inputs: Array[A] = ev.enumerate

    val memory: Array[B] = new Array[B](1 + unwrap(last) - unwrap(first))

    // Eagerly fill every slot.
    for (key <- inputs) {
      memory(unwrap(key) - offset) = f(key)
    }

    override def apply(a: A): B = memory(unwrap(a) - offset)
  }
}
dahu-staging/src/main/scala/dahu/arrows/package.scala
deleted
100644 → 0
View file @
68903ebb
package dahu

/** Infix alias so arrow types can be written `A ==> B`.
  *
  * NOTE(review): this package object declares `dahu.arrows` while the sibling
  * sources in this directory declare `package dahu.maps`; the unqualified
  * reference to `Arrow` here presumably resolves against a definition in
  * `dahu.arrows` — confirm which package actually compiles. */
package object arrows {
  type ==>[A, B] = Arrow[A, B]
}
dahu-staging/src/main/scala/dahu/arrows/recursion/package.scala
deleted
100644 → 0
View file @
68903ebb
package dahu.maps

import cats.Functor
import dahu.maps.memoization.Cache

/** Recursion schemes expressed over arrows (`==>`) rather than plain Scala
  * functions. */
package object recursion {

  /** F-algebra: collapses one layer `F[X]` into an `X`. */
  type Algebra[F[_], X] = F[X] ==> X

  /** F-coalgebra: unfolds an `X` into one layer `F[X]`. */
  type Coalgebra[F[_], X] = X ==> F[X]

  /** Hylomorphism: unfolds with `coalgebra` and immediately folds with
    * `algebra`, never materializing the intermediate structure.
    *
    * NOTE(review): not stack-safe — recursion depth follows the depth of the
    * unfolded structure. */
  def hylo[A, B, F[_]](coalgebra: Coalgebra[F, A], algebra: Algebra[F, B])(
      implicit F: Functor[F]): A ==> B = {
    def go(id: A): B = algebra(F.map(coalgebra(id))(go))
    Arrow.lift(go)
  }

  /** Like [[hylo]] but records each result in `cache`, so shared
    * substructures (e.g. nodes of a DAG) are folded only once. */
  def memoizedHylo[A, B, F[_]](coalgebra: Coalgebra[F, A],
                               algebra: Algebra[F, B],
                               cache: Cache[A, B])(implicit F: Functor[F]): A ==> B = {
    def go(id: A): B =
      cache.getOrElseUpdate(id, algebra(F.map(coalgebra(id))(go)))
    Arrow.lift(go)
  }
}
dahu-staging/src/main/scala/dahu/ast/ASTable.scala
deleted
100644 → 0
View file @
68903ebb
package dahu.ast

import dahu.maps.recursion.Algebra
import dahu.maps.{==>, recursion, OpaqueIntSubset}
import dahu.recursion.{ExprF, InputF}
import dahu.recursion.Types._

import scala.reflect.ClassTag

/** Flat table of AST nodes addressed by opaque integer IDs, together with
  * coalgebras over the table and (memoized) hylomorphisms to fold it. */
trait ASTable {

  /** Opaque type representing the IDs of the expression in this table. */
  val EId: IndexLabelImpl = new IndexLabelImpl {
    type T = Int
    // The opaque label IS the Int, so every conversion below is the identity.
    override def fromInt(s: Int): T = s
    override def toInt(lbl: T): Int = lbl
    override def fromIntF[F[_]](fs: F[Int]): F[T] = fs
    override def toIntF[F[_]](fs: F[T]): F[Int] = fs
  }
  type EId = EId.T

  /** IDs of variable (input) expressions; a sub-labelling of [[EId]]. */
  val VarId: SubIndexLabelImpl[EId] = new SubIndexLabelImpl[EId] {
    type T = EId
    // A VarId is represented by the very same integer as its EId.
    override def fromInt(s: Int): EId = EId.fromInt(s)
    override def toInt(lbl: T): Int = EId.toInt(lbl)
    override def toIntF[F[_]](fs: F[EId]): F[Int] = EId.toIntF(fs)
    override def fromIntF[F[_]](fs: F[Int]): F[EId] = EId.fromIntF(fs)
    override def wrap(s: EId): T = s
    override def unwrap(lbl: T): EId = lbl
    override def subst[F[_]](fs: F[EId]): F[T] = fs
    override def unsubst[F[_]](fs: F[T]): F[EId] = fs
  }
  type VarId = VarId.T

  /** One AST layer whose children are IDs into this table. */
  type Expr = ExprF[EId]

  /** Input (free variable) node of this table. */
  type Variable = InputF[EId]

  /** ID of the root expression. */
  def root: EId

  /** Maps an ID to its node. */
  def coalgebra: EId ==> Expr // equivalent to Coalgebra[ExprF, EId]

  /** Coalgebra restricted to variable IDs.
    * NOTE(review): the cast assumes every VarId designates an `InputF` node —
    * this holds by construction in `Ops.toTable`; confirm for other
    * implementations. */
  def variableCoalgebra: VarId ==> Variable =
    id => coalgebra(VarId.unwrap(id)).asInstanceOf[Variable]

  /** Enumeration/wrapping evidence for all expression IDs. */
  def ids: OpaqueIntSubset[EId]

  /** Enumeration/wrapping evidence for variable IDs only. */
  def variableIds: OpaqueIntSubset[VarId]

  /** Folds the AST reachable from any ID with `algebra`. */
  def hylo[X](algebra: Algebra[ExprF, X]): EId ==> X =
    recursion.hylo(coalgebra, algebra)

  /** Like [[hylo]] but caches per-ID results, so shared subtrees are folded
    * only once. */
  def memoizedHylo[X](algebra: Algebra[ExprF, X])(implicit classTag: ClassTag[X]): EId ==> X =
    recursion.memoizedHylo(coalgebra, algebra, ids.newCache[X])
}
dahu-staging/src/main/scala/dahu/ast/Ops.scala
deleted
100644 → 0
View file @
68903ebb
package dahu.ast

import cats.Functor
import dahu.maps._
import dahu.maps.memoization.Cache
import dahu.maps.recursion.Algebra
import dahu.recursion._
import spire.ClassTag

import scala.collection.mutable

// NOTE(review): `Functor`, `Cache`, `ClassTag` and `mutable` appear unused in
// this file — presumably leftovers from earlier revisions; confirm before
// removing.
object Ops {

  /** Encodes a user-facing expression tree into a flat [[ASTable]]. */
  val toTable: dahu.expr.Expr[Any] ==> ASTable =
    Arrow.lift((x: dahu.expr.Expr[Any]) => {
      // encodeAsPair yields the root's index plus the flat table of nodes.
      val (headInt, tableInt) = Algebras.encodeAsPair(x)
      new ASTable {
        // Flat node storage; the cast re-labels Int-indexed nodes as
        // EId-indexed (both are represented by the same integers).
        private val vec: Array[ExprF[EId]] =
          tableInt.asInstanceOf[Vector[Expr]].toArray
        override val root: EId = EId(headInt)
        override val coalgebra: EId ==> Expr =
          Arrow.lift(x => vec(EId.toInt(x)))

        // Evidence over ALL node IDs: identity (un)wrapping of the Int index.
        override val ids: OpaqueIntSubset[EId] = new OpaqueIntSubset[EId] {
          override def wrap(i: Int): EId = EId.fromInt(i)
          override def unwrap(a: EId): Int = EId.toInt(a)
          override def subst[F[_]](fi: F[Int]): F[EId] = EId.fromIntF(fi)
          override def unsubst[F[_]](fa: F[EId]): F[Int] = EId.toIntF(fa)
          override val enumerate: Array[EId] = subst(tableInt.indices.toArray)
        }

        // Evidence restricted to the IDs of variable (InputF) nodes.
        override val variableIds: OpaqueIntSubset[VarId] = new OpaqueIntSubset[VarId] {
          override def wrap(i: Int): VarId = VarId.fromInt(i)
          override def unwrap(a: VarId): Int = VarId.toInt(a)
          override def subst[F[_]](fi: F[Int]): F[VarId] = VarId.fromIntF(fi)
          override def unsubst[F[_]](fa: F[VarId]): F[Int] = VarId.toIntF(fa)
          override val enumerate: Array[VarId] =
            subst(tableInt.indices.filter(vec(_).isInstanceOf[Variable]).toArray)
        }
      }
    })

  /** Extracts the declared type of a single node layer. */
  def extractType[X]: ExprF[X] ==> Type = Arrow.lift(_.typ)

  /** Maps each node ID of `ast` to its type. */
  def types(ast: ASTable): ast.EId ==> Type =
    ast.coalgebra.andThen(extractType)

  /** Recursive evaluator of `ast` under the given variable assignment.
    * NOTE(review): plain recursion — depth follows the expression tree; no
    * memoization, so shared subtrees are re-evaluated. */
  def evaluator(ast: ASTable)(inputs: ast.Variable ==> Value): ast.EId ==> Value = {
    def go(i: ast.EId): Value = {
      ast.coalgebra(i) match {
        case CstF(v, _)               => v
        case x @ InputF(_, _)         => inputs(x)
        case ComputationF(f, args, _) => Value(f.compute(args.map(go)))
      }
    }
    Arrow.lift(go)
  }

  /** One-layer evaluation algebra: children are assumed already evaluated, so
    * `args` are passed to `compute` directly. Intended for use with
    * `hylo`/`memoizedHylo`. */
  def evalAlgebra(ast: ASTable)(inputs: ast.Variable ==> Value): Algebra[ExprF, Value] =
    Arrow.lift {
      case CstF(v, _)               => v
      case x @ InputF(_, _)         => inputs(x)
      case ComputationF(f, args, _) => Value(f.compute(args))
    }
}
dahu-staging/src/main/scala/dahu/cerberdo/Planning.scala
deleted
100644 → 0
View file @
68903ebb
package dahu.cerberdo

import cats.{~>, Id}
import dahu.expr._
import dahu.recursion.InputF
import shapeless.{Generic, HNil}

/** Exploratory encoding of timeline-based planning on top of `dahu.expr`.
  * NOTE(review): staging/scratch code — several definitions are unused or
  * incomplete; claims below are limited to what the code shows. */
object Planning {
  // solution structure
  type Timepoint = Int
  type Duration = Int

  class CSP
  /** State variable over values of type `V`. */
  class Timeline[+V] {}
  /** Concrete (solved) token: a value held on a timeline over an interval. */
  class Token[V](start: Timepoint, duration: Duration, end: Timepoint, tl: Timeline[V], value: V)
  /** Concrete observation of a timeline's value at a single time point. */
  class Observation[V](time: Int, tl: Timeline[V], value: V)
  class FilledTimeline[V](tl: Timeline[V], tokens: Seq[Token[V]], observations: Set[Observation[V]])
  class Frame(timelines: Set[Timeline[Any]], csp: CSP)

  // planning structure
  object Domain {
    sealed abstract class Item
    object Item {
      object a extends Item
      object b extends Item
      val allInstances = List(a, b)
    }
    sealed abstract class Loc
    object Loc {
      case object la extends Loc
      case object lb extends Loc
      case object processed extends Loc
      val allInstances = List(la, lb, processed)
    }

    // One `at` timeline per item, tracking its location.
    val atTimelines: Map[Item, Timeline[Loc]] =
      Item.allInstances.map(i => i -> new Timeline[Loc]).toMap

    /** Symbolic (unsolved) token: temporal bounds, presence and value are all
      * expressions to be decided by the solver. Shadows the concrete `Token`
      * of the enclosing object. */
    case class Token[+V](s: Expr[Int],
                         d: Expr[Int],
                         e: Expr[Int],
                         present: Expr[Boolean],
                         sv: Timeline[V],
                         value: Expr[V],
                         isEffect: Boolean)

    // NOTE(review): `tpId` is never read or incremented in this file.
    var tpId = 0

    /** Fresh integer decision variable named `name`. */
    def intVar(name: String): Expr[Int] = Input[Int](name)

    /** Fresh timepoint variable (currently just an integer variable). */
    def tp(name: String): Expr[Int] = intVar(name)

    /** Builds a symbolic token, deriving start/duration/end variable names
      * from `name`. */
    def token[V](name: String,
                 present: Expr[Boolean],
                 sv: Timeline[V],
                 value: Expr[V],
                 isEffect: Boolean): Token[V] =
      Token(tp(s"start($name)"),
            intVar(s"duration($name)"),
            tp(s"end($name)"),
            present,
            sv,
            value,
            isEffect)

    /** Token that writes `value` onto `sv`. */
    def effectToken[V](name: String, present: Expr[Boolean], sv: Timeline[V], value: Expr[V]) =
      token(name, present, sv, value, isEffect = true)

    /** Token that requires `sv` to hold `value`. */
    def condToken[V](name: String, present: Expr[Boolean], sv: Timeline[V], value: Expr[V]) =
      token(name, present, sv, value, isEffect = false)

    case class Action(name: String,
                      present: Expr[Boolean],
                      tokens: Seq[Token[Any]],
                      constraints: Expr[Boolean])

    // action process(Item i),
    // NOTE(review): this `for` has no `yield`, so `actions` is `Unit` and the
    // constructed `Action`s are discarded — presumably a bug or unfinished
    // code; confirm intent before reuse.
    val actions = for (i <- Item.allInstances) {
      val baseName = s"pick_$i"
      val present = Input[Boolean](s"pick_$i")
      val from = Input[Loc](s"pick_from_$i")
      val sv = atTimelines(i)
      val condition = condToken(baseName + "_from", present, sv, from)
      val effect = effectToken(baseName + "_to", present, sv, Cst(Loc.processed))
      import dahu.expr.dsl._
      // Condition must end exactly when the effect starts.
      val constraints = condition.e === effect.s
      Action(baseName, present, List(condition, effect), constraints)
    }

    // Initial state: item a at la, item b at lb.
    val initState: List[Token[Loc]] = List(
      effectToken(s"init_a", Cst(true), atTimelines(Item.a), Cst(Loc.la)),
      effectToken(s"init_b", Cst(true), atTimelines(Item.b), Cst(Loc.lb))
    )

    // Goal: item a processed.
    val goalState: List[Token[Loc]] = List(
      condToken(s"processed_a", Cst(true), atTimelines(Item.a), Cst(Loc.processed))
    )
  }

  /** Scratch experiments with higher-kinded token/interval structures.
    * NOTE(review): extends `App` — executes its body on startup (prints a
    * type tag); keep out of production entry points. */
  object Structs extends App {
    case class IntervalF[F[_]](start: F[Int], duration: F[Int], end: F[Int], present: F[Boolean])
    type Interval = IntervalF[Id]
    type IntervalExpr = Product[IntervalF, IntervalF[Expr]]
    // type IntervalExpr = Product[IntervalF]

    case class TokenF[F[_], V](itv: F[Interval],
                               timeline: F[Timeline[V]],
                               value: F[V],
                               isEffect: F[Boolean])
    type Token[V] = TokenF[Id, V]

    class TokenExpr[V](override val itv: IntervalExpr,
                       override val timeline: Cst[Timeline[V]],
                       override val value: Expr[V],
                       override val isEffect: Cst[Boolean])
        extends TokenF[Expr, V](itv, timeline, value, isEffect)

    /** Sample interval expression with constant bounds. */
    def interval: IntervalExpr =
      Product[IntervalF, IntervalF[Expr]](IntervalF[Expr](Cst(1), Cst(2), Cst(3), Cst(true)))

    // val x = Product(IntervalF[Expr](Cst(1), Cst(1), Cst(2), Cst(true)))
    import scala.reflect.runtime.universe._
    val x = weakTypeTag[IntervalF[Id]]
    println(x.toString())

    //  case class Arr[V](members: Seq[Expr[V]]) extends Struct with Expr[Seq[V]] { // Expr[Seq[V]]
    //    override def nested: Seq[Expr[Any]] = members
    //  }
    //  abstract class TokF[F[_]](start: F[Int], duration: F[Int], end: F[Int], present: F[Boolean])
    //  type Tok = TokF[Id]
    //  type TokE = TokF[Expr]
    //  case class TokExpr(start: Expr[Int],
    //                     duration: Expr[Int],
    //                     end: Expr[Int],
    //                     present: Expr[Boolean])
    //      extends Struct
    //      with Expr[Tok] {
    //    def nested = Seq(start, duration, end, present)
    //  }
    //  case class ActionF[F[+ _]](name: String, start: F[Int], end: F[Int], tokens: Arr[Tok])
    //
    //  type Action = ActionF[Id]
  }
}
dahu-staging/src/main/scala/dahu/dataframe/Column.scala
deleted
100644 → 0
View file @
68903ebb
package dahu.dataframe

import dahu.dataframe.Column.ColumnImpl
import shapeless.HList

/** Read-only view of a column: size, iteration and per-row access. */
trait ColumnView[V] {
  def size: Int
  def values: Iterable[V]
  def valueAt(row: Int): V
}

/**
  * Column view that is independent of the container.
  *
  * @tparam V Field type
  * @tparam D Type of the datastructure containing the vector structure
  */
trait Column[V, D] extends ColumnView[V] {

  /** Returns a new `D` in which this column's value at `row` is `value`. */
  def updated(row: Int, value: V): D
}

object Column {

  /** Default implementation: delegates every operation to the `WithColumn`
    * evidence, closing over the data structure `df`. */
  class ColumnImpl[K, V, D](df: D, wi: WithColumn[K, V, D]) extends Column[V, D] {
    override def size: Int = wi.size(df)
    override def valueAt(row: Int): V = wi.valueAt(df, row)
    override def values: Iterable[V] = wi.values(df)
    override def updated(row: Int, value: V): D = wi.updated(df, row, value)
  }

  /** Builds the column keyed by `k` out of `df`. */
  def from[K, V, D](df: D, k: K)(implicit wi: WithColumn[K, V, D]): Column[V, D] =
    new ColumnImpl(df, wi)
}

/**
  * Column view that adds access to the raw content.
  *
  * @tparam V Type of the fields
  * @tparam F Container type.
  * @tparam D Type of the data structure containing the column
  */
trait ColumnF[V, F[_], D] extends Column[V, D] {

  /** Raw container backing this column. */
  def content: F[V]

  /** Returns a new `D` in which this column's content is replaced wholesale. */
  def swapped(values: F[V]): D
}

object ColumnF {

  /** Extends [[Column.ColumnImpl]] with raw-content access, again delegating
    * to the (container-aware) `WithColumn.Aux` evidence. */
  class ColumnFImpl[K, V, F[_], D](df: D, wi: WithColumn.Aux[K, V, F, D])
      extends ColumnImpl(df, wi)
      with ColumnF[V, F, D] {
    override def content: F[V] = wi.columnContent(df)
    override def swapped(values: F[V]): D = wi.swapped(df, values)
  }

  /** Builds the container-aware column keyed by `k` out of `df`. */
  def from[K, V, F[_], D](df: D, k: K)(
      implicit wi: WithColumn.Aux[K, V, F, D]): ColumnF[V, F, D] =
    new ColumnFImpl(df, wi)
}

/** Column equipped with a reverse index: value-to-row lookup. */
trait IndexedColumn[V, MD <: HList] {

  /** Row of `v`, or `None` if absent. */
  def id(v: V): Option[Int]
  def contains(v: V): Boolean

  /** Row of `v`; behavior when `v` is absent is defined by the `WithIndex`
    * evidence (presumably throws — confirm). */
  def idUnsafe(v: V): Int
}

object IndexedColumn {

  /** Builds the indexed view of the column keyed by `k`, delegating lookups
    * to the `WithIndex` evidence. */
  def from[K, V, MD <: HList](df: DF[MD], k: K)(
      implicit wi: WithIndex[K, V, MD]): IndexedColumn[V, MD] =
    new IndexedColumn[V, MD] {
      override def id(v: V): Option[Int] = wi.id(df, v)
      override def contains(v: V): Boolean = wi.contains(df, v)
      override def idUnsafe(v: V): Int = wi.idUnsafe(df, v)
    }
}
dahu-staging/src/main/scala/dahu/dataframe/ColumnContainer.scala
deleted
100644 → 0
View file @
68903ebb
package dahu.dataframe

/**
  * Marker class that provides extension methods for subclasses.
  *
  * It is also used as a restriction for adding containers to dataframes.
  * This is mainly to avoid programming mistakes.
  * */
trait ColumnContainer

object ColumnContainer {

  /** Zero-allocation extension methods for any `ColumnContainer`. */
  implicit class ColumnContainerOps[D <: ColumnContainer](val d: D) extends AnyVal {

    /** Column keyed by `k`, container-agnostic view. */
    def apply[K, V](k: K)(implicit wi: WithColumn[K, V, D]): Column[V, D] =
      Column.from(d, k)

    /** Column keyed by `k`, with access to its raw container `F`. */
    def column[K, V, F[_]](k: K)(implicit wi: WithColumn.Aux[K, V, F, D]): ColumnF[V, F, D] =
      ColumnF.from(d, k)
  }
}
dahu-staging/src/main/scala/dahu/dataframe/DF.scala
deleted
100644 → 0
View file @
68903ebb
package dahu.dataframe

import dahu.dataframe.errors.ColumnsOfDifferentSizes
import dahu.dataframe.metadata._
import dahu.dataframe.vector.{IndexedVector, Vec}
import shapeless.{::, HList, HNil}

/** Heterogeneous dataframe: `cols` holds one untyped container per column,
  * while the type-level list `MD` records each column's key/value/container
  * metadata. All typed access goes through implicit evidence over `MD`. */
case class DF[MD <: HList](cols: Vector[_])

object DF {

  /** Dataframe with no columns. */
  def empty: DF[HNil] = new DF[HNil](Vector())

  implicit class DFOps[M <: HList](val df: DF[M]) extends AnyVal {

    /** Raw container of column `k`. The cast is justified by the metadata
      * evidence tying `K` to value type `V` and container `F` in `M`. */
    def raw[K, CM, V, F[_]](k: K)(implicit meta: ColumnMeta.Aux[K, M, CM],
                                  value: Value.Aux[CM, V],
                                  container: Container.Aux[CM, F],
                                  index: ReverseIndexOfKey[K, M]): F[V] = {
      df.cols(index()).asInstanceOf[F[V]]
    }

    /** Appends a new column, prepending its metadata to `M`.
      *
      * @throws ColumnsOfDifferentSizes if the dataframe already has a known
      *         row count and `values` has a different size. */
    def withColumn[K0, V0, F0[_]](key: K0, values: F0[V0])(
        implicit size: RowNumber[M],
        vec: Vec[F0]): DF[ColumMetadata[K0, V0, F0] :: M] = {
      size(df) match {
        case Some(x) if x != vec.size(values) =>
          throw ColumnsOfDifferentSizes(s"New column $key has size ${vec.size(values)} != $x")
        case _ =>
      }
      type CM = ColumMetadata[K0, V0, F0]
      // NOTE(review): `colMeta` is never used — the metadata exists only at
      // the type level; presumably dead code.
      val colMeta: CM = new ColumMetadata[K0, V0, F0] {}
      new DF[CM :: M](df.cols :+ values)
    }

    /** Appends an opaque column container (see `ColumnContainer`). */
    def withContainer[A <: ColumnContainer](container: A): DF[A :: M] =
      DF[A :: M](df.cols :+ container)

    /** Base method to retrieve a column. The container type of the column is not required,
      * which restricts the operations that can made on it.
      * Look at {{{column()}}} for less restricted implementation. */
    def apply[K, V](k: K)(implicit wi: WithColumn[K, V, DF[M]]): Column[V, DF[M]] =
      Column.from(df, k)

    /** Way to retrieve a more feature full implementation of a column but
      * that requires the container type. */
    def column[K, V, F[_]](k: K)(
        implicit wi: WithColumn.Aux[K, V, F, DF[M]]): ColumnF[V, F, DF[M]] =
      ColumnF.from(df, k)

    /** Physical position of column `K` in `cols`. */
    def indexOf[K](implicit ev: ReverseIndexOfKey[K, M]): Int = ev()

    /** Replaces the `Vector`-backed column `k` by an [[IndexedVector]] (same
      * values plus a value-to-row map), updating the metadata list from `M`
      * to `MOut` via the `Swapped` evidence. */
    def indexed[K, V0, PrevCM, MOut <: HList](k: K)(
        implicit prev: ColumnMeta.Aux[K, M, PrevCM],
        value: Value.Aux[PrevCM, V0],
        container: Container.Aux[PrevCM, Vector],
        swapped: Swapped.Aux[K, ColumMetadata[K, V0, IndexedVector], M, MOut],
        index: ReverseIndexOfKey[K, M]): DF[MOut] = {
      val v: Vector[V0] = raw(k)
      // Reverse index; for duplicate values the LAST row wins (toMap).
      val map: Map[V0, Int] = v.zipWithIndex.toMap
      val col = IndexedVector[V0](v, map)
      type CM = ColumMetadata[K, V0, IndexedVector]
      val meta: CM = new ColumMetadata[K, V0, IndexedVector] {}
      swapped(df, meta, col)
    }
  }
}
dahu-staging/src/main/scala/dahu/dataframe/MetaData.scala
deleted
100644 → 0
View file @
68903ebb
package
dahu.dataframe
import
shapeless._
import
shapeless.ops.hlist.
{
Length
,
ToTraversable
}
import
shapeless.ops.nat.ToInt
trait
MetaData
[
H
<:
HList
]
{
type
Fields
<:
HList
val
width
:
Int
def
toList
(
l
:
Fields
)
:
List
[
Any
]