package dahu.planning.anml.parser

import java.io.{File, IOException}

import dahu.planning.model._
import ParserApi.baseApi._
import ParserApi.baseApi.Parsed.Success
import ParserApi.whiteApi._
import ParserApi.extendedApi._
import fastparse.core.Parsed.Failure
import dahu.planning.model.common._
import dahu.planning.model.common.operators.{
  Associativity,
  BinOperatorGroup,
  BinaryOperator,
  OperatorGroup,
  UnaryOperator,
  UniOperatorGroup
}
import dahu.planning.model.full._

import scala.annotation.tailrec
import scala.util.Try

abstract class AnmlParser(val initialContext: Ctx)(implicit predef: Predef) {

  /** Denotes the current context of this AnmlParser.
    * It is used by many subparsers to find the variable/fluent/type associated with an identifier.
    * Over the course of parsing, the current context is likely to change (i.e. `ctx` will point to
    * a new context, since [[Ctx]] is immutable). */
  protected var ctx: Ctx = initialContext
  protected def updateContext(newContext: Ctx): Unit = {
    ctx = newContext
  }

  val word: Parser[String] = {
    import fastparse.all._ // override sequence composition to ignore white spaces
    (CharIn(('a' to 'z') ++ ('A' to 'Z') ++ "_") ~
      CharsWhileIn(('a' to 'z') ++ ('A' to 'Z') ++ ('0' to '9') ++ "_", min = 0)).!.opaque("word")
  }
  val int: Parser[Int] = CharsWhileIn('0' to '9').!.map(_.toInt).opaque("int")

  val typeKW: Parser[Unit] = word.filter(_ == "type").silent.opaque("type")
  val withKW: Parser[Unit] = word.filter(_ == "with").silent.opaque("with")
  val instanceKW: Parser[Unit] = word.filter(_ == "instance").silent.opaque("instance")
  val fluentKW: Parser[Unit] = word.filter(_ == "fluent").silent.opaque("fluent")
  val constantKW: Parser[Unit] = word.filter(_ == "constant").silent.opaque("constant")
  val timepointKW: Parser[Unit] = word.filter(_ == "timepoint").silent.opaque("timepoint")
  val actionKW: Parser[Unit] = word.filter(_ == "action").silent.opaque("action")
  val durationKW: Parser[Unit] = word.filter(_ == "duration").silent.opaque("duration")
  val containsKW: Parser[Unit] = word.filter(_ == "contains").silent.opaque("contains")
  val keywords: Set[String] =
    Set("type", "instance", "action", "duration", "fluent", "variable", "predicate", "timepoint")
  val nonIdent: Set[String] = keywords

  val simpleIdent: Parser[String] =
    word.opaque("ident").namedFilter(!nonIdent.contains(_), "not-reserved")
  val ident: Parser[String] = simpleIdent.rep(min = 1, sep = ".").!.opaque("possibly-nested-ident")
  val typeName: Parser[String] = word.filter(!keywords.contains(_)).opaque("type-name")
  val variableName: Parser[String] = ident.opaque("variable-name")

  val freeIdent: Parser[String] =
    simpleIdent
      .namedFilter(id =>
                     ctx.findDeclaration(id) match {
                       case Some(_) => false
                       case None    => true
                   },
                   "unused")

  val declaredType: Parser[Type] =
    typeName.optGet(ctx.findType(_), "declared")

  val timepointDeclaration: Parser[LocalVarDeclaration] =
    timepointKW ~/
      freeIdent.map(name => LocalVarDeclaration(Timepoint(ctx.id(name)))) ~
      ";"

  protected val definedTP: Parser[LocalVar] =
    ident.optGet(ctx.findTimepoint(_), "declared-timepoint")

//  val timepoint: Parser[TPRef] = {
//    (int ~ "+").flatMap(d => timepoint.map(tp => tp + d)) |
//      (definedTP.map(TPRef(_)) ~ (("+" | "-").! ~ int).?).map {
//        case (tp, Some(("+", delay))) => tp + delay
//        case (tp, Some(("-", delay))) => tp - delay
//        case (tp, None)               => tp
//        case _                        => sys.error("Buggy parser implementation")
//      } |
//      int.flatMap(
//        i => // integer as a tp defined relatively to the global start, if one exists
//          ctx.root.findTimepoint("start") match {
//            case Some(st) => PassWith(TPRef(st) + i)
//            case None     => Fail.opaque("fail: no start timepoint in top level scope")
//        })
//  }.opaque("timepoint")

//  lazy val delay: Parser[Delay] =
//    (durationKW ~/ Pass)
//      .flatMap(
//        _ =>
//          ctx
//            .findTimepoint("start")
//            .flatMap(st => ctx.findTimepoint("end").map(ed => (TPRef(st), TPRef(ed)))) match {
//            case Some((st, ed)) => PassWith(Delay(st, ed))
//            case None           => sys.error("No start/end timepoint")
//        })
//      .opaque("duration") |
//      (timepoint ~ "-" ~ definedTP.map(TPRef(_)) ~ (("+" | "-").! ~ intExpr).?).map {
//        case (t1, t2, None)           => t1 - t2
//        case (t1, t2, Some(("+", i))) => (t1 - t2) + i
//        case (t1, t2, Some(("-", i))) => (t1 - t2) - i
//        case _                        => sys.error("Buggy parser implementation")
//      }
//
//  lazy val temporalConstraint: Parser[Seq[TBefore]] = {
//    (timepoint ~ ("<=" | "<" | ">=" | ">" | "==" | ":=" | "=").! ~/ timepoint ~ ";")
//      .map {
//        case (t1, "<", t2)                                     => Seq(t1 < t2)
//        case (t1, "<=", t2)                                    => Seq(t1 <= t2)
//        case (t1, ">", t2)                                     => Seq(t1 > t2)
//        case (t1, ">=", t2)                                    => Seq(t1 >= t2)
//        case (t1, eq, t2) if Set("=", "==", ":=").contains(eq) => t1 === t2
//        case _                                                 => sys.error("Buggy parser implementation")
//      } |
//      (delay ~ ("<=" | "<" | ">=" | ">" | "==" | ":=" | "=").! ~/ intExpr ~ ";")
//        .map {
//          case (d, "<", t)                                     => Seq(d < t)
//          case (d, "<=", t)                                    => Seq(d <= t)
//          case (d, ">", t)                                     => Seq(d > t)
//          case (d, ">=", t)                                    => Seq(d >= t)
//          case (d, eq, t) if Set("=", "==", ":=").contains(eq) => d === t
//          case _                                               => sys.error("Buggy parser implementation")
//        }
//  }

  val variable: Parser[Term] =
    ident.optGet(ctx.findVariable(_), "declared-variable")

  val fluent: Parser[FluentTemplate] =
    ident.optGet(ctx.findFluent(_), "declared-fluent")

  val constantFunc: Parser[ConstantTemplate] =
    ident.optGet(ctx.findConstant(_), "declared-constant")

  /** Parses a fluent in the object-oriented notation:
    * "x.f" where x is a variable of type T and f is a fluent declared in type T or in a supertype of T.
    * Returns the fluent T.f and x, which is to be the first argument of T.f. */
  val partiallyAppliedFunction: Parser[(FunctionTemplate, Term)] = {

    /** Retrieves a function template declared in the given type or one of its supertypes. */
    def findInTypeFunction(typ: Type, fluentName: String): Option[FunctionTemplate] =
      ctx
        .findFunction(typ.id.name + "." + fluentName)
        .orElse(typ.parent.flatMap(p => findInTypeFunction(p, fluentName)))

    ident
      .map(str => str.split("\\.").toList)
      // split into first idents (variable) and last (fluent name)
      .map(idents => (idents.dropRight(1), idents.last))
      // only keep if first idents represent a valid variable
      .map(tup => (ctx.findVariable(tup._1.mkString(".")), tup._2))
      .namedFilter(_._1.isDefined, "declared-variable")
      .map(tup => (tup._1.get, tup._2))
      // keep if we can find the fluent in the type of the variable
      .namedFilter({ case (v, fluentName) => findInTypeFunction(v.typ, fluentName).isDefined },
                   s"fluent-available-for-this-variable-type")
      // return the fluent and the variable
      .map { case (v, fluentName) => (findInTypeFunction(v.typ, fluentName).get, v) }
  }
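  // Illustrative resolution for the partiallyAppliedFunction parser above (hypothetical names,
  // not taken from any concrete domain): given a declared variable "r" whose type "Robot"
  // declares a function "position", the input "r.position" yields the template "Robot.position"
  // paired with the term for "r".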

  private[this] def varList(expectedTypes: Seq[Type],
                            sep: String,
                            previous: Seq[StaticExpr] = Seq()): Parser[Seq[StaticExpr]] = {
    if(expectedTypes.isEmpty) {
      PassWith(previous)
    } else {
      staticExpr
        .namedFilter(_.typ.isSubtypeOf(expectedTypes.head), "has-expected-type")
        .flatMap(
          v =>
            if(expectedTypes.tail.isEmpty)
              PassWith(previous :+ v)
            else
              Pass ~ sep ~/ varList(expectedTypes.tail, sep, previous :+ v))
    }
  }

  /** Parses a sequence of arguments, necessarily enclosed in parentheses if non-empty.
    * Examples of valid inputs: "", "()", "(Type1 arg1)", "(Type1 arg1, Type2 arg2)"
    */
  protected val argList: Parser[Seq[(String, Type)]] = {
    val arg: Parser[(String, Type)] =
      (declaredType ~ ident)
        .map { case (typ, argName) => (argName, typ) }

    /** A list of at least one argument, formatted as "Type1 arg1, Type2 arg2" */
    def distinctArgSeq(sep: String,
                       previous: Seq[(String, Type)] = Seq()): Parser[Seq[(String, Type)]] =
      Pass ~ arg
        .namedFilter(a => !previous.exists(_._1 == a._1), "not-used-in-current-arg-sequence")
        .flatMap(a => (Pass ~ sep ~/ distinctArgSeq(sep, previous :+ a)) | PassWith(previous :+ a))

    ("(" ~/
      ((&(word) ~/ distinctArgSeq(",")) | PassWith(Seq()).opaque("no-args")) ~
      ")") | // parenthesis with and without args
      PassWith(Seq()).opaque("no-args") // no args, no parentheses
  }

  val timedSymExpr: Parser[TimedExpr] = {
    val partiallyAppliedFluent = partiallyAppliedFunction
      .namedFilter(_._1.isInstanceOf[FluentTemplate], "is-fluent")
      .map(tup => (tup._1.asInstanceOf[FluentTemplate], tup._2))

    (fluent ~/ Pass).flatMap(f =>
      f.params.map(param => param.typ) match {
        case Seq() => (("(" ~/ ")") | Pass) ~ PassWith(new Fluent(f, Seq()))
        case paramTypes =>
          "(" ~/ varList(paramTypes, ",").map(args => new Fluent(f, args)) ~ ")" ~/ Pass
    }) |
      (partiallyAppliedFluent ~/ Pass).flatMap {
        case (f, firstArg) =>
          f.params.map(param => param.typ) match {
            case Seq(singleParam) =>
              (("(" ~/ ")") | Pass) ~ PassWith(new Fluent(f, Seq(CommonTerm(firstArg))))
            case paramTypes =>
              "(" ~/ varList(paramTypes.tail, ",")
                .map(args => new Fluent(f, CommonTerm(firstArg) +: args)) ~ ")" ~/ Pass
          }
      }
  }
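  // Illustrative inputs for the timedSymExpr parser above (hypothetical fluent/variable names):
  //   "position(r)"  -- fully applied fluent
  //   "r.position"   -- object-oriented notation; "r" becomes the first argument
  //   a parameterless fluent may be written with or without trailing "()"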

  val constantFuncTerm: Parser[Constant] = P {
    val partiallyAppliedConstant = partiallyAppliedFunction
      .namedFilter(_._1.isInstanceOf[ConstantTemplate], "is-constant")
      .map(tup => (tup._1.asInstanceOf[ConstantTemplate], tup._2))

    (constantFunc ~/ Pass).flatMap(f =>
      f.params.map(param => param.typ) match {
        case Seq() => (("(" ~/ ")") | Pass) ~ PassWith(Constant(f, Seq()))
        case paramTypes =>
          "(" ~/ varList(paramTypes, ",").map(args => Constant(f, args)) ~ ")" ~/ Pass
    }) |
      (partiallyAppliedConstant ~/ Pass).flatMap {
        case (f, firstArg) =>
          f.params.map(param => param.typ) match {
            case Seq(singleParam) =>
              (("(" ~/ ")") | Pass) ~ PassWith(Constant(f, Seq(CommonTerm(firstArg))))
            case paramTypes =>
              "(" ~/ varList(paramTypes.tail, ",")
                .map(args => Constant(f, CommonTerm(firstArg) +: args)) ~ ")" ~/ Pass
          }
      }
  }
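  // Illustrative inputs for the constantFuncTerm parser above (hypothetical names):
  //   "dist(a, b)" where "dist" is a declared constant function,
  //   or "a.dist(b)" if "dist" is declared within the type of "a" (object-oriented notation).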

  val staticTerm: Parser[StaticExpr] = {
    (durationKW ~/ Pass)
      .flatMap(_ =>
        (for {
          st <- ctx.findTimepoint("start")
          ed <- ctx.findTimepoint("end")
        } yield full.BinaryExprTree(operators.Sub, CommonTerm(ed), CommonTerm(st))) match {
          case Some(e) => PassWith(e)
          case None    => sys.error("No start/end timepoint")
      })
      .opaque("duration") |
      int.map(i => CommonTerm(IntLiteral(i))) |
      variable.map(CommonTerm(_)) |
      constantFuncTerm
  }

  val staticExpr: P[StaticExpr] = Tmp.expr

  object Tmp {
    type E = StaticExpr
    type PE = Parser[StaticExpr]
    def term: PE = P(staticTerm ~/ Pass)
    def expr: PE = P(top)
    val bottom: PE = P(("(" ~/ expr ~/ ")") | term)
    val top: PE =
      operators.layeredOps.foldLeft(bottom) {
        case (inner, opGroup) => groupParser(opGroup, inner)
      }

    def binGroupParser(gpe: BinOperatorGroup, inner: PE): PE = {
      // parser for a single operator in the group
      val operator: P[BinaryOperator] =
        StringIn(gpe.ops.map(_.op).toSeq: _*).!.optGet(str => gpe.ops.find(_.op == str))
          .opaque(gpe.ops.map(a => "\"" + a.op + "\"").mkString("(", "|", ")"))
      gpe match {
        case BinOperatorGroup(ops, _, Associativity.Left) =>
          (inner ~/ (operator ~/ inner).rep).optGet({
            case (head, tail) =>
              tail.foldLeft(Option(head)) {
                case (acc, (op, rhs)) => acc.flatMap(x => asBinary(op, x, rhs))
              }
          }, "well-typed")

        case BinOperatorGroup(ops, _, Associativity.Right) =>
          (inner ~/ (operator ~/ inner).rep).optGet(
            {
              case (head, tail) =>
                def makeRightAssociative[A, B](e1: A, nexts: List[(B, A)]): (List[(A, B)], A) =
                  nexts match {
                    case Nil => (Nil, e1)
                    case (b, e2) :: rest =>
                      val (prevs, last) = makeRightAssociative(e2, rest)
                      ((e1, b) :: prevs, last)
                  }
                val (prevs: List[(StaticExpr, BinaryOperator)], last: StaticExpr) =
                  makeRightAssociative(head, tail.toList)
                prevs.foldRight(Option(last)) {
                  case ((lhs, op), rhs) => rhs.flatMap(asBinary(op, lhs, _))
                }
            },
            "well-typed"
          )

        case BinOperatorGroup(ops, _, Associativity.Non) =>
          (inner ~/ (operator ~/ inner).?).optGet({
            case (lhs, None)            => Some(lhs)
            case (lhs, Some((op, rhs))) => asBinary(op, lhs, rhs)
          }, "well-typed")
      }
    }

    def unaryGroupParser(gpe: UniOperatorGroup, inner: PE): PE = {
      val operator: P[UnaryOperator] =
        StringIn(gpe.ops.map(_.op).toSeq: _*).!.optGet(str => gpe.ops.find(_.op == str))
          .opaque(gpe.ops.map(a => "\"" + a.op + "\"").mkString("(", "|", ")"))
      (operator.? ~ inner)
        .optGet({
          case (None, e)     => Some(e)
          case (Some(op), e) => Try(full.UnaryExprTree(op, e)).toOption
        }, "well-typed")
    }

    def groupParser(gpe: OperatorGroup, inner: PE): PE = gpe match {
      case x: BinOperatorGroup => binGroupParser(x, inner)
      case x: UniOperatorGroup => unaryGroupParser(x, inner)
    }
    def asBinary(op: BinaryOperator, lhs: StaticExpr, rhs: StaticExpr): Option[StaticExpr] = {
      op.tpe(lhs.typ, rhs.typ) match {
        case Right(_) => Some(full.BinaryExprTree(op, lhs, rhs))
        case Left(_)  => None
      }
    }
  }
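  // Sketch of how the layered parsers above resolve an expression (hypothetical names):
  //   for an input such as "x + 2 * y", grouping follows the precedence and associativity
  //   declared in operators.layeredOps, and ill-typed combinations are rejected by the
  //   "well-typed" filters.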

  val expr: Parser[full.Expr] =
    timedSymExpr | staticExpr

  val timepoint: P[StaticExpr] =
    staticExpr.namedFilter(_.typ.isSubtypeOf(Type.Integers), "of-type-integer")

  val interval: Parser[Interval] =
    ("[" ~/
      ((timepoint ~/ ("," ~/ timepoint).?).map {
        case (tp, None)       => (tp, tp) // "[end]" becomes "[end, end]"
        case (tp1, Some(tp2)) => (tp1, tp2)
      } |
        P("all").map(_ => {
          (ctx.findTimepoint("start"), ctx.findTimepoint("end")) match {
            case (Some(st), Some(ed)) => (CommonTerm(st), CommonTerm(ed))
            case _                    => sys.error("Start and/or end timepoints are not defined.")
          }
        })) ~
      "]").map {
      case (tp1, tp2) => Interval(tp1, tp2)
    }
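  // Illustrative inputs for the interval parser above (assuming "start"/"end" timepoints are in scope):
  //   "[all]"          -- expands to the [start, end] interval
  //   "[start, end]"
  //   "[end]"          -- a single timepoint is duplicated into [end, end]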

  val timedAssertion: Parser[TimedAssertion] = {
    // variables that hold the first two parsed tokens, to facilitate the type-checking logic
    var id: String = null
    var fluent: TimedExpr = null

    def compatibleTypes(t1: Type, t2: Type): Boolean = t1.isSubtypeOf(t2) || t2.isSubtypeOf(t1)

    /** Reads a static symbolic expression whose type is compatible with that of the fluent. */
    val rightSideExpr: Parser[StaticExpr] =
      staticExpr.namedFilter(expr => compatibleTypes(fluent.typ, expr.typ), "has-compatible-type")

    /** Reads an identifier, or constructs a default one otherwise. */
    val assertionId: Parser[String] =
      (freeIdent ~ ":" ~/ Pass).?.map {
        case Some(id) => id
        case None     => defaultId()
      }

    assertionId.sideEffect(id = _).silent ~
      (timedSymExpr.sideEffect(fluent = _).silent ~
        (("==" ~/ rightSideExpr ~ (":->" ~/ rightSideExpr).?).map {
          case (expr, None)     => TimedEqualAssertion(fluent, expr, Some(ctx), id)
          case (from, Some(to)) => TimedTransitionAssertion(fluent, from, to, Some(ctx), id)
        } | (":=" ~/ rightSideExpr).map(e => TimedAssignmentAssertion(fluent, e, Some(ctx), id))))
  }
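  // Illustrative timed assertions accepted by the parser above (hypothetical names):
  //   "position(r) == A"        -- equality over the interval
  //   "position(r) == A :-> B"  -- transition from A to B
  //   "position(r) := B"        -- assignment
  //   an optional "myId:" prefix names the assertion; otherwise a default id is generated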

  val qualifier: Parser[TemporalQualifier] =
    (interval ~/ containsKW.!.?).map {
      case (it, None)    => Equals(it)
      case (it, Some(_)) => Contains(it)
    }

  val temporallyQualifiedAssertion: Parser[Seq[TemporallyQualifiedAssertion]] = {
    (qualifier ~/
      ((("{" ~ (!"}" ~/ timedAssertion ~ ";").rep ~ "}") |
        timedAssertion.map(Seq(_)))
        ~ ";"))
      .map { case (it, assertions) => assertions.map(TemporallyQualifiedAssertion(it, _)) }
  }
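  // Illustrative temporally qualified assertions (hypothetical names):
  //   "[start, end] position(r) == A;"
  //   "[all] contains { position(r) == A; battery(r) := low; };"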

  val staticAssertion: Parser[StaticAssertion] = {
    var leftExpr: StaticExpr = null
    (staticExpr.sideEffect(leftExpr = _) ~/
      (":=".! ~/ staticExpr.namedFilter(_.typ.overlaps(leftExpr.typ), "has-compatible-type")).? ~
      ";")
      .namedFilter({
        case (_: Constant, Some(_)) => true
        case (_, Some(_))           => false
        case (_, None)              => true
      }, "assignment-to-const-func-only")
      .namedFilter({
        case (_, Some(_)) => true
        case (expr, None) => expr.typ.isSubtypeOf(predef.Boolean)
      }, "boolean-if-not-assignment")
      .map {
        case (left: Constant, Some((":=", right))) => StaticAssignmentAssertion(left, right)
        case (expr, None)                          => BooleanAssertion(expr)
        case _                                     => sys.error("Something is wrong with this parser.")
      }
  }
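  // Illustrative static assertions (hypothetical names):
  //   "dist(a, b) := 10;"  -- assignment, only allowed on constant functions
  //   "connected(a, b);"   -- without ":=", the expression must be boolean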
}

/** Second phase parser that extracts all ANML elements except types, which are expected to
  * already be present in the initial model. */
class AnmlModuleParser(val initialModel: Model) extends AnmlParser(initialModel) {

  /** Parser for instance declaration.
    * "instance Type id1, id2, id3;" */
  val instancesDeclaration: Parser[Seq[InstanceDeclaration]] = {

    /** Parses a sequence of yet-unused *distinct* identifiers. */
    def distinctFreeIdents(previous: Seq[String], sep: String, term: String): Parser[Seq[String]] =
      Pass ~ freeIdent
        .namedFilter(!previous.contains(_), "not-in-same-instance-list")
        .flatMap(name =>
          Pass ~ sep ~/ distinctFreeIdents(previous :+ name, sep, term)
            | Pass ~ term ~ PassWith(previous :+ name))

    (instanceKW ~/ declaredType ~/ distinctFreeIdents(Nil, ",", ";"))
      .map {
        case (typ, instanceNames) => instanceNames.map(name => new Instance(ctx.id(name), typ))
      }
  }.map(instances => instances.map(new InstanceDeclaration(_)))

  /** Parser to read the kind and type of a function declaration. For instance:
    * "fluent T", "constant T", "function T", "variable T", "predicate" where T is a type already declared.
    * Returns either ("fluent", T) or ("constant", T), considering that
    * 1) "variable" and "function" are aliases for "fluent"
    * 2) "predicate" is an alias for "fluent boolean" */
  private[this] val functionKindAndType: Parser[(String, Type)] = {
    (word
      .filter(w => w == "fluent" || w == "variable" || w == "function")
      .opaque("fluent")
      .silent ~/ declaredType).map(("fluent", _)) |
      (constantKW ~/ declaredType).map(("constant", _)) |
      word
        .filter(_ == "predicate")
        .opaque("predicate")
        .optGet(_ => ctx.findType("boolean"), "with-boolean-type-in-scope")
        .map(("fluent", _))
  }

  val functionDeclaration: Parser[FunctionDeclaration] = {
    (functionKindAndType ~ freeIdent ~ argList ~ ";")
      .map {
        case ("fluent", typ, svName, args) =>
          new FluentTemplate(ctx.id(svName), typ, args.map {
            case (name, argType) => new Arg(new Id(ctx.scope + svName, name), argType)
          })
        case ("constant", typ, svName, args) =>
          new ConstantTemplate(ctx.id(svName), typ, args.map {
            case (name, argType) => new Arg(new Id(ctx.scope + svName, name), argType)
          })
        case _ => sys.error("Match failed")
      }
      .map(new FunctionDeclaration(_))
  }
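  // Illustrative function declarations accepted by the parser above (hypothetical names):
  //   "fluent Location position(Robot r);"
  //   "constant Distance dist(Location a, Location b);"
  //   "predicate connected(Location a, Location b);"  -- sugar for a boolean fluent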

  /** Extracts the functions declared in a type. This consumes the whole type declaration.
    * Note that the type should already be present in the module.
    * Typically consumed:
    *   "type B < A with { fluent f(C c); };" */
  val inTypeFunctionDeclaration: Parser[Seq[FunctionDeclaration]] =
    (typeKW ~/
      declaredType
      ~ ("<" ~/ declaredType.!).asInstanceOf[Parser[Type]].?
      ~ (withKW ~/ "{" ~/ functionDeclaration.rep ~ "}").?
      ~ ";")
      .map {
        case (_, _, None) => Seq()
        case (t, _, Some(funcDecl)) =>
          funcDecl.map(fd => {
            val id = Id(t.asScope, fd.id.name)
            val functionScope = t.asScope + id.name
            val selfArg = Arg(Id(functionScope, "self"), t)
            val params = selfArg +: fd.func.params.map(arg =>
              Arg(Id(functionScope, arg.id.name), arg.typ))
            val template = fd.func match {
              case _: FluentTemplate   => FluentTemplate(id, fd.func.typ, params)
              case _: ConstantTemplate => ConstantTemplate(id, fd.func.typ, params)
            }
            FunctionDeclaration(template)
          })
      }

  val action: Parser[ActionTemplate] = new AnmlActionParser(this).parser

  val elem: Parser[Seq[InModuleBlock]] =
    inTypeFunctionDeclaration |
      instancesDeclaration |
      functionDeclaration.map(Seq(_)) |
      timepointDeclaration.map(Seq(_)) |
      temporallyQualifiedAssertion |
      staticAssertion.map(Seq(_)) |
      action.map(Seq(_))

  def currentModel: Model = ctx match {
    case m: Model => m
    case _        => sys.error("Current context is not a model")
  }

  private[this] val anmlParser: Parser[Model] =
    // for each elem parsed, update the current model
    (Pass ~ elem.optGet(currentModel ++ _).sideEffect(updateContext(_)).silent).rep ~ End.map(_ =>
      currentModel)

  def parse(input: String): Parsed[Model] = {
    updateContext(initialModel)
    anmlParser.parse(input)
  }
}

class AnmlActionParser(superParser: AnmlModuleParser)(implicit predef: Predef)
    extends AnmlParser(superParser.currentModel) {

  private def currentAction: ActionTemplate = ctx match {
    case a: ActionTemplate => a
    case _                 => sys.error("Current context is not an action.")
  }

  /** Creates an action template with the given name and its default content (i.e. start/end timepoints). */
  private[this] def buildEmptyAction(actionName: String): ActionTemplate = {
    val container = ctx match {
      case m: Model => m
      case _        => sys.error("Starting to parse an action while the context is not a model.")
    }
    val emptyAct = new ActionTemplate(actionName, container)
    emptyAct +
      LocalVarDeclaration(Timepoint(Id(emptyAct.scope, "start"))) +
      LocalVarDeclaration(Timepoint(Id(emptyAct.scope, "end")))
  }

  /** FIXME: this interprets a "constant" as a local variable. This is compatible with FAPE but not with official ANML. */
  val variableDeclaration: Parser[LocalVarDeclaration] = {
    (constantKW ~/ declaredType ~/ freeIdent ~/ ";").map {
      case (typ, id) =>
        LocalVarDeclaration(LocalVar(Id(ctx.scope, id), typ))
    }
  }
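  // Illustrative input for the variableDeclaration parser above (hypothetical names):
  //   "constant Robot r;"  -- declares a local variable "r" of type Robot inside the action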

  val parser: Parser[ActionTemplate] =
    (Pass ~ actionKW.sideEffect(x => {
      // set context to the current model in order to access previous declarations
      updateContext(superParser.currentModel)
    }) ~/
      freeIdent.sideEffect(actionName => {
        // set context to the current action
        updateContext(buildEmptyAction(actionName))
      }) ~/
      argList // parse arguments and update the current action
        .map(_.map {
          case (name, typ) => ArgDeclaration(Arg(Id(ctx.scope, name), typ))
        })
        .sideEffect(argDeclarations => updateContext(currentAction ++ argDeclarations)) ~
      "{" ~/
      (temporallyQualifiedAssertion |
        staticAssertion.map(Seq(_)) |
        variableDeclaration.map(Seq(_)))
        .sideEffect(x => updateContext(currentAction ++ x)) // add assertions to the current action
        .rep ~
      "}" ~/
      ";")
      .flatMap(_ => PassWith(currentAction))
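
  // Illustrative action accepted by the parser above (hypothetical names):
  //   "action move(Robot r, Location from, Location to) {
  //      [start, end] position(r) == from :-> to;
  //    };"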
}

/** First phase parser used to extract all type declarations from a given ANML string. */
class AnmlTypeParser(val initialModel: Model)(implicit predef: Predef)
    extends AnmlParser(initialModel) {

  val nonTypeToken: Parser[String] =
    (word | int | CharIn("{}[]();=:<>-+.,!/*")).!.namedFilter(_ != "type", "non-type-token")
  val typeDeclaration: Parser[TypeDeclaration] =
    (typeKW ~/ freeIdent ~ ("<" ~ declaredType).? ~ (";" | withKW)).map {
      case (name, None)                    => TypeDeclaration(Type.ObjSubType(ctx.id(name), Type.ObjectTop))
      case (name, Some(t: Type.ObjType))   => TypeDeclaration(Type.ObjSubType(ctx.id(name), t))
      case (name, Some(t: Type.IIntType))  => TypeDeclaration(Type.IntSubType(ctx.id(name), t))
      case (name, Some(t: Type.IRealType)) => TypeDeclaration(Type.RealSubType(ctx.id(name), t))
    }
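  // Illustrative type declarations accepted by the parser above (hypothetical names):
  //   "type Location;"         -- new subtype of the top object type
  //   "type Rover < Vehicle;"  -- subtype of a previously declared type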

  private[this] def currentModel: Model = ctx match {
    case m: Model => m
    case x        => sys.error("Current context is not a model")
  }

  private[this] val parser: Parser[Model] = {
    val typeDeclarationWithUpdate =
      typeDeclaration.optGet(currentModel + _).sideEffect(updateContext(_))
    ((Pass ~ (nonTypeToken | typeDeclarationWithUpdate).silent).rep ~ End).map(_ => currentModel)
  }
  def parse(input: String): Parsed[Model] = {
    updateContext(initialModel)
    parser.parse(input)
  }
}

object Parser {

  /** ANML model with default definitions already added */
  def baseAnmlModel(implicit predef: Predef): Model = predef.baseModel

  /** Parses an ANML string. If the previousModel parameter is Some(m), then the result
    * of parsing will be appended to m.
    **/
  def parse(input: String, previousModel: Option[Model] = None): ParseResult[Model] = {
    def formatFailure(failure: Failure[Char, String]): ParserFailure = {
      def toLineAndColumn(lines: Seq[String], index: Int, lineNumber: Int = 0): (String, Int, Int) =
        lines match {
          case Seq(head, _*) if index <= head.length =>
            (lines.head, lineNumber, index)
          case Seq(head, tail @ _*) =>
            toLineAndColumn(tail, index - head.length - 1, lineNumber + 1)
          case _ =>
            sys.error("Index is not in the provided lines")
        }

      val (faultyLine, faultyLineNumber, faultyColumnNumber) =
        toLineAndColumn(input.split('\n'), failure.index)
      ParserFailure(faultyLine, faultyLineNumber, faultyColumnNumber, failure.lastParser, None)
    }

    Try {
      new AnmlTypeParser(previousModel.getOrElse(baseAnmlModel)).parse(input) match {
        case Success(modelWithTypes, _) =>
          new AnmlModuleParser(modelWithTypes).parse(input) match {
            case Success(fullModel, _)    => ParseSuccess(fullModel)
            case x: Failure[Char, String] => formatFailure(x)
          }
        case x: Failure[Char, String] => formatFailure(x)
      }
    } match {
      case scala.util.Success(x) => x
      case scala.util.Failure(e) => ParserCrash(e, None)
    }
  }

  private def parseFromFile(file: File, previousModel: Option[Model] = None): ParseResult[Model] = {
    Try {
      val source = scala.io.Source.fromFile(file)
      val input: String = source.getLines.mkString("\n")
      source.close()
      parse(input, previousModel) match {
        case x: ParserFailure => x.copy(file = Some(file))
        case x                => x
      }
    } match {
      case scala.util.Success(x) => x
      case scala.util.Failure(e: IOException) =>
        FileAccessError(file, e)
      case scala.util.Failure(e) => ParserCrash(e, Some(file))

    }
  }

  /** Parses an ANML file.
    * If the file name is formatted as "XXXX.YYY.pb.anml", the file "XXXX.dom.anml" will be parsed
    * first and its content prepended to the model.
    **/
  def parse(file: File): ParseResult[Model] = {
    file.getName.split('.') match {
      case Array(domId, pbId, "pb", "anml") =>
        // file name formatted as domainID.pbID.pb.anml, load domainID.dom.anml first
        val domainFile = new File(file.getParentFile, domId + ".dom.anml")
        Parser
          .parseFromFile(domainFile)
          .flatMap(domainModel => parseFromFile(file, Some(domainModel)))
      case _ =>
        // not a problem file, load the file standalone
        Parser.parseFromFile(file)
    }
  }
}