commit 3f68337b4bd42addfa6f5f3b1c49a9324f4ab45e
parent e2dbc607501d98772fa74792d822547c74d8f619
Author: Robert Russell <robertrussell.72001@gmail.com>
Date: Sun, 7 Jan 2024 17:49:23 -0800
Return to a compiling state
Some things are a disaster
Diffstat:
| M | Makefile | | | 12 | ++++++++++-- |
| M | abstract.ml | | | 81 | +++++++++++++++++++++++++++++++++++++++++++++++-------------------------------- |
| D | cnctoabs.ml | | | 89 | ------------------------------------------------------------------------------- |
| D | common.ml | | | 29 | ----------------------------- |
| M | concrete.ml | | | 54 | +++++++++++++++++++++++++++--------------------------- |
| A | ctx.ml | | | 95 | +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ |
| A | desugar.ml | | | 88 | +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ |
| M | elab.ml | | | 647 | ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++--------------------- |
| A | env.ml | | | 32 | ++++++++++++++++++++++++++++++++ |
| M | eval.ml | | | 206 | ++++++++++++++++++++++++++++++++++++++++++++++++++++++------------------------- |
| A | global.ml | | | 25 | +++++++++++++++++++++++++ |
| A | index.ml | | | 45 | +++++++++++++++++++++++++++++++++++++++++++++ |
| M | internal.ml | | | 148 | +++++++++++++++++++++++++++++++++++++++++-------------------------------------- |
| M | lexer.mll | | | 2 | +- |
| M | loc.ml | | | 3 | ++- |
| M | main.ml | | | 48 | ++++++++++++++++++++++++++++++++++++------------ |
| A | name.ml | | | 7 | +++++++ |
| M | node.ml | | | 14 | ++++++++++++-- |
| M | parser.mly | | | 194 | +++++++++++++++++++++++++++++++++++++++++++++++++++---------------------------- |
| A | plicity.ml | | | 2 | ++ |
| A | porig.ml | | | 65 | +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ |
| A | repr.ml | | | 10 | ++++++++++ |
| A | shadow_stack.ml | | | 31 | +++++++++++++++++++++++++++++++ |
| A | tag.ml | | | 7 | +++++++ |
| M | tsil.ml | | | 14 | +++++++------- |
| A | uniq.ml | | | 19 | +++++++++++++++++++ |
| M | value.ml | | | 56 | +++++++++++++++++++++++++++++++++++++++++++------------- |
27 files changed, 1431 insertions(+), 592 deletions(-)
diff --git a/Makefile b/Makefile
@@ -1,12 +1,20 @@
SRC = \
+ tsil.ml \
+ name.ml \
+ tag.ml \
+ uniq.ml \
+ index.ml \
loc.ml \
node.ml \
- common.ml \
+ plicity.ml \
+ global.ml \
+ env.ml \
+ ctx.ml \
concrete.ml \
parser.ml \
lexer.ml \
abstract.ml \
- cnctoabs.ml \
+ desugar.ml \
internal.ml \
value.ml \
eval.ml \
diff --git a/abstract.ml b/abstract.ml
@@ -1,46 +1,61 @@
-open Common
+open Global
-(* Abstract syntax is "desugared" concrete syntax. *)
+(* Abstract syntax is "desugared" concrete syntax. See desugar.ml. *)
-(* TODO: Here (and elsewhere) we should be using arrays instead of lists.
- * Keeps lists in concrete syntax for convenience when parsing, but change
- * to arrays in abstract syntax and make the conversion in conctoabs. *)
+(*
+module type Definer = sig
+ type definiendum
+ type definiens
+
+ type t =
+ | NonRec of definiendum * definiens
+ | Rec of definiendum * definiens
+
+ val to_string : t -> string
+end
-type term = term' Node.t
+module type Binder = sig
+ type definiendum
+ type definiens
+ type t =
+ | NonRec of definiendum * definiens
+ | Rec of definiendum * definiens
+end
+*)
+
+type term = term' node
and term' =
-| Let of patt * term * term
-| Match of term * case list
+| Let of definer array * term
+| Match of term * case array
| Annot of term * term
-| App of term * term list
-| Fn of patt list * term
-| FnType of annot list * term
-| Data of ident * term list
-| DataType of annot list IdentMap.t
+| App of term * term array
+| Fn of param array * term
+| FnType of domain array * term
+| Data of Tag.t * term array
+| DataType of domain array Tag.Map.t
| Nat of Z.t
-| Var of ident * int
-(*
-| Access of term * ident
-| Index of term * term
-| Deref of term
-*)
+| Var of Name.t * int
+| Type (* Impossible to write in concrete syntax. Arises during desugaring. *)
+
+and definer = definer' node
+and definer' =
+| Definer of patt * term
+| RecDefiner of patt * term
-and case = case' Node.t
+and case = case' node
and case' = Case of patt * term
-and patt = patt' Node.t
+and 'a binder = 'a binder' node
+and 'a binder' = Binder of Plicity.t * patt * 'a
+and param = unit binder
+and domain = term binder
+
+and patt = patt' node
and patt' =
| PWild
-| PBind of ident
+| PBind of Name.t
| PAnnot of patt * term
-| PData of ident * patt list
-(*
-| PNat of Z.t
-| PAnd of patt * patt
-| POr of patt * patt
-| PAssign of term
-*)
+| PData of Tag.t * patt array
-and annot = annot' Node.t
-and annot' =
-| AType of term
-| APatt of patt * term
+let param_annot {data = Binder (_, _, ()); _} = None
+let domain_annot {data = Binder (_, _, (a : term)); _} = Some a
diff --git a/cnctoabs.ml b/cnctoabs.ml
@@ -1,89 +0,0 @@
-(* TODO: rename this module to "desugar" *)
-
-open Common
-module C = Concrete
-module A = Abstract
-
-exception Error of Loc.t * string
-
-let error loc msg = raise (Error (loc, msg))
-
-let rec term = function
-| Node.Of (_, C.Paren t) -> term t
-| n -> Node.map term' n
-and term' = function
-| C.Let (p, scrutinee, body) -> A.Let (patt p, term scrutinee, term body)
-(* TODO: Maybe Seq should desugar to a unit pattern instead, so that
- * non-unit results can't be ignored. *)
-| C.Seq (t, u) -> A.Let (Loc.None @$ A.PWild, term t, term u)
-| C.Match (scrutinee, cs) -> A.Match (term scrutinee, List.map case cs)
-| C.If (cond, t, f) ->
- A.Match (term cond, [
- Loc.None @$ A.Case (Loc.None @$ A.PData ("true", []), term t);
- Loc.None @$ A.Case (Loc.None @$ A.PWild, term f);
- ])
-| C.Iff (cond, t) ->
- A.Match (term cond, [
- Loc.None @$ A.Case (Loc.None @$ A.PData ("true", []), term t);
- Loc.None @$ A.Case (Loc.None @$ A.PWild, Loc.None @$ A.Data ("", []));
- ])
-| C.Annot (t, u) -> A.Annot (term t, term u)
-(* TODO: Idea: Apply variables in abstract syntax to have a level instead of
- * an index (encoded as a negative index) and desugar binary operators to
- * level 0 (i.e., index -1), so that they always refer to the top-level/
- * built-in operations. *)
-| C.Add (t, u) -> A.App (Loc.None @$ A.Var ("`+", 0), [term t; term u])
-| C.Mul (t, u) -> A.App (Loc.None @$ A.Var ("`*", 0), [term t; term u])
-| C.Or (t, u) -> A.App (Loc.None @$ A.Var ("`|", 0), [term t; term u])
-| C.And (t, u) -> A.App (Loc.None @$ A.Var ("`&", 0), [term t; term u])
-| C.App (f, args) -> A.App (term f, List.map term args)
-| C.Fn (ps, body) -> A.Fn (List.map patt ps, term body)
-| C.FnType (anns, cod) -> A.FnType (List.map annot anns, term cod)
-| C.Variant (tag, ts) -> A.Data (tag, List.map term ts)
-| C.VariantType ctors ->
- let rec go m = function
- | Node.Of (loc, C.Ctor (tag, anns)) :: ctors ->
- if IdentMap.mem tag m then
- error loc "duplicate constructor"
- else
- go (IdentMap.add tag (List.map annot anns) m) ctors
- | [] -> m in
- A.DataType (go IdentMap.empty ctors)
-| C.Tuple ts -> A.Data ("", List.map term ts)
-| C.TupleType anns ->
- A.DataType (IdentMap.singleton "" (List.map annot anns))
-| C.Record _ -> failwith "no impl: records"
-| C.RecordType _ -> failwith "no impl: record types"
-| C.Unit -> A.Data ("", [])
-| C.Nat n -> A.Nat n
-| C.Underscore -> failwith "no impl: holes"
-| C.IdentIndex (x, i) -> A.Var (x, i)
-| C.Ident x -> A.Var (x, 0)
-| C.Paren _ -> failwith "impossible"
-
-and case c = Node.map case' c
-and case' (C.Case (p, t)) = A.Case (patt p, term t)
-
-and patt = function
-| Node.Of (_, C.PParen p) -> patt p
-| n -> Node.map patt' n
-and patt' = function
-| C.PWild -> A.PWild
-| C.PBind x -> A.PBind x
-| C.PAnnot (p, t) -> A.PAnnot (patt p, term t)
-| C.PVariant (tag, ps) -> A.PData (tag, List.map patt ps)
-| C.PTuple ps -> A.PData ("", List.map patt ps)
-| C.PRecord pfs -> failwith "no impl: record patterns"
-| C.PUnit -> A.PData ("", [])
-| C.PNat _ -> failwith "no impl: nat patterns"
-| C.PAnd _ -> failwith "no impl: and patterns"
-| C.POr _ -> failwith "no impl: or patterns"
-| C.PParen _ -> failwith "impossible"
-
-and annot = function
-| Node.Of (_, C.AParen a) -> annot a
-| n -> Node.map annot' n
-and annot' = function
-| C.AType t -> A.AType (term t)
-| C.APatt (p, t) -> A.APatt (patt p, term t)
-| C.AParen _ -> failwith "impossible"
diff --git a/common.ml b/common.ml
@@ -1,29 +0,0 @@
-type name = Name of string
-
-module NameMap = Map.Make(struct
- type t = name
- let compare (Name x) (Name y) = String.compare x y
-end)
-
-type tag = Tag of string
-
-module TagMap = Map.Make(struct
- type t = tag
- let compare (Tag x) (Tag y) = String.compare x y
-end)
-
-type index = Index of int
-
-type plicity =
-| Implicit
-| Explicit
-
-let impossible () = failwith "impossible"
-
-(* Short-circuiting AND fold_left. *)
-let rec all2 f xs ys = match xs, ys with
-| x :: xs, y :: ys -> f x y && all2 f xs ys
-| [], [] -> true
-| _, _ -> invalid_arg "all2: lists have different length"
-
-let (@$) loc x = Node.Of (loc, x)
diff --git a/concrete.ml b/concrete.ml
@@ -1,54 +1,54 @@
-open Common
+open Global
-type term = term' Node.t
+type term = term' node
and term' =
-| Let of defn list * term
+| Let of definer array * term
| Seq of term * term
-| Match of term * case list
+| Match of term * case array
| If of term * term * term
| Iff of term * term
| Annot of term * term
| Add of term * term
| Mul of term * term
-| App of term * term list
-| Fn of param list * term
-| FnType of domain list * term
-| Variant of tag * term list
-| VariantType of ctor list
-| Tuple of term list
-| TupleType of domain list
+| App of term * term array
+| Fn of param array * term
+| FnType of domain array * term
+| Variant of Tag.t * term array
+| VariantType of ctor array
+| Tuple of term array
+| TupleType of domain array
| Unit
| Nat of Z.t
-| Var of name * int option
+| Var of Name.t * int option
| Paren of term
-and defn = defn' Node.t
-and defn' =
-| Defn of patt * term
-| RecDefn of patt * term
+and definer = definer' node
+and definer' =
+| Definer of patt * term
+| RecDefiner of patt * term
-and case = case' Node.t
+and case = case' node
and case' = Case of patt * term
-and param = param' Node.t
-and param' = Param of plicity * patt
+and param = param' node
+and param' = Param of Plicity.t * patt
-and domain = domain' Node.t
+and domain = domain' node
and domain' =
| DExplicit of term
| DImplicitType of patt
-| DAnnot of plicity * patt * term
+| DAnnot of Plicity.t * patt * term
-and ctor = ctor' Node.t
-and ctor' = Ctor of tag * domain list
+and ctor = ctor' node
+and ctor' = Ctor of Tag.t * domain array
-and patt = patt' Node.t
+and patt = patt' node
and patt' =
| PWild
-| PBind of name
+| PBind of Name.t
| PAnnot of patt * term
-| PVariant of tag * patt list
-| PTuple of patt list
+| PVariant of Tag.t * patt array
+| PTuple of patt array
| PUnit
| PNat of Z.t
| PAnd of patt * patt
diff --git a/ctx.ml b/ctx.ml
@@ -0,0 +1,95 @@
+open Global
+
+(* TODO: Rename to bindee. *)
+type 'a prebinding = {
+ names: Name.t array;
+ vtyp: 'a;
+}
+
+(* TODO: Rename to binding. *)
+type 'a postbinding = {
+ var: Uniq.t;
+ mutable vtyp: 'a;
+ mutable active: bool;
+}
+
+(* We decouple names in the surface language from variables in the core
+ * language/semantic domain. This means we can have multiple surface names
+ * that refer to the same variable (which happens with AND patterns), or we
+ * can have no surface names that refer to a variable.
+ *
+ * Since we have multi-binders (n-ary lambdas and mutually recursive lets), we
+ * use pairs of de Bruijn indices in core syntax. The first index refers to the
+ * multi-binder, and the second index refers to the binding in that
+ * multi-binder. We have
+ * index_to_uniq: Uniq.t array Index.Map.t
+ * instead of
+ * index_to_uniq: Uniq.t array <some efficient but mutable stack type>
+ * because index_to_uniq gets saved in closures, so it must not be mutable. *)
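+(* Illustrative sketch (not part of this module): how the index pairs line up
+ * with the Index.UniqTsil API for two nested multi-binders, e.g. inside the
+ * body of `fn x, y => fn z => ...`:
+ *
+ *   let outer = Uniq.fresh_array 2 in   (* variables for x, y *)
+ *   let inner = Uniq.fresh_array 1 in   (* variable for z *)
+ *   let ixs = Index.UniqTsil.(snoc (snoc lin outer) inner) in
+ *   assert (Index.UniqTsil.var_of ixs (Index.Of (0, 0)) = inner.(0)); (* z *)
+ *   assert (Index.UniqTsil.var_of ixs (Index.Of (1, 1)) = outer.(1))  (* y *)
+ *
+ * The first component of the pair selects the enclosing multi-binder
+ * (0 = innermost) and the second selects a binding within it. *)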
+type 'a context = {
+ names: 'a postbinding tsil Name.Map.t; (* XXX: tsil! Yuck! *)
+ indices: Index.UniqTsil.t;
+ values: 'a Uniq.Map.t;
+}
+
+let empty = {
+ names = Name.Map.empty;
+ indices = Index.UniqTsil.lin;
+ values = Uniq.Map.empty;
+}
+
+let to_env {indices; values; _} = {Env.indices; Env.values}
+
+let extend ctx vars = {ctx with indices = Index.UniqTsil.snoc ctx.indices vars}
+
+let extend_fresh ctx n =
+ let vars = Uniq.fresh_array n in
+ (extend ctx vars, vars)
+
+(* TODO: Somewhere we need to forbid multi-binders and multi-definers from
+ * declaring the same name twice. *)
+
+let bind_one ctx {names; vtyp} var =
+ let postb = {var; vtyp; active = true} in
+ let ctx = ref ctx in
+ for i = 0 to Array.length names - 1 do
+ let shadow = function
+ | None -> Some (Snoc (Lin, postb))
+ | Some l -> Some (Snoc (l, postb)) in
+ ctx := {!ctx with names = Name.Map.update names.(i) shadow !ctx.names}
+ done;
+ (!ctx, postb)
+
+(* TODO: Rename to extend_fresh_and_bind_blah? That's pretty long. Pick a
+ * more reasonable naming scheme. *)
+
+let extend_and_bind_one ctx preb =
+ let (ctx, vars) = extend_fresh ctx 1 in
+ let (ctx, postb) = bind_one ctx preb vars.(0) in
+ (ctx, postb)
+
+let extend_and_bind_all ctx prebs =
+ let n = Array.length prebs in
+ let postbs = Array.make n (Obj.magic 0) in
+ let (ctx, vars) = extend_fresh ctx n in
+ let ctx = ref ctx in
+ for i = 0 to n - 1 do
+ let (ctx', postb) = bind_one !ctx prebs.(i) vars.(i) in
+ ctx := ctx';
+ postbs.(i) <- postb
+ done;
+ (!ctx, postbs)
+
+let define ctx var value =
+ {ctx with values = Uniq.Map.add var value ctx.values}
+
+let lookup ctx x i =
+ match Name.Map.find_opt x ctx.names with
+ | Some l ->
+ let rec go i l = match i, l with
+ | 0, Snoc (_, ({active = true; _} as preb)) -> Some preb
+ | i, Snoc (l, {active = true; _}) -> go (i - 1) l
+ | i, Snoc (l, {active = false; _}) -> go i l
+ | _, Lin -> None in
+ go i l
+ | None -> None
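+
+(* Sketch of the intended behaviour (illustration only; x, t0, t1 are
+ * hypothetical names):
+ *
+ *   let (ctx, a) = extend_and_bind_one empty {names = [|x|]; vtyp = t0} in
+ *   let (ctx, b) = extend_and_bind_one ctx   {names = [|x|]; vtyp = t1} in
+ *   assert (lookup ctx x 0 = Some b);   (* innermost active binding of x *)
+ *   assert (lookup ctx x 1 = Some a);   (* next-older binding of x *)
+ *
+ * Bindings with active = false are skipped without consuming i. *)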
diff --git a/desugar.ml b/desugar.ml
@@ -0,0 +1,88 @@
+open Global
+module C = Concrete
+module A = Abstract
+
+exception Error of Loc.t * string
+let error loc msg = raise (Error (loc, msg))
+
+let rec term = function
+(* Handle parens specially, so that we use the inner loc. *)
+| {data = C.Paren t; _} -> term t
+| n -> Node.map term' n
+and term' = function
+| C.Let (definers, body) -> A.Let (Array.map definer definers, term body)
+(* TODO: Maybe Seq should desugar to a unit pattern instead, so that
+ * non-unit results can't be ignored. *)
+| C.Seq (t, u) ->
+ A.Let ([|Loc.None @$ A.Definer (Loc.None @$ A.PWild, term t)|], term u)
+| C.Match (scrutinee, cases) -> A.Match (term scrutinee, Array.map case cases)
+| C.If (c, t, f) ->
+ A.Match (term c, [|
+ Loc.None @$ A.Case (Loc.None @$ A.PData (Tag.Of "true", [||]), term t);
+ Loc.None @$ A.Case (Loc.None @$ A.PWild, term f);
+ |])
+| C.Iff (c, t) ->
+ A.Match (term c, [|
+ Loc.None @$ A.Case (Loc.None @$ A.PData (Tag.Of "true", [||]), term t);
+ Loc.None @$ A.Case (Loc.None @$ A.PWild, Loc.None @$ A.Data (Tag.Of "", [||]));
+ |])
+| C.Annot (t, u) -> A.Annot (term t, term u)
+(* TODO: Idea: Modify variables in abstract syntax to have a level instead of
+ * an index (encoded as a negative index) and desugar binary operators to
+ * level 0 (i.e., index -1), so that they always refer to the top-level/
+ * built-in operations. *)
+| C.Add (t, u) -> A.App (Loc.None @$ A.Var (Name.Of "`+", 0), [|term t; term u|])
+| C.Mul (t, u) -> A.App (Loc.None @$ A.Var (Name.Of "`*", 0), [|term t; term u|])
+| C.App (f, args) -> A.App (term f, Array.map term args)
+| C.Fn (params, body) -> A.Fn (Array.map param params, term body)
+| C.FnType (doms, cod) -> A.FnType (Array.map domain doms, term cod)
+| C.Variant (tag, fields) -> A.Data (tag, Array.map term fields)
+| C.VariantType ctors ->
+ let insert m {loc; data = C.Ctor (tag, doms)} =
+ if Tag.Map.mem tag m then
+ error loc "duplicate constructor"
+ else
+ Tag.Map.add tag (Array.map domain doms) m in
+ let ctors = Array.fold_left insert Tag.Map.empty ctors in
+ A.DataType ctors
+| C.Tuple fields -> A.Data (Tag.Of "", Array.map term fields)
+| C.TupleType doms ->
+ A.DataType (Tag.Map.singleton (Tag.Of "") (Array.map domain doms))
+| C.Unit -> A.Data (Tag.Of "", [||])
+| C.Nat n -> A.Nat n
+| C.Var (x, Some i) -> A.Var (x, i)
+| C.Var (x, None) -> A.Var (x, 0)
+| C.Paren _ -> impossible ()
+
+and definer d = Node.map definer' d
+and definer' = function
+| C.Definer (p, t) -> A.Definer (patt p, term t)
+| C.RecDefiner (p, t) -> A.RecDefiner (patt p, term t)
+
+and case c = Node.map case' c
+and case' (C.Case (p, t)) = A.Case (patt p, term t)
+
+and param par = Node.map param' par
+and param' (C.Param (plicity, p)) = A.Binder (plicity, patt p, ())
+
+and domain dom = Node.map domain' dom
+and domain' = function
+| C.DExplicit t -> A.Binder (Plicity.Ex, Loc.None @$ A.PWild, term t)
+| C.DImplicitType p -> A.Binder (Plicity.Im, patt p, Loc.None @$ A.Type)
+| C.DAnnot (plicity, p, t) -> A.Binder (plicity, patt p, term t)
+
+and patt = function
+(* Handle parens specially, so that we use the inner loc. *)
+| {data = C.PParen p; _} -> patt p
+| n -> Node.map patt' n
+and patt' = function
+| C.PWild -> A.PWild
+| C.PBind x -> A.PBind x
+| C.PAnnot (p, t) -> A.PAnnot (patt p, term t)
+| C.PVariant (tag, ps) -> A.PData (tag, Array.map patt ps)
+| C.PTuple ps -> A.PData (Tag.Of "", Array.map patt ps)
+| C.PUnit -> A.PData (Tag.Of "", [||])
+| C.PNat _ -> failwith "no impl: nat patterns"
+| C.PAnd _ -> failwith "no impl: and patterns"
+| C.POr _ -> failwith "no impl: or patterns"
+| C.PParen _ -> impossible ()
diff --git a/elab.ml b/elab.ml
@@ -1,121 +1,132 @@
-open Common
+open Global
open Eval
module A = Abstract
module I = Internal
module V = Value
-exception Error of Loc.t * string
-
-let error loc msg = raise (Error (loc, msg))
-
-(* TODO: use a hashtable or map for ident -> level lookups *)
-type context = {
- len: int;
- names: ident option list;
- values: V.value list;
- types: V.value list;
-}
-
-(* Add `x := v : a` to ctx. *)
-let bind_def ctx x v a = {
- len = 1 + ctx.len;
- names = x :: ctx.names;
- values = v :: ctx.values;
- types = a :: ctx.types;
-}
-
-(* Add `x : a` to ctx. *)
-let bind_arb ctx x a = bind_def ctx x (V.Arb ctx.len) a
-
-let default_ctx =
- let nat_op_type = V.FnType (
- [],
- 2,
- [I.Intrin I.NatType; I.Intrin I.NatType],
- I.Intrin I.NatType
- ) in
- let ctx = {len = 0; names = []; values = []; types = []} in
- let ctx = bind_def ctx (Some "Type") (V.Intrin I.Type) (V.Intrin I.Type) in
- let ctx = bind_def ctx (Some "Nat") (V.Intrin I.NatType) (V.Intrin I.Type) in
- let ctx = bind_def ctx (Some "`+") (V.Intrin I.NatOpAdd) nat_op_type in
- let ctx = bind_def ctx (Some "`*") (V.Intrin I.NatOpAdd) nat_op_type in
- ctx
-
-let rec infer ctx (Node.Of (loc, t)) = match t with
-| A.Let (p, scrutinee, body) ->
- let (x, scrutinee, va) =
- match infer_patt ctx p with
- | Some (x, va) ->
- let scrutinee = check ctx scrutinee va in
- (x, scrutinee, va)
- | None ->
- let (scrutinee, va) = infer ctx scrutinee in
- let x = check_patt ctx p va in
- (x, scrutinee, va) in
- let vscrutinee = eval ctx.values scrutinee in
- let (body, vb) = infer (bind_def ctx x vscrutinee va) body in
- (I.Let (scrutinee, body), vb)
-| A.Match _ -> failwith "no impl: match"
+(*module R = Porig.NoneOneTons*)
+
+type error = error' node
+and error' =
+| ErrNonInferablePatt of A.patt
+| ErrCantInferData
+| ErrNoLetAnnot of error
+| ErrNoBinderAnnot of error
+| ErrUnannotImplicitIsType of error
+| ErrApplyNonFunction of V.value
+| ErrUnifyFail of V.value * V.value
+| ErrDataPattRefutable
+| ErrBinderCycle of Name.t list
+| ErrAppArityMismatch
+| ErrUnboundVar of Name.t * int
+| ErrExpectedFnType of V.value
+| ErrExpectedDataType of V.value
+| ErrFnArityMismatch
+| ErrFnPlicityMismatch
+
+exception Exn of error
+
+let abort e = raise (Exn e)
+
+exception BlockedOnFuture of Uniq.t * int * Name.t
+
+(* Convention: An elaboration-related function can have two versions, one whose
+ * name ends with a prime, which can throw an Exn, and one whose name does not
+ * end with a prime, which returns in the result monad.
+ * TODO: Make sure every function has both versions, even if unused? *)
+
+let rec infer ctx t =
+ try Ok (infer' ctx t) with
+ | Exn e -> Error e
+and infer' ctx {loc; data = t} = match t with
+| A.Let (definers, body) ->
+ let (ctx, definers) = infer_definers' ctx definers in
+ let (body, va) = infer' ctx body in
+ (I.Let (definers, body), va)
+
+| A.Match _ -> no_impl "infer match"
+
| A.Annot (t, a) ->
- let a = check ctx a (V.Intrin I.Type) in
- let va = eval ctx.values a in
- let t = check ctx t va in
+ let a = check' ctx a V.Type in
+ let va = eval (Ctx.to_env ctx) a in
+ let t = check' ctx t va in
(t, va)
-| A.App (f, args) ->
- let (f, fty) = infer ctx f in
- begin match fty with
- | V.FnType (env, arity, doms, cod) ->
- if List.length args <> arity then
- error loc "arity mismatch in application"
- else
- let rec go env args doms = match args, doms with
- | arg :: args, dom :: doms ->
- let vdom = eval env dom in
- let arg = check ctx arg vdom in
- let varg = eval ctx.values arg in
- let (args, vcod) = go (varg :: env) args doms in
- (arg :: args, vcod)
- | [], [] -> ([], eval env cod)
- | _, _ -> failwith "impossible" in
- let (args, vcod) = go env args doms in
- (I.App (f, args), vcod)
- | _ -> error loc "expected function in application"
+
+| A.App (fn, explicit_args) ->
+ let explicit_count = Array.length explicit_args in
+
+ let (fn, va) = infer' ctx fn in
+ begin match force va with
+ | V.FnType (V.Closure (cloenv, {sorted_binders; codomain})) ->
+ let I.SortedBinders (binders, order) = sorted_binders in
+ let arity = Array.length binders in
+ let implicit_count = ref 0 in
+
+ (* Elaborate explicits to Some and implicits to None. *)
+ let args = Array.make arity (Obj.magic 0) in
+ for i = 0 to arity - 1 do
+ (* TODO: Once we allow passing an implicit argument explicitly,
+ * this logic will need to change a bit. *)
+ args.(i) <- match binders.(i) with
+ | I.Binder (Plicity.Ex, _) ->
+ let j = i - !implicit_count in
+ begin if explicit_count <= j then
+ abort (loc @$ ErrAppArityMismatch)
+ end;
+ Some (explicit_args.(j))
+ | I.Binder (Plicity.Im, _) ->
+ incr implicit_count;
+ None
+ done;
+
+ (* We had enough explicit args, but do we have too many? *)
+ begin if explicit_count > arity - !implicit_count then
+ abort (loc @$ ErrAppArityMismatch)
+ end;
+
+ (* Check arguments. *)
+ let args' = Array.make arity (Obj.magic 0) in
+ let (cloenv, vars) = Env.extend_fresh cloenv arity in
+ let cloenv = ref cloenv in
+ for i = 0 to arity - 1 do
+ let j = order.(i) in
+ let I.Binder (_, typ) = binders.(j) in
+ let vtyp = eval !cloenv typ in
+ let (arg, varg) = match args.(j) with
+ | Some arg ->
+ let arg = check' ctx arg vtyp in
+ let varg = eval (Ctx.to_env ctx) arg in
+ (arg, varg)
+ | None -> no_impl "implict args" in
+ args'.(j) <- arg;
+ cloenv := Env.define !cloenv vars.(j) varg
+ done;
+
+ let vcodomain = eval !cloenv codomain in
+ (I.App (fn, args'), vcodomain)
+ | va -> abort (loc @$ ErrApplyNonFunction va)
end
-| A.Fn (ps, body) ->
- let rec go ctx = function
- | Node.Of (_, A.PAnnot (p, dom)) :: ps ->
- let dom = check ctx dom (V.Intrin I.Type) in
- let vdom = eval ctx.values dom in
- let x = check_patt ctx p vdom in
- let (body, doms, vcod) = go (bind_arb ctx x vdom) ps in
- (body, dom :: doms, vcod)
- | Node.Of (loc, _) :: _ ->
- error loc "can not infer type for unannotated function parameter"
- | [] ->
- let (body, vcod) = infer ctx body in
- (body, [], vcod) in
- let (body, doms, vcod) = go ctx ps in
- let arity = List.length doms in
- let cod = quote (ctx.len + arity) vcod in
- (I.Fn (arity, body), V.FnType (ctx.values, arity, doms, cod))
-| A.FnType (anns, cod) ->
- let arity = List.length anns in
- let rec go ctx = function
- | Node.Of (_, ann) :: anns ->
- let (p, dom) = match ann with
- | A.AType dom -> (Loc.None @$ A.PWild, dom)
- | A.APatt (p, dom) -> (p, dom) in
- let dom = check ctx dom (V.Intrin I.Type) in
- let vdom = eval ctx.values dom in
- let x = check_patt ctx p vdom in
- let (doms, cod) = go (bind_arb ctx x vdom) anns in
- (dom :: doms, cod)
- | [] -> ([], check ctx cod (V.Intrin I.Type)) in
- let (doms, cod) = go ctx anns in
- (I.FnType (arity, doms, cod), V.Intrin I.Type)
-| A.Data _ ->
- error loc "can not infer type for data constructor"
-| A.DataType ctors ->
+
+| A.Fn (params, body) ->
+ no_impl "ahh"
+ (*
+ let (ctx', sorted_binders) = infer_binders' ctx loc A.param_annot params in
+ let (body, vcodomain) = infer' ctx' body in
+ let codomain = quote (Ctx.to_env ctx') vcodomain in
+ let fn = I.Fn {sorted_binders; body; codomain} in
+ let fntype = V.FnType (V.Closure (Ctx.to_env ctx, {sorted_binders; codomain})) in
+ (fn, fntype)
+ *)
+
+| A.FnType (domains, codomain) ->
+ let (ctx, sorted_binders) = infer_binders' ctx loc A.domain_annot domains in
+ let codomain = check' ctx codomain V.Type in
+ (I.FnType {sorted_binders; codomain}, V.Type)
+
+| A.Data _ -> abort (loc @$ ErrCantInferData)
+
+| A.DataType ctors -> no_impl "infer datatype"
+ (*
let rec go ctx = function
| Node.Of (_, ann) :: anns ->
let (p, a) = match ann with
@@ -127,56 +138,80 @@ let rec infer ctx (Node.Of (loc, t)) = match t with
a :: go (bind_arb ctx x va) anns
| [] -> [] in
(I.DataType (IdentMap.map (go ctx) ctors), V.Intrin I.Type)
-| A.Nat n -> (I.Nat n, V.Intrin I.NatType)
+ *)
+
+| A.Nat n -> (I.Nat n, V.Builtin I.NatType)
+
| A.Var (x, i) ->
- let rec go j i' ys ts = match i', ys, ts with
- | 0, Some y :: ys, t :: ts ->
- if x = y then (I.Var j, t) else go (j + 1) 0 ys ts
- | i', Some y :: ys, t :: ts ->
- go (j + 1) (if x = y then i' - 1 else i') ys ts
- | i', None :: ys, t :: ts ->
- go (j + 1) i' ys ts
- | _, [], [] ->
- (* TODO: We should print x ^ "#0" iff the programmer wrote x ^ "#0". *)
- let x = if i = 0 then x else x ^ "#" ^ string_of_int i in
- error loc ("unbound identifier: " ^ x)
- | _, _, _ -> failwith "impossible" in
- go 0 i ctx.names ctx.types
-
-and check ctx (Node.Of (loc, t') as t) va = match t', va with
-(* TODO: Match *)
-| A.Let (p, scrutinee, body), vb ->
- let (x, scrutinee, va) =
- match infer_patt ctx p with
- | Some (x, va) ->
- let scrutinee = check ctx scrutinee va in
- (x, scrutinee, va)
- | None ->
- let (scrutinee, va) = infer ctx scrutinee in
- let x = check_patt ctx p va in
- (x, scrutinee, va) in
- let vscrutinee = eval ctx.values scrutinee in
- let body = check (bind_def ctx x vscrutinee va) body vb in
- I.Let (scrutinee, body)
-| A.Fn (ps, body), V.FnType (env, arity, doms, cod) ->
- if List.length ps <> arity then
- error loc "arity mismatch"
- else
- let rec go ctx ps env doms = match ps, doms with
- | p :: ps, dom :: doms ->
- let vdom = eval env dom in
- let x = check_patt ctx p vdom in
- let ctx = bind_arb ctx x vdom in
- let env = List.hd ctx.values :: env in
- go ctx ps env doms
- | [], [] ->
- let vcod = eval env cod in
- check ctx body vcod
- | _, _ -> failwith "impossible" in
- let cbody = go ctx ps env doms in
- I.Fn (arity, cbody)
-| A.Fn _, _ -> error loc "expected function type"
-| A.Data (tag, args), V.DataType (env, ctors) ->
+ begin match Ctx.lookup ctx x i with
+ | Some {vtyp = V.Future (id, j); _} -> raise (BlockedOnFuture (id, j, x))
+ | Some {var; vtyp; _} -> (quote (Ctx.to_env ctx) (V.Arb var), vtyp)
+ | None -> abort (loc @$ ErrUnboundVar (x, i))
+ end
+
+| A.Type -> (I.Type, V.Type)
+
+and check ctx t va =
+ try Ok (check' ctx t va) with
+ | Exn e -> Error e
+and check' ctx ({loc; data = t'} as t) va = match t', force va with
+| A.Let (definers, body), va ->
+ let (ctx, definers) = infer_definers' ctx definers in
+ let body = check' ctx body va in
+ I.Let (definers, body)
+
+| A.Match _, _ -> no_impl "check match"
+
+| A.Fn (params, body), V.FnType (V.Closure (cloenv, {sorted_binders; codomain})) ->
+ let I.SortedBinders (domains, order) = sorted_binders in
+
+ begin if Array.length params <> Array.length domains then
+ abort (loc @$ ErrFnArityMismatch)
+ end;
+ let arity = Array.length params in
+
+ (* TODO: Should the domain(s) of V.FnTypes (and similarly V.DataTypes) be
+ * stored already applied to variables? This would avoid the need to extend
+ * here (and elsewhere?), but it may require quoting elsewhere. *)
+
+ (* Weaken both ctx and cloenv with the same variables. *)
+ let vars = Uniq.fresh_array arity in
+ let ctx = Ctx.extend ctx vars in
+ let cloenv = Env.extend cloenv vars in
+
+ let ctx = ref ctx in
+ let params' = Array.make arity (Obj.magic 0) in
+ for i = 0 to arity - 1 do
+ let j = order.(i) in
+ let A.Binder (param_plicity, patt, ()) = params.(j).data in
+ let I.Binder (domain_plicity, domain) = domains.(j) in
+
+ begin if param_plicity <> domain_plicity then
+ abort (loc @$ ErrFnPlicityMismatch)
+ end;
+ let plicity = param_plicity in
+
+ let vdomain = eval cloenv domain in
+ let names = check_irrefutable_patt' !ctx patt vdomain in
+ params'.(j) <- I.Binder (plicity, quote (Ctx.to_env !ctx) vdomain);
+
+ (* Bind the names in ctx, since the other binders may refer to them. *)
+ let (ctx', _) = Ctx.bind_one !ctx {names; vtyp = vdomain} vars.(j) in
+ ctx := ctx';
+ done;
+ let vcodomain = eval cloenv codomain in
+ let body = check' !ctx body vcodomain in
+ I.Fn {
+ sorted_binders = I.SortedBinders (params', order);
+ body;
+ codomain = quote (Ctx.to_env !ctx) vcodomain;
+ }
+
+| A.Fn _, va -> abort (loc @$ ErrExpectedFnType va)
+
+| A.Data (tag, fields), V.DataType (V.Closure (cloenv, ctors)) ->
+ no_impl "check data"
+ (*
begin match IdentMap.find_opt tag ctors with
| Some bs ->
if List.length args <> List.length bs then
@@ -194,28 +229,297 @@ and check ctx (Node.Of (loc, t') as t) va = match t', va with
I.Data (tag, go args env bs)
| None -> error loc ("expected data type with constructor \"" ^ tag ^ "\"")
end
-| A.Data _, _ -> error loc "expected data type"
-(* TODO: use (A _ | B _ | C _) idiom for catch-all case so that we get an
- * exhaustiveness warning when we add new variants *)
-| _, vexpected ->
- let (t, vinferred) = infer ctx t in
- if conv ctx.len vexpected vinferred then
+ *)
+
+| A.Data _, va -> abort (loc @$ ErrExpectedDataType va)
+
+| (A.Annot _ | A.App _ | A.FnType _ | A.DataType _ | A.Nat _ | A.Var _ | A.Type), vexpected ->
+ let (t, vinferred) = infer' ctx t in
+ if conv (Ctx.to_env ctx) vexpected vinferred then
t
else
- (* TODO: pretty-print types *)
- error loc "expected/inferred mismatch"
+ abort (loc @$ ErrUnifyFail (vexpected, vinferred))
+
+(* TODO: We need to check for circularity. Idea for algorithm: Have elaboration
+ * routines track not only variable usage, but also the least "delay"
+ * associated with each variable usage. E.g., a variable x uses x with delay 0
+ * (it's used immediately), a function uses the variables used in its body with
+ * their delay + 1, and a β-redex uses the variables in the lambda with their
+ * delay - 1. *)
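+(* Illustration of the idea (schematic surface syntax, not yet implemented):
+ *   x := x              delay-0 usage of x: a genuine cycle, reject
+ *   f := fn n => f n    the lambda raises the delay of f to 1: fine
+ *   y := (fn n => y) 0  the β-redex lowers the delay back to 0: reject
+ * Only usages whose accumulated delay can reach 0 make a definition
+ * circular. *)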
+and infer_definers' ctx = function
+| [||] -> impossible ()
+(* Single non-recursive definer. These are handled specially, because
+ * the type annotation on the definiendum is optional. *)
+| [|{loc = definer_loc; data = A.Definer (patt, t)}|] ->
+ let (ctx, vars) = Ctx.extend_fresh ctx 1 in
+ let (xs, t, va) =
+ (* First try to infer from definiendum (and check definiens). *)
+ match infer_irrefutable_patt ctx patt with
+ | Ok {Ctx.names; Ctx.vtyp} ->
+ let t = check' ctx t vtyp in
+ (names, t, vtyp)
+ | Error {data = ErrNonInferablePatt _; _} ->
+ (* Next try to infer from definiens (and check definiendum). *)
+ let (t, va) = infer' ctx t in
+ let xs = check_irrefutable_patt' ctx patt va in
+ (xs, t, va)
+ | Error e -> abort e in
+ let (ctx, postb) = Ctx.bind_one ctx {names = xs; vtyp = va} vars.(0) in
+ let vt = lazy_eval (Ctx.to_env ctx) t in
+ let ctx = Ctx.define ctx vars.(0) vt in
+ (ctx, [|I.Definer t|])
+(* Single recursive definer or many mutually-recursive definers. In either
+ * case, a type annotation on the definiendum is mandatory. Probably, we could
+ * use meta-variables/unification to permit omitting annotations in some cases
+ * (e.g., like OCaml does with let rec ... and ...), but we generally try to
+ * avoid meta-variables/unification, because they're unpredictable; inference
+ * should only rely on local information. *)
+| definers ->
+ let ndefiners = Array.length definers in
+ let bindings = Array.map (infer_definiendum' ctx) definers in
+ let (ctx, postbs) = Ctx.extend_and_bind_all ctx bindings in
+ let definers' = Array.make ndefiners (Obj.magic 0) in
+ let ctx' = ref ctx in
+ for i = 0 to ndefiners - 1 do
+ let {Ctx.vtyp; _} : V.value Ctx.prebinding = bindings.(i) in
+ let t = match definers.(i).data with
+ | A.Definer (_, t) ->
+ (* The definiendum of non-recursive definers is not in scope when
+ * checking the definiens, so mark the definiendum as inactive.
+ * XXX: Using references like this is efficient, but ugly. *)
+ postbs.(i).active <- false;
+ let t = check' ctx t vtyp in
+ postbs.(i).active <- true;
+ definers'.(i) <- I.Definer t;
+ t
+ | A.RecDefiner (_, t) ->
+ let t = check' ctx t vtyp in
+ definers'.(i) <- I.Definer t;
+ t in
+ (* Lazily evaluate each definiens in the old context ctx, and
+ * accumulate a new context !ctx' with the definitions. *)
+ let vt = lazy_eval (Ctx.to_env ctx) t in
+ ctx' := Ctx.define !ctx' postbs.(i).var vt
+ done;
+ (!ctx', definers')
+
+and infer_definiendum' ctx {loc; data = A.Definer (p, _) | A.RecDefiner (p, _)} =
+ match infer_irrefutable_patt ctx p with
+ | Ok binding -> binding
+ | Error ({data = ErrNonInferablePatt _; _} as root) ->
+ abort (loc @$ ErrNoLetAnnot root)
+ | Error e -> abort e
+
+and (infer_binders' :
+ V.value Ctx.context
+ -> Loc.t
+ -> ('a A.binder -> A.term option)
+ -> 'a A.binder array
+ -> V.value Ctx.context * I.sorted_binders)
+= fun ctx loc annot binders ->
+ let arity = Array.length binders in
+
+ (* We associate two different orders on the variables of a multibinder.
+ * The *type dependency ordering* is the order where x <T y iff the type of
+ * y depends on x. The *representation dependency ordering* is the order
+ * where x <R y iff the representation of y depends on x. Note that x <R y
+ * implies x <T y, but these two orders needn't coincide; for instance,
+ * (A: Type) <T (p: Ptr A), but not A <R p. We type check multibinders
+ * along a topological sorting of the variables with respect to the type
+ * dependency order. There may be multiple such sortings, but the
+ * programmer has no reason to care which one we choose. On the other hand,
+ * the representation dependency order comes in to play when laying out
+ * arguments of a function or fields of a data structure in memory.
+ * Specifically, arguments/fields must be topologically sorted in memory
+ * with respect to the representation dependency order. Again, there may
+ * be multiple such sortings, but this time, the programmer might actually
+ * care which one we use. For maximum flexibility, we let the source code
+ * ordering of arguments/fields determine which topological sorting of <R
+ * we use for memory layout, and we permit this sorting to NOT be a
+ * topological sorting of <T, in contrast to existing implementations of
+ * dependent types. (In practice, it is desirable to have the source code
+ * order of arguments/fields be completely irrelevant by default, and have
+ * the compiler automatically pick a reasonable memory layout (e.g., a
+ * memory layout that minimizes padding). However, it is crucial for
+ * low-level applications (e.g., modelling network packets) that there is
+ * some way to request a particular memory layout.) This flexibility leads
+ * to some difficulties here in the implementation, because we can't know
+ * <T until we try type-checking each binder. To solve the problem, we
+ * introduce a new kind of value, namely a Future, which represents the
+ * type of a binder that we haven't type-checked yet. Whenever a future is
+ * encountered, we raise the BlockedOnFuture exception, and indicate which
+ * (multibinder, binder) pair we got stuck on. This exception is caught by
+ * the associated multibinder, which updates a graph-like data structure
+ * that tracks dependencies between binders. Every time a binder
+ * successfully type-checks, we retry type checking each of its dependents.
+ * This means that, after trying to type-check each binder at least once,
+ * we're guaranteed to have found a topological sorting of <T if there is
+ * one, and otherwise there is a cycle, which we report as an error. *)
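+ (* Worked example (illustration only, reusing the Ptr example above): for a
+ * multibinder written in source order as [p: Ptr A, A: Type], checking p
+ * first blocks on the Future for A; once A has been typed, p succeeds, so
+ * we end up with
+ *   binders' = [| Binder (Ex, Ptr A); Binder (Ex, Type) |]  (source order)
+ *   order    = [| 1; 0 |]                                   (type-checking order)
+ * i.e. order.(k) is the source position of the k-th binder in the
+ * topological sorting of <T, while memory layout follows source order. *)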
+
+ (* We generate a globally unique id for this multibinder so that we can
+ * determine which BlockedOnFuture exceptions belong to us (see below). *)
+ let id = Uniq.fresh () in
+
+ (* Add future bindings to ctx. *)
+ let future_preb i {data = A.Binder (_, p, _); _} =
+ {Ctx.names = irrefutable_patt_names p; Ctx.vtyp = V.Future (id, i)} in
+ let prebs = Array.mapi future_preb binders in
+ let (ctx, postbs) = Ctx.extend_and_bind_all ctx prebs in
+
+ (* The data structure used to track binder states. *)
+ let open struct
+ module IntSet = Set.Make(Int)
+
+ type state =
+ | Pending of {
+ dependency: (int * Name.t) option;
+ dependents: IntSet.t;
+ }
+ | Typed of int
+
+ let start_state = Pending {
+ dependency = None;
+ dependents = IntSet.empty;
+ }
+
+ let find_cycle states start =
+ let rec search cycle i = match states.(i) with
+ | Pending {dependency = Some (j, x); _} ->
+ if i = j then
+ x :: cycle
+ else
+ search (x :: cycle) j
+ | Pending {dependency = None; _} -> impossible ()
+ | Typed _ -> impossible () in
+ search [] start
+ end in
+ let states = Array.make arity start_state in
+
+ (* Next index in the topological sorting of <T. *)
+ let next = ref 0 in
+
+ let rec try_binder i =
+ let dependents = match states.(i) with
+ | Pending {dependency = None; dependents} -> dependents
+ | Pending {dependency = Some _; _} -> impossible ()
+ | Typed _ -> impossible () in
+ match infer_binder' ctx annot binders.(i) with
+ | va ->
+ postbs.(i).vtyp <- va;
+ states.(i) <- Typed !next;
+ incr next;
+ IntSet.iter remove_dependency dependents
+ | exception BlockedOnFuture (id', j, x) when id' = id ->
+ states.(i) <- Pending {dependency = Some (j, x); dependents};
+ states.(j) <- match states.(j) with
+ | Pending r -> Pending {r with dependents = IntSet.add i r.dependents}
+ | Typed _ -> impossible ()
+ and remove_dependency j =
+ let dependents = match states.(j) with
+ | Pending {dependency = Some _; dependents} -> dependents
+ | Pending {dependency = None; _} -> impossible ()
+ | Typed _ -> impossible () in
+ states.(j) <- Pending {dependency = None; dependents};
+ try_binder j in
+
+ for i = 0 to arity - 1 do try_binder i done;
+
+ (* Put the binders in the topologically sorted (wrt <T) order that we just
+ * found, but also keep track of the original order, because we need it for
+ * code generation. Abort if a cycle is discovered. *)
+ let binders' = Array.make arity (Obj.magic 0) in
+ let order = Array.make arity (-1) in
+ for i = 0 to arity - 1 do
+ match states.(i) with
+ | Typed j ->
+ let A.Binder (plicity, _, _) = binders.(i).data in
+ let typ = quote (Ctx.to_env ctx) postbs.(i).vtyp in
+ binders'.(i) <- I.Binder (plicity, typ);
+ order.(j) <- i
+ | Pending _ ->
+ abort (loc @$ ErrBinderCycle (find_cycle states i))
+ done;
+ (ctx, I.SortedBinders (binders', order))
+
+and infer_binder' ctx annot binder = match annot binder with
+| Some a ->
+ let {data = A.Binder (_, patt, _); _} = binder in
+ let a = check' ctx a V.Type in
+ let va = eval (Ctx.to_env ctx) a in
+ let _ = check_irrefutable_patt' ctx patt va in
+ va
+| None ->
+ let {data = A.Binder (plicity, patt, _); loc} = binder in
+ match infer_irrefutable_patt ctx patt, plicity with
+ | Ok {Ctx.vtyp = va; _}, _ -> va
+ | Error {data = ErrNonInferablePatt _; _}, Plicity.Im ->
+ begin match check_irrefutable_patt ctx patt V.Type with
+ | Ok _ -> V.Type
+ | Error e -> abort (loc @$ ErrUnannotImplicitIsType e)
+ end
+ | Error ({data = ErrNonInferablePatt _; _} as root), Plicity.Ex ->
+ abort (loc @$ ErrNoBinderAnnot root)
+ | Error e, _ -> abort e
+
+(* TODO: Should this really return a binding? *)
+and infer_irrefutable_patt ctx p =
+ try Ok (infer_irrefutable_patt' ctx p) with
+ | Exn e -> Error e
+and infer_irrefutable_patt' ctx ({loc; data = p} as n) = match p with
+| A.PWild -> abort (loc @$ ErrNonInferablePatt n)
+| A.PBind _ -> abort (loc @$ ErrNonInferablePatt n)
+| A.PAnnot (p, a) ->
+ let a = check' ctx a V.Type in
+ let vtyp = eval (Ctx.to_env ctx) a in
+ let names = check_irrefutable_patt' ctx p vtyp in
+ {names; vtyp}
+| A.PData _ -> abort (loc @$ ErrDataPattRefutable)
+
+and check_irrefutable_patt ctx p va =
+ try Ok (check_irrefutable_patt' ctx p va) with
+ | Exn e -> Error e
+and check_irrefutable_patt' ctx {loc; data = p} va = match p with
+| A.PWild -> [||]
+| A.PBind x -> [|x|]
+| A.PAnnot (p, b) ->
+ let b = check' ctx b V.Type in
+ let vb = eval (Ctx.to_env ctx) b in
+ if conv (Ctx.to_env ctx) va vb then
+ check_irrefutable_patt' ctx p va
+ else
+ abort (loc @$ ErrUnifyFail (va, vb))
+| A.PData _ -> abort (loc @$ ErrDataPattRefutable)
+
+and irrefutable_patt_names {loc; data = p} = match p with
+| A.PWild -> [||]
+| A.PBind x -> [|x|]
+| A.PAnnot (p, _) -> irrefutable_patt_names p
+| A.PData _ -> abort (loc @$ ErrDataPattRefutable)
+
+
+
+
+
+
+
+
+
+
+
+
(* Currently, we can only infer types from annotation patterns, but in the
* future we will be able to infer type T from p & q if we can infer it from
* p (resp., q) and check q (resp., p) against T. *)
-(* TODO: consistency issue: infer_patt returns option, whereas infer raises an
- * exception *)
+(* TODO: Consistency issue: infer_patt returns option, whereas infer raises an
+ * exception. *)
+ (*
and infer_patt ctx (Node.Of (_, p)) = match p with
| A.PWild -> None
| A.PBind _ -> None
| A.PAnnot (p, a) ->
- let a = check ctx a (V.Intrin I.Type) in
- let va = eval ctx.values a in
+ let a = check ctx a V.Type in
+ let va = eval (Ctx.to_env ctx) a in
let x = check_patt ctx p va in
Some (x, va)
| A.PData _ -> None
@@ -232,7 +536,6 @@ and check_patt ctx (Node.Of (loc, p)) va = match p, va with
(* TODO: pretty-print types *)
error loc "pattern does not unify with inferred type"
| A.PData _, _ -> failwith "no impl"
-(*
| A.PData (tag, ps), V.DataType (env, ctors) ->
(* TODO: this is similar to checking code for Data. *)
begin match IdentMap.find_opt tag ctors with
diff --git a/env.ml b/env.ml
@@ -0,0 +1,32 @@
+open Global
+
+(* TODO: Be smarter: use modules to avoid the redundancy between Env and Ctx. *)
+
+type 'a environment = {
+ indices: Index.UniqTsil.t; (* TODO: This is a terrible name. *)
+ values: 'a Uniq.Map.t;
+}
+
+let empty = {
+ indices = Index.UniqTsil.lin;
+ values = Uniq.Map.empty;
+}
+
+let extend env vars = {env with indices = Index.UniqTsil.snoc env.indices vars}
+
+let extend_fresh env n =
+ let vars = Uniq.fresh_array n in
+ (extend env vars, vars)
+
+let define env var value =
+ {env with values = Uniq.Map.add var value env.values}
+
+let get env var = Uniq.Map.find_opt var env.values
+
+let var_of env ij = Index.UniqTsil.var_of env.indices ij
+
+(* TODO temp *)
+let index_of env var =
+ match Index.UniqTsil.index_of env.indices var with
+ | Some ij -> Some ij
+ | None -> failwith "uh oh"
diff --git a/eval.ml b/eval.ml
@@ -1,87 +1,165 @@
-open Common
+open Global
module I = Internal
module V = Value
-(* Extend env with values Arb l, Arb (l+1), ..., Arb (l+n-1). *)
-let weaken env l n =
- let rec go env i =
- if i < l + n then go (V.Arb i :: env) (i + 1) else env in
- go env l
+(* TODO: "Glued" evaluation? I.e., evaluation that only lazily unfolds in some
+ * cases? *)
let rec eval env = function
-| I.Let (scrutinee, body) -> eval (eval env scrutinee :: env) body
-| I.Switch _ -> failwith "no impl: switch"
-| I.App (callee, args) -> apply (eval env callee) (List.map (eval env) args)
-| I.Fn (arity, body) -> V.Fn (env, arity, body)
-| I.FnType (arity, doms, cod) -> V.FnType (env, arity, doms, cod)
-| I.Data (tag, args) -> V.Data (tag, List.map (eval env) args)
-| I.DataType ctors -> V.DataType (env, ctors)
+| I.Let (definers, body) ->
+ let ndefiners = Array.length definers in
+ let (env, vars) = Env.extend_fresh env ndefiners in
+ let env' = ref env in
+ for i = 0 to ndefiners - 1 do
+ let Definer definiens = definers.(i) in
+ (* In typical programs, most definitions will not appear inside a type
+ * and hence won't be tested for conversion. Thus, it's important that
+ * we only lazily evaluate each definiens. *)
+ let vdefiniens = lazy_eval env definiens in
+ env' := Env.define !env' vars.(i) vdefiniens
+ done;
+ eval !env' body
+(* TODO: Lazily evaluate args? *)
+| I.App (fn, args) -> apply env (eval env fn) (Array.map (eval env) args)
+| I.Fn fn -> V.Fn (V.Closure (env, fn))
+| I.FnType fntype -> V.FnType (V.Closure (env, fntype))
+| I.Data _ -> no_impl "eval data"
+| I.DataType _ -> no_impl "eval datatype"
| I.Nat n -> V.Nat n
-| I.Var i -> List.nth env i
-| I.Intrin a -> V.Intrin a
+| I.Var ij ->
+ let var = Env.var_of env ij in
+ begin match Env.get env var with
+ | Some v -> v
+ | None -> Arb var
+ end
+| I.Type -> V.Type
+| I.Builtin b -> V.Builtin b
-and apply callee args = match callee, args with
-(* β-reduction *)
-| V.Fn (cap, _, body), args -> eval (List.rev_append args cap) body
-| V.Intrin I.NatOpAdd, [V.Nat m; V.Nat n] -> V.Nat (Z.add m n)
-| V.Intrin I.NatOpMul, [V.Nat m; V.Nat n] -> V.Nat (Z.mul m n)
-(* neutral application *)
-| callee, args -> App (callee, args)
+(* TODO: Optimization: Change `Lazy of value lazy` to `Lazy of env * term`,
+ * so that when later forcing/reevaluating a lazy value, we can just change the
+ * "values" field of the environment (but not the "indices" field, because that
+ * is sensitive to term structure) and call eval. *)
+and lazy_eval env t = V.Lazy (lazy (eval env t))
-(* TODO: quote of eval normalizes terms; in some cases below, we probabaly
- * don't need to eval before quoting *)
-let rec quote l = function
-| V.Switch _ -> failwith "no impl: switch"
-| V.App (f, args) -> I.App (quote l f, List.map (quote l) args)
-| V.Fn (env, arity, body) ->
- let vbody = eval (weaken env l arity) body in
- I.Fn (arity, quote (l + arity) vbody)
-| V.FnType (env, arity, doms, cod) ->
- let rec go l env = function
- | dom :: doms ->
- let cdom = quote l (eval env dom) in
- let (cdoms, ccod) = go (l + 1) (V.Arb l :: env) doms in
- (cdom :: cdoms, ccod)
- | [] -> ([], quote l (eval env cod)) in
- let (cdoms, ccod) = go l env doms in
- I.FnType (arity, cdoms, ccod)
-| V.Data (tag, args) -> I.Data (tag, List.map (quote l) args)
-| V.DataType (env, ctors) ->
+and apply env fn args = match fn, args with
+| V.Lazy fn, args -> V.Lazy (lazy (apply env (Lazy.force fn) args))
+| V.Fn (V.Closure (cloenv, {sorted_binders; body; _})), args ->
+ let I.SortedBinders (binders, _) = sorted_binders in
+ let arity = Array.length binders in
+ let (cloenv, vars) = Env.extend_fresh cloenv arity in
+ let cloenv = fold_left2 Env.define cloenv vars args in
+ eval cloenv body
+| V.Builtin I.NatOpAdd, [|V.Nat m; V.Nat n|] -> V.Nat (Z.add m n)
+| V.Builtin I.NatOpMul, [|V.Nat m; V.Nat n|] -> V.Nat (Z.mul m n)
+| fn, args -> V.App (fn, args)
+
+(* TODO: Quote of eval normalizes terms; in many cases, this does unnecessary
+ * work. *)
+let rec quote env = function
+| V.App (fn, args) -> I.App (quote env fn, Array.map (quote env) args)
+| V.Fn (V.Closure (cloenv, {sorted_binders; body; codomain})) ->
+ (* TODO: This is definitely duplicated somewhere. *)
+ let I.SortedBinders (binders, order) = sorted_binders in
+ let arity = Array.length binders in
+ let (cloenv, vars) = Env.extend_fresh cloenv arity in
+ let env = Env.extend env vars in
+ let binders' = Array.make arity (Obj.magic 0) in
+ for i = 0 to arity - 1 do
+ let j = order.(i) in
+ let I.Binder (plicity, ty) = binders.(j) in
+ let ty = quote env (eval cloenv ty) in
+ binders'.(j) <- I.Binder (plicity, ty)
+ done;
+ let sorted_binders = I.SortedBinders (binders', order) in
+ let body = quote env (eval cloenv body) in
+ let codomain = quote env (eval cloenv codomain) in
+ I.Fn {sorted_binders; body; codomain}
+| V.FnType (V.Closure (cloenv, {sorted_binders; codomain})) ->
+ (* TODO: This is definitely duplicated somewhere. *)
+ let I.SortedBinders (binders, order) = sorted_binders in
+ let arity = Array.length binders in
+ let (cloenv, vars) = Env.extend_fresh cloenv arity in
+ let env = Env.extend env vars in
+ let binders' = Array.make arity (Obj.magic 0) in
+ for i = 0 to arity - 1 do
+ let j = order.(i) in
+ let I.Binder (plicity, ty) = binders.(j) in
+ let ty = quote env (eval cloenv ty) in
+ binders'.(j) <- I.Binder (plicity, ty)
+ done;
+ let sorted_binders = I.SortedBinders (binders', order) in
+ let codomain = quote env (eval cloenv codomain) in
+ I.FnType {sorted_binders; codomain}
+| V.Data _ -> no_impl "quote data"
+ (*
+ I.Data (tag, List.map (quote l) args)
+ *)
+| V.DataType _ -> no_impl "quote datatype"
+ (*
let rec go l env = function
| b :: bs ->
let b = quote l (eval env b) in
b :: go (l + 1) (V.Arb l :: env) bs
| [] -> [] in
I.DataType (IdentMap.map (go l env) ctors)
+ *)
| V.Nat n -> I.Nat n
-| V.Arb i -> I.Var (l - i - 1)
-| V.Intrin a -> I.Intrin a
+| V.Arb var ->
+ begin match Env.index_of env var with
+ | Some ij -> I.Var ij
+ | None -> impossible () (* XXX: Is it? *)
+ end
+| V.Type -> I.Type
+| V.Builtin b -> I.Builtin b
+| V.Lazy l -> quote env (Lazy.force l)
+| V.Future _ -> impossible () (* XXX: Is it? *)
-let rec quote_telescope l = function
-| t :: ts -> quote l t :: quote_telescope (l + 1) ts
-| [] -> []
+let force = function
+| V.Lazy l -> Lazy.force l
+| v -> v
-let rec conv l a b = match a, b with
+let rec conv env v0 v1 = match force v0, force v1 with
(* TODO: η-equality? *)
(* TODO: Switch *)
-| V.App (af, aargs), V.App (bf, bargs) ->
+| V.App (fn0, args0), V.App (fn1, args1) ->
(* Note that aargs and bargs aren't necessarily the same length, but they
- * are if af and bf are convertible, so the short-circuiting of (&&)
+ * are if fn0 and fn1 are convertible, so the short-circuiting of (&&)
* ensures that all2 can't fail. *)
- conv l af bf && all2 (conv l) aargs bargs
-| V.Fn (aenv, arity, abody), V.Fn (benv, _, bbody) ->
- let vabody = eval (weaken aenv l arity) abody in
- let vbbody = eval (weaken benv l arity) bbody in
- conv (l + arity) vabody vbbody
-| V.FnType (aenv, arity, adoms, acod), V.FnType (benv, _, bdoms, bcod) ->
- let rec go l aenv adoms benv bdoms = match adoms, bdoms with
- | adom :: adoms, bdom :: bdoms ->
- conv l (eval aenv adom) (eval benv bdom)
- && go (l + 1) (V.Arb l :: aenv) adoms (V.Arb l :: benv) bdoms
- | [], [] -> conv l (eval aenv acod) (eval benv bcod)
- | _, _ -> failwith "impossible" in
- go l aenv adoms benv bdoms
+ conv env fn0 fn1 && all2 (conv env) args0 args1
+| V.Fn (V.Closure (cloenv0, fn0)), V.Fn (V.Closure (cloenv1, fn1)) ->
+ let I.SortedBinders (binders, _) = fn0.sorted_binders in
+ let arity = Array.length binders in
+ let {I.body = body0; _}, {I.body = body1; _} = fn0, fn1 in
+ let vars = Uniq.fresh_array arity in
+ let env = Env.extend env vars in
+ let cloenv0, cloenv1 = Env.extend cloenv0 vars, Env.extend cloenv1 vars in
+ let vbody0, vbody1 = eval cloenv0 body0, eval cloenv1 body1 in
+ conv env vbody0 vbody1
+| V.FnType (V.Closure (cloenv0, fntype0)), V.FnType (V.Closure (cloenv1, fntype1)) ->
+ let {I.sorted_binders = sorted_binders0; I.codomain = codomain0} = fntype0 in
+ let {I.sorted_binders = sorted_binders1; I.codomain = codomain1} = fntype1 in
+ let I.SortedBinders (binders0, order0) = sorted_binders0 in
+ let I.SortedBinders (binders1, order1) = sorted_binders1 in
+ if Array.length binders0 <> Array.length binders1 then false else
+ let arity = Array.length binders0 in
+ let vars = Uniq.fresh_array arity in
+ let env = Env.extend env vars in
+ let cloenv0, cloenv1 = Env.extend cloenv0 vars, Env.extend cloenv1 vars in
+ let domain_conv i0 i1 =
+ if i0 <> i1 then false else
+ let I.Binder (_, ty0) = binders0.(i0) in
+ let I.Binder (_, ty1) = binders1.(i1) in
+ let vty0 = eval cloenv0 ty0 in
+ let vty1 = eval cloenv1 ty1 in
+ conv env vty0 vty1 in
+ if not (all2 domain_conv order0 order1) then false else
+ let vcodomain0 = eval cloenv0 codomain0 in
+ let vcodomain1 = eval cloenv1 codomain1 in
+ conv env vcodomain0 vcodomain1
| V.Nat n, V.Nat m -> Z.equal n m
-| V.Arb i, V.Arb j -> i = j
-| V.Intrin a, V.Intrin b -> a = b
+| V.Arb var0, V.Arb var1 -> var0 = var1
+| V.Type, V.Type -> true
+| V.Builtin b0, V.Builtin b1 -> b0 = b1
+| Future _, _ -> impossible ()
+| _, Future _ -> impossible ()
| _, _ -> false
diff --git a/global.ml b/global.ml
@@ -0,0 +1,25 @@
+include Tsil.Global
+include Node.Global
+
+let impossible () = failwith "impossible"
+let no_impl feature = failwith ("no impl: " ^ feature)
+
+(* For some reason this isn't in the standard Array module... *)
+let rec fold_left2 f acc xs ys =
+ begin if Array.length xs <> Array.length ys then
+ invalid_arg "fold_left2"
+ end;
+ let acc = ref acc in
+ for i = 0 to Array.length xs - 1 do
+ acc := f !acc xs.(i) ys.(i)
+ done;
+ !acc
+
+(* Short-circuiting AND fold_left2. *)
+let rec all2 f xs ys =
+ begin if Array.length xs <> Array.length ys then
+ invalid_arg "all2"
+ end;
+ let len = Array.length xs in
+ let rec go i = (i = len) || (f xs.(i) ys.(i) && go (i + 1)) in
+ go 0
diff --git a/index.ml b/index.ml
@@ -0,0 +1,44 @@
+(* Pairs of de Bruijn indices. *)
+
+type index = Of of int * int
+type t = index
+
+let to_string (Of (i, j)) = string_of_int i ^ "." ^ string_of_int j
+
+module UniqTsil = struct
+ module IntMap = Map.Make(Int)
+
+ type t = {
+ length: int;
+ index_to_var: Uniq.t array IntMap.t;
+ var_to_index: (int * int) Uniq.Map.t;
+ }
+
+ let lin = {
+ length = 0;
+ index_to_var = IntMap.empty;
+ var_to_index = Uniq.Map.empty;
+ }
+
+ let snoc {length; index_to_var; var_to_index} vars =
+ let var_to_index = ref var_to_index in
+ for j = 0 to Array.length vars - 1 do
+ var_to_index := Uniq.Map.add vars.(j) (length, j) !var_to_index
+ done;
+ {
+ length = length + 1;
+ index_to_var = IntMap.add length vars index_to_var;
+ var_to_index = !var_to_index
+ }
+
+ let var_of {length; index_to_var; _} (Of (i, j)) =
+ let l = length - i - 1 in (* Convert index i to level l. *)
+ (IntMap.find l index_to_var).(j)
+
+ let index_of {length; var_to_index; _} var =
+ match Uniq.Map.find_opt var var_to_index with
+ | Some (l, j) ->
+ let i = length - l - 1 in (* Convert level l to index i. *)
+ Some (Of (i, j))
+ | None -> None
+end
\ No newline at end of file
diff --git a/internal.ml b/internal.ml
@@ -1,93 +1,99 @@
-open Common
+open Global
-(*
-type ('a, 'b) telescope = 'a list * 'b
-(* fn's have telescope domain AND codomain? i.e., telescopic mappings a la
- * de bruijn. *)
-(* subst's are (term, int) telescope (but the terminology breaks down here) *)
-
-let (@:) tm ty = {tm; ty}
-*)
-
-type intrin =
-| Type
+type builtin =
| NatType
| NatOpAdd
| NatOpMul
type term =
-| Let of term * term
-| Switch of term * case list
-| App of term * term list
-| Fn of int * term
-| FnType of int * term list * term
-| Data of ident * term list
-| DataType of term list IdentMap.t
+| Let of definer array * term
+(*| Switch of term * case array*)
+| App of term * term array
+| Fn of fn
+| FnType of fntype
+| Data of Tag.t * term array
+| DataType of datatype
| Nat of Z.t
-| Var of index
-| Intrin of intrin
-(*| Subst of term * subst*)
+| Var of Index.t
+| Type
+| Builtin of builtin
+
+and definer = Definer of term
+
+and binder = Binder of Plicity.t * term
+
+and sorted_binders = SortedBinders of binder array * int array
+
+and fn = {
+ sorted_binders: sorted_binders;
+ body: term;
+ codomain: term;
+}
+
+and fntype = {
+ sorted_binders: sorted_binders;
+ codomain: term;
+}
+
+and datatype = sorted_binders Tag.Map.t
+
+(*
and case = patt * term
and patt = patt_discriminant list
and patt_discriminant =
| PattDiscTrue
| PattDiscFalse
| PattDiscData of ident * int
-(*
-and subst =
-| Shift of int (* Nonnegative shift. Shift 0 is the identity substitution. *)
-| Cons of subst * term
-
-let rec cons_vars s = function
-| 0 -> s
-| n -> cons_vars (Cons (s, Var (n - 1))) (n - 1)
-
-(* compose f g is the substitution that does f first and g second. *)
-let rec compose f g = match f, g with
-| Shift 0, g -> g
-| Shift i, Shift j -> Shift (i + j)
-| Shift i, Cons (g, _) -> compose (Shift (i - 1)) g
-| Cons (f, t), g -> Cons (compose f g, substitute t g)
-
-and substitute t f = match t, f with
-| Let (t, u), f -> Let (Subst t f, Subst u f)
-| Switch _, _ -> failwith "no impl: switch"
-| App (t, us), f -> App (Subst (t, f), List.map (fun u -> Subst (u, f)) us)
-| Fn (arity, t), f ->
- let f = cons_vars (compose f (Shift arity)) arity in
- Fn (arity, Subst (t, f))
-| Fn (arity, )
-| Nat n -> Nat n
-| Var i, Shift j -> Var (i + j)
-| Var 0, Cons (_, u) -> u
-| Var i, Cons (f, _) -> substitute (Var (i - 1)) f
-| Intrin i, _ -> Intrin i
-| Subst (t, g), f -> substitute t (compose g f)
*)
-let rec to_string m e =
+let rec to_string m t =
let s = to_string in
- let (c, s) = match e with
- | Let (scrutinee, body) -> (0, s 1 scrutinee ^ "; " ^ s 0 body)
- | Switch _ -> failwith "no impl: switch"
- | App (f, args) -> (6, String.concat " " (List.map (s 7) (f :: args)))
- | Fn (arity, body) -> (1, "fn " ^ string_of_int arity ^ " => " ^ s 1 body)
- | FnType (_, doms, cod) ->
- let sdoms = match doms with
- | dom :: [] -> s 4 dom
- | doms -> "(" ^ String.concat ", " (List.map (s 1) doms) ^ ")" in
- (3, sdoms ^ " -> " ^ s 3 cod)
- | Data (tag, ts) ->
+ let (c, s) = match t with
+ | Let (definers, body) ->
+ (0,
+ String.concat ", " (Array.to_list (Array.map (definer_to_string 1) definers))
+ ^ "; "
+ ^ s 0 body
+ )
+ | App (fn, args) -> (6, String.concat " " (List.map (s 7) (fn :: Array.to_list args)))
+ | Fn {sorted_binders = SortedBinders (binders, _); body; codomain} ->
+ (1,
+ "fn "
+ ^ String.concat ", " (Array.to_list (Array.map (binder_to_string 2) binders))
+ ^ " : "
+ ^ s 2 codomain
+ ^ " => "
+ ^ s 1 body
+ )
+ | FnType {sorted_binders = SortedBinders (binders, _); codomain} ->
+ (3,
+ "["
+ ^ String.concat ", " (Array.to_list (Array.map (binder_to_string 3) binders))
+ ^ "]"
+ ^ " -> "
+ ^ s 3 codomain
+ )
+ | Data _ -> no_impl "print data"
+ (*
(6, "@" ^ String.concat " " (tag :: List.map (s 7) ts))
- | DataType ctors ->
+ *)
+ | DataType _ -> no_impl "print datatype"
+ (*
let string_of_ctor (tag, ts) =
"@" ^ String.concat " " (tag :: List.map (s 7) ts) in
let ctors = List.map string_of_ctor (IdentMap.to_list ctors) in
(8, "#[" ^ String.concat "; " ctors ^ "]")
- | Nat n -> (8, Z.to_string n)
- | Var i -> (8, "?" ^ string_of_int i)
- | Intrin Type -> (8, "`Type")
- | Intrin NatType -> (8, "`Nat")
- | Intrin NatOpAdd -> (8, "`+")
- | Intrin NatOpMul -> (8, "`*") in
+ *)
+ | Nat n -> (7, Z.to_string n)
+ | Var ij -> (7, "#" ^ Index.to_string ij)
+ | Type -> (7, "`Type")
+ | Builtin NatType -> (7, "`Nat")
+ | Builtin NatOpAdd -> (7, "`+")
+ | Builtin NatOpMul -> (7, "`*") in
if c < m then "(" ^ s ^ ")" else s
+
+and definer_to_string m (Definer t) = to_string m t
+
+and binder_to_string m = function
+| Binder (Plicity.Ex, t) -> to_string m t
+| Binder (Plicity.Im, t) -> "?" ^ to_string m t
diff --git a/lexer.mll b/lexer.mll
@@ -1,5 +1,5 @@
{
-open Common
+open Global
open Parser
exception Error of Loc.t * string
diff --git a/loc.ml b/loc.ml
@@ -1,6 +1,7 @@
-type t =
+type loc =
| None
| Loc of int * int
+type t = loc
let of_position (pos: Lexing.position) =
Loc (pos.pos_lnum, pos.pos_cnum - pos.pos_bol)
diff --git a/main.ml b/main.ml
@@ -1,14 +1,15 @@
-(*
+module I = Internal
+module V = Value
+
let buf = Lexing.from_string "\
- N := (A: Type, A, A -> A) -> A;\n\
- add: (N, N) -> N := fn m, n => fn A, z, s => m A (n A z s) s;\n\
+ N := [A: Type, A, A -> A] -> A;\n\
+ add: [N, N] -> N := fn m, n => fn A, z, s => m A (n A z s) s;\n\
two: N := fn A, z, s => s (s z);\n\
three: N := fn A, z, s => s (s (s z));\n\
five := add two three;\n\
five Nat 0 (fn n => n + 1)\n\
"
-*)
-let buf = Lexing.from_string "T := #[@a Nat; @b #()]; @a 3 : T"
+(*let buf = Lexing.from_string "T := #[@a Nat; @b #()]; @a 3 : T"*)
let concrete =
try Parser.start Lexer.token buf with
@@ -16,16 +17,39 @@ let concrete =
let () = print_endline ("[" ^ Loc.to_string loc ^ "] " ^ msg) in
exit 1
-let abstract = Cnctoabs.term concrete
+let abstract = Desugar.term concrete
+
+(* TODO: We should allow names to map to values in the context, so we don't
+ * need a pseudo-binder to create the top-level/default context. *)
+
+let ctx =
+ let nat_binder = I.Binder (Plicity.Ex, I.Builtin I.NatType) in
+ let nat_op_type = V.FnType (V.Closure (Env.empty, {
+ sorted_binders = I.SortedBinders ([|nat_binder; nat_binder|], [|0; 1|]);
+ codomain = I.Builtin I.NatType;
+ })) in
+ let (ctx, postbs) = Ctx.extend_and_bind_all Ctx.empty [|
+ {names = [|Name.Of "Type"|]; vtyp = V.Type};
+ {names = [|Name.Of "Nat"|]; vtyp = V.Type};
+ {names = [|Name.Of "`+"|]; vtyp = nat_op_type};
+ {names = [|Name.Of "`*"|]; vtyp = nat_op_type};
+ |] in
+ let ctx = Ctx.define ctx postbs.(0).var V.Type in
+ let ctx = Ctx.define ctx postbs.(1).var (V.Builtin I.NatType) in
+ let ctx = Ctx.define ctx postbs.(2).var (V.Builtin I.NatOpAdd) in
+ let ctx = Ctx.define ctx postbs.(3).var (V.Builtin I.NatOpMul) in
+ ctx
-let ctx = Elab.default_ctx
+(*
let (internal, ty) =
- try Elab.infer ctx abstract with
- | Elab.Error (loc, msg) ->
- let () = print_endline ("[" ^ Loc.to_string loc ^ "] " ^ msg) in
- exit 1
+ match Elab.infer ctx abstract with
+ | Ok r -> r
+ | Error e -> print_endline "elab error"; exit 1
+*)
+let (internal, ty) = Elab.infer' ctx abstract
-let internal' = Eval.quote ctx.len (Eval.eval ctx.values internal)
+let env = Ctx.to_env ctx
+let internal' = Eval.quote env (Eval.eval env internal)
let () = print_endline (Internal.to_string 0 internal)
let () = print_endline (Internal.to_string 0 internal')
diff --git a/name.ml b/name.ml
@@ -0,0 +1,7 @@
+type name = Of of string
+type t = name
+
+module Map = Map.Make(struct
+ type t = name
+ let compare (Of x) (Of y) = String.compare x y
+end)
diff --git a/node.ml b/node.ml
@@ -1,3 +1,13 @@
-type 'a t = Of of Loc.t * 'a
+(* TODO: I think I've been inconsistent about the order of loc and data when
+ * pattern matching on nodes... Check this. *)
-let map f (Of (loc, x)) = Of (loc, f x)
+type 'a t = {loc: Loc.t; data: 'a}
+
+let (@$) loc data = {loc; data}
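+(* e.g. loc @$ 42 builds the located node {loc; data = 42}. *)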
+
+let map f {loc; data} = {loc; data = f data}
+
+module Global = struct
+ type 'a node = 'a t = {loc: Loc.t; data: 'a}
+ let (@$) = (@$)
+end
diff --git a/parser.mly b/parser.mly
@@ -1,17 +1,21 @@
%{
-open Common
+open Global
module C = Concrete
(* To avoid reduce-reduce conflicts, we need to distinguish between raw
* expressions (which are generally of unknown syntactic category) and concrete
- * terms/patternss/etc. In a hand-written parser, we'd have more control over
+ * terms/patterns/etc. In a hand-written parser, we'd have more control over
* when to decide the syntactic category of the top of the token stack, so
- * we wouldn't need to make this distinction. *)
-type expr = expr' Node.t
+ * we wouldn't need to make this distinction.
+ *
+ * We use (length, list) pairs in exprs, because we don't know how long the
+ * lists will be until after parsing, but we convert to arrays for efficiency
+ * and convenience in concrete syntax and onwards. *)
+type expr = expr' node
and expr' =
-| Let of C.defn list * expr
+| Let of int * C.definer list * expr
| Seq of expr * expr
-| Match of expr * C.case list
+| Match of expr * int * C.case list
| If of expr * expr * expr
| Iff of expr * expr
| Annot of expr * expr
@@ -19,27 +23,37 @@ and expr' =
| Mul of expr * expr
| Or of expr * expr
| And of expr * expr
-| App of expr * expr list
-| Fn of C.param list * expr
-| FnType of C.domain list * expr
-| Variant of tag * expr list
-| VariantType of C.ctor list
-| Tuple of expr list
-| TupleType of C.domain list
+| App of expr * int * expr list
+| Fn of int * C.param list * expr
+| FnType of int * C.domain list * expr
+| Variant of Tag.t * int * expr list
+| VariantType of int * ctor list
+| Tuple of int * expr list
+| TupleType of int * C.domain list
| Unit
| Nat of Z.t
| Underscore
-| IndexedVar of name * int
-| Var of name
+| IndexedVar of Name.t * int
+| Var of Name.t
| Paren of expr
+and ctor = ctor' node
+and ctor' = Ctor of Tag.t * int * C.domain list
+
+let cons a (len, lst) = (len + 1, a :: lst)
+
+let to_array len lst =
+ let arr = Array.make len (Obj.magic 0) in
+ List.iteri (Array.set arr) lst;
+ arr
+
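+(* Sanity sketch for the helpers above (assumes lst has exactly len elements;
+ * otherwise the Obj.magic 0 placeholder leaks or Array.set raises):
+ *   cons 'a' (cons 'b' (0, []))  =  (2, ['a'; 'b'])
+ *   to_array 2 ['a'; 'b']        =  [|'a'; 'b'|] *)
+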
exception Error of Loc.t * string
let error loc msg = raise (Error (loc, msg))
-let rec term (Node.Of (loc, e)) = loc @$ match e with
-| Let (defns, body) -> C.Let (defns, term body)
+let rec term {loc; data = e} = loc @$ match e with
+| Let (len, definers, body) -> C.Let (to_array len definers, term body)
| Seq (a, b) -> C.Seq (term a, term b)
-| Match (scrutinee, cases) -> C.Match (term scrutinee, cases)
+| Match (scrutinee, len, cases) -> C.Match (term scrutinee, to_array len cases)
| If (c, t, f) -> C.If (term c, term t, term f)
| Iff (c, t) -> C.Iff (term c, term t)
| Annot (a, b) -> C.Annot (term a, term b)
@@ -47,13 +61,13 @@ let rec term (Node.Of (loc, e)) = loc @$ match e with
| Mul (a, b) -> C.Mul (term a, term b)
| Or _ -> error loc "'|' can not appear in terms (yet)"
| And _ -> error loc "'&' can not appear in terms (yet)"
-| App (f, args) -> C.App (term f, List.map term args)
-| Fn (params, body) -> C.Fn (params, term body)
-| FnType (doms, cod) -> C.FnType (doms, term cod)
-| Variant (tag, fields) -> C.Variant (tag, List.map term fields)
-| VariantType ctors -> C.VariantType ctors
-| Tuple fields -> C.Tuple (List.map term fields)
-| TupleType doms -> C.TupleType doms
+| App (f, len, args) -> C.App (term f, Array.map term (to_array len args))
+| Fn (len, params, body) -> C.Fn (to_array len params, term body)
+| FnType (len, doms, cod) -> C.FnType (to_array len doms, term cod)
+| Variant (tag, len, fields) -> C.Variant (tag, Array.map term (to_array len fields))
+| VariantType (len, ctors) -> C.VariantType (Array.map ctor (to_array len ctors))
+| Tuple (len, fields) -> C.Tuple (Array.map term (to_array len fields))
+| TupleType (len, doms) -> C.TupleType (to_array len doms)
| Unit -> C.Unit
| Nat n -> C.Nat n
| Underscore -> error loc "'_' can not appear in terms (yet)"
@@ -61,12 +75,15 @@ let rec term (Node.Of (loc, e)) = loc @$ match e with
| Var x -> C.Var (x, None)
| Paren e -> C.Paren (term e)
-let rec patt (Node.Of (loc, e)) = loc @$ match e with
+and ctor {loc; data = Ctor (tag, len, doms)} =
+ loc @$ C.Ctor (tag, to_array len doms)
+
+and patt {loc; data = e} = loc @$ match e with
| Annot (p, t) -> C.PAnnot (patt p, term t)
| Or (p, q) -> C.POr (patt p, patt q)
| And (p, q) -> C.PAnd (patt p, patt q)
-| Variant (tag, ps) -> C.PVariant (tag, List.map patt ps)
-| Tuple ps -> C.PTuple (List.map patt ps)
+| Variant (tag, len, ps) -> C.PVariant (tag, Array.map patt (to_array len ps))
+| Tuple (len, ps) -> C.PTuple (Array.map patt (to_array len ps))
| Unit -> C.PUnit
| Nat n -> C.PNat n
| Underscore -> C.PWild
@@ -106,32 +123,53 @@ let (@$) i x = loc_of i @$ x
%%
+(* TODO: Syntactic sugar: Allow application in patterns, with the following
+ * interpretation:
+ * p p1 ... pn := b ---> p := rec fn p1, ..., pn => b
+ * fn ..., p(p1 ... pn), ... => b ---> fn ..., p1, ..., pn, ... => b
+ * The pattern p in the second rule is restricted to being a composition of
+ * PParen and PAnnot. Note that application of the first rule may make the
+ * second rule applicable, and application of the first rule may need to be
+ * iterated until we reach a "normal form". *)
+
start: expr0 EOF { term $1 }
expr0: (* Definitions and sequencing *)
- | defn_list SEMICOLON expr0 { 2 @$ Let ($1, $3) }
+ | definer_list SEMICOLON expr0 {
+ let (len, lst) = $1 in
+ 2 @$ Let (len, lst, $3)
+ }
| expr1 SEMICOLON expr0 { 2 @$ Seq ($1, $3) }
| expr1 { $1 }
expr1: (* Control flow *)
(* TODO: Probably we should change the match syntax when staging is added,
* so that we can write something like `$match x ...` instead of
* `$(x match ...)` for conditional compilation. *)
- | expr2 MATCH LBRACE case_list RBRACE { 2 @$ Match($1, $4) }
+ | expr2 MATCH LBRACE case_list RBRACE {
+ let (len, lst) = $4 in
+ 2 @$ Match($1, len, lst)
+ }
(* TODO: if let: "if p := x then t else f"; and similarly for iff *)
| IF expr1 THEN expr1 ELSE expr1 { 1 @$ If ($2, $4, $6) }
| IFF expr1 DO expr1 { 1 @$ Iff ($2, $4) }
- | FN param_list DOUBLE_ARROW expr1 { 1 @$ Fn ($2, $4) }
+ | FN param_list DOUBLE_ARROW expr1 {
+ let (len, lst) = $2 in
+ 1 @$ Fn (len, lst, $4)
+ }
| expr2 { $1 }
expr2: (* Type annotations (right-associative) *)
| expr3 COLON expr2 { 2 @$ Annot ($1, $3) }
| expr3 { $1 }
expr3: (* Function arrow (right-associative) *)
- | expr6 SINGLE_ARROW expr3 { 2 @$ FnType ([1 @$ C.DExplicit (term $1)], $3) }
+ | expr6 SINGLE_ARROW expr3 {
+ 2 @$ FnType (1, [1 @$ C.DExplicit (term $1)], $3)
+ }
| LBRACK domain_list RBRACK SINGLE_ARROW expr3 {
- if List.length $2 = 0 then
+ let (len, lst) = $2 in
+ if len = 0 then
error (loc_of 2) "empty domain"
else
- 4 @$ FnType ($2, $5)
+ 4 @$ FnType (len, lst, $5)
}
| expr4 { $1 }
expr4: (* Additive binary operators (left-associative;
@@ -157,66 +195,84 @@ expr5and:
| expr5and AMPERSAND expr6 { 2 @$ And ($1, $3) }
| expr6 { $1 }
expr6:
- | expr7 expr7 expr7_apposition_list { 1 @$ App ($1, $2 :: $3) }
- | TAG expr7_apposition_list { 1 @$ Variant (Tag $1, $2) }
+ | expr7 expr7 expr7_apposition_list {
+ let (len, lst) = $3 in
+ 1 @$ App ($1, len + 1, $2 :: lst)
+ }
+ | TAG expr7_apposition_list {
+ let (len, lst) = $2 in
+ 1 @$ Variant (Tag.Of $1, len, lst)
+ }
| expr7 { $1 }
expr7:
| UNDERSCORE { 1 @$ Underscore }
- | INDEXED_NAME { 1 @$ let (x, i) = $1 in IndexedVar (Name x, i) }
- | NAME { 1 @$ Var (Name $1) }
+ | INDEXED_NAME { 1 @$ let (x, i) = $1 in IndexedVar (Name.Of x, i) }
+ | NAME { 1 @$ Var (Name.Of $1) }
| NAT { 1 @$ Nat $1 }
- | HASH_LBRACE ctor_list RBRACE { 1 @$ VariantType $2 }
- | HASH_LPAREN domain_list RPAREN { 1 @$ TupleType $2 }
- | LPAREN expr1 COMMA expr1_comma_list RPAREN { 1 @$ Tuple ($2 :: $4) }
+ | HASH_LBRACE ctor_list RBRACE {
+ let (len, lst) = $2 in
+ 1 @$ VariantType (len, lst)
+ }
+ | HASH_LPAREN domain_list RPAREN {
+ let (len, lst) = $2 in
+ 1 @$ TupleType (len, lst)
+ }
+ | LPAREN expr1 COMMA expr1_comma_list RPAREN {
+ let (len, lst) = $4 in
+ 1 @$ Tuple (len + 1, $2 :: lst)
+ }
| LPAREN RPAREN { 1 @$ Unit }
| LPAREN expr0 RPAREN { 1 @$ Paren $2 }
expr1_comma_list:
- | expr1 COMMA expr1_comma_list { $1 :: $3 }
- | expr1 { [$1] }
- | { [] }
+ | expr1 COMMA expr1_comma_list { cons $1 $3 }
+ | expr1 { (1, [$1]) }
+ | { (0, []) }
expr7_apposition_list:
- | expr7 expr7_apposition_list { $1 :: $2 }
- | { [] }
+ | expr7 expr7_apposition_list { cons $1 $2 }
+ | { (0, []) }
-defn:
- | expr2 COLON_EQ expr1 { 2 @$ C.Defn (patt $1, term $3) }
- | expr2 COLON_EQ REC expr1 { 2 @$ C.RecDefn (patt $1, term $4) }
-defn_list:
- | defn COMMA defn_list { $1 :: $3 }
- | defn { [$1] }
+definer:
+ | expr2 COLON_EQ expr1 { 2 @$ C.Definer (patt $1, term $3) }
+ | expr2 COLON_EQ REC expr1 { 2 @$ C.RecDefiner (patt $1, term $4) }
+definer_list:
+ | definer COMMA definer_list { cons $1 $3 }
+ | definer { (1, [$1]) }
case:
| expr2 DOUBLE_ARROW expr1 { 2 @$ C.Case (patt $1, term $3) }
case_list:
- | case COMMA case_list { $1 :: $3 }
- | case { [$1] }
- | { [] }
+ | case COMMA case_list { cons $1 $3 }
+ | case { (1, [$1]) }
+ | { (0, []) }
param:
- | expr2 { 1 @$ C.Param (Explicit, patt $1) }
- | QUESTION expr2 { 1 @$ C.Param (Implicit, patt $2) }
+ | expr2 { 1 @$ C.Param (Plicity.Ex, patt $1) }
+ | QUESTION expr2 { 1 @$ C.Param (Plicity.Im, patt $2) }
param_list:
- | param COMMA param_list { $1 :: $3 }
- | param { [$1] }
- | { [] }
+ | param COMMA param_list { cons $1 $3 }
+ | param { (1, [$1]) }
+ | { (0, []) }
domain:
| expr3 { 1 @$ C.DExplicit (term $1) }
| QUESTION expr3 { 1 @$ C.DImplicitType (patt $2) }
- | expr3 COLON expr2 { 2 @$ C.DAnnot (Explicit, patt $1, term $3) }
- | QUESTION expr3 COLON expr2 { 3 @$ C.DAnnot (Implicit, patt $2, term $4) }
+ | expr3 COLON expr2 { 2 @$ C.DAnnot (Plicity.Ex, patt $1, term $3) }
+ | QUESTION expr3 COLON expr2 { 3 @$ C.DAnnot (Plicity.Im, patt $2, term $4) }
domain_list:
- | domain COMMA domain_list { $1 :: $3 }
- | domain { [$1] }
- | { [] }
+ | domain COMMA domain_list { cons $1 $3 }
+ | domain { (1, [$1]) }
+ | { (0, []) }
ctor:
- | TAG domain_list { 1 @$ C.Ctor (Tag $1, $2) }
+ | TAG domain_list {
+ let (len, lst) = $2 in
+ 1 @$ Ctor (Tag.Of $1, len, lst)
+ }
ctor_list:
- | ctor SEMICOLON ctor_list { $1 :: $3 }
- | ctor { [$1] }
- | { [] }
+ | ctor SEMICOLON ctor_list { cons $1 $3 }
+ | ctor { (1, [$1]) }
+ | { (0, []) }
%%
diff --git a/plicity.ml b/plicity.ml
@@ -0,0 +1,2 @@
+type plicity = Im | Ex
+type t = plicity
diff --git a/porig.ml b/porig.ml
@@ -0,0 +1,65 @@
+(* TODO: Generalize to partial add and mul? *)
+module type Porig = sig
+ type t
+ val zero : t
+ val one : t
+ val default : t
+ val add : t -> t -> t
+ val mul : t -> t -> t
+ val leq : t -> t -> bool
+ val lub : t -> t -> t option
+ val to_string : t -> string
+end
+
+(* TODO: How does compilation work with this porig? What gets erased? *)
+module Intuitionistic: Porig = struct
+ type t = Zero
+
+ let zero = Zero
+ let one = Zero
+ let default = Zero
+
+ let add Zero Zero = Zero
+
+ let mul Zero Zero = Zero
+
+ let leq Zero Zero = true
+
+  let lub Zero Zero = Some Zero
+
+ let to_string Zero = "0"
+end
+
+(* TODO: discrete boolean porig and total boolean porig *)
+
+module NoneOneTons: Porig = struct
+ type t = Zero | One | Omega
+
+ let zero = Zero
+ let one = One
+ let default = Omega
+
+ let add a b = match a, b with
+ | Zero, b -> b
+ | a, Zero -> a
+ | _, _ -> Omega
+
+ let mul a b = match a, b with
+ | Zero, _ -> Zero
+ | _, Zero -> Zero
+ | One, b -> b
+ | _, _ -> Omega
+
+ let leq a b = a = b ||
+ match a, b with
+ | Zero, Omega -> true
+ | One, Omega -> true
+ | _, _ -> false
+
+  let lub a b = Some (if a = b then a else Omega)
+
+ let to_string = function
+ | Zero -> "0"
+ | One -> "1"
+ | Omega -> "ω"
+end
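+
+(* Sample computations in NoneOneTons (illustrative only: the constructors are
+ * hidden by the Porig sealing; read Zero/One/Omega as erased/linear/unrestricted
+ * usage if that is the intended interpretation):
+ *   add One One   = Omega
+ *   mul Zero One  = Zero
+ *   leq One Omega = true
+ *   lub Zero One  = Some Omega *)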
diff --git a/repr.ml b/repr.ml
@@ -0,0 +1,10 @@
+type repr =
+| RAlign of repr * int
+(*| Pad of repr * int*)
+| RPtr
+| RNat of int
+| RRepeat of repr * term1
+| RUnion of repr list
+| RStruct of repr list (* What about dependent representations? *)
+| RDepPair of repr * term1 * repr (* Left repr, left type, right repr in weakened context *)
+(* E.g., #(n: Nat, a: Arr T n) has repr RDepPair (RNat, ↑Nat, n: ↑Nat ⊢ RRepeat (reprof T) n) *)
diff --git a/shadow_stack.ml b/shadow_stack.ml
@@ -0,0 +1,31 @@
+type 'a t = {len: int; mutable arr: 'a array}
+
+let length s = s.len
+
+let capacity s = Array.length s.arr
+
+let make n = {len = 0; arr = Array.make n (Obj.magic 0)}
+
+let singleton v = {len = 1; arr = [|v|]}
+
+let get s i =
+ if i < 0 || i >= s.len then
+ invalid_arg "Shadow_stack.get"
+ else
+    s.arr.(s.len - i - 1)
+
+let reserve s n =
+ let cap = capacity s in
+ let rem = cap - s.len in
+ if n > rem then begin
+ let need = n - rem in
+ let cap = cap + max cap need in (* Ensure cap at least doubles. *)
+ let arr' = Array.make cap (Obj.magic 0) in
+ Array.blit s.arr 0 arr' 0 s.len;
+ s.arr <- arr'
+ end
+
+let push s v =
+ reserve s 1;
+  s.arr.(s.len) <- v;
+  {s with len = s.len + 1} (* New stack view; the backing array is shared. *)
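+
+(* Usage sketch: push returns a fresh stack view over the shared backing array,
+ * so only the most recently returned view should be pushed onto again.
+ *   let s0 = make 4 in
+ *   let s1 = push s0 1 in
+ *   let s2 = push s1 2 in
+ *   get s2 0 = 2 && get s2 1 = 1   (get counts down from the top) *)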
diff --git a/tag.ml b/tag.ml
@@ -0,0 +1,7 @@
+type tag = Of of string
+type t = tag
+
+module Map = Map.Make(struct
+ type t = tag
+ let compare (Of x) (Of y) = String.compare x y
+end)
diff --git a/tsil.ml b/tsil.ml
@@ -5,11 +5,11 @@
* reversed tsils and vice versa (and this module provides functions in service
* of this viewpoint), so disciplined use of tsils and lists can help prevent
* accidental telescope/context reversal via static checks in OCaml. That is,
- * if a tsil is a context and a list is a reversed context, then it is
- * impossible to accidentally reverse a context, because that would amount to
- * an OCaml type error. *)
+ * if a tsil represents a context and a list represents a reversed context,
+ * then it is impossible to accidentally reverse a context, because that would
+ * amount to an OCaml type error. *)
-type 'a t = Nil | Snoc of 'a t * 'a
+type 'a t = Lin | Snoc of 'a t * 'a
(* Miraculously, (<@) is left associative and (@>) is right associative, as
* one would expect. *)
@@ -21,15 +21,15 @@ let appendl = (<@)
let rec (@>) l r = match l with
| Snoc (l, x) -> l @> (x :: r)
-| Nil -> r
+| Lin -> r
let appendr = (@>)
-let of_list r = Nil <@ r
+let of_list r = Lin <@ r
let to_list l = l @> []
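+(* e.g. to_list (Snoc (Snoc (Lin, 1), 2)) = [1; 2]: the snoc end of a tsil is
+ * the last element of the corresponding list. *)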
module Global = struct
- type 'a tsil = 'a t = Nil | Snoc of 'a tsil * 'a
+ type 'a tsil = 'a t = Lin | Snoc of 'a tsil * 'a
let (<@) = (<@)
let (@>) = (@>)
end
diff --git a/uniq.ml b/uniq.ml
@@ -0,0 +1,19 @@
+type uniq = Uniq of int
+type t = uniq
+
+exception Exhaustion
+
+let fresh =
+ let i = ref (-1) in
+ fun () ->
+ if !i = max_int then
+ raise Exhaustion (* This should never happen in practice. *)
+ else
+ (incr i; Uniq !i)
+
+let fresh_array n = Array.init n (fun _ -> fresh ())
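+(* Each call to fresh yields a distinct uniq, so fresh_array n produces n
+ * pairwise-distinct values. *)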
+
+module Map = Map.Make(struct
+ type t = uniq
+ let compare (Uniq x) (Uniq y) = Int.compare x y
+end)
diff --git a/value.ml b/value.ml
@@ -1,17 +1,47 @@
-open Common
+open Global
module I = Internal
-(* TODO: Abstract notion of closure, so we don't need to manage environments
- * ad-hoc while elaborating (it's error prone!). *)
-
type value =
-| Switch of value * environ * I.case list
-| App of value * value list
-| Fn of environ * int * I.term
-| FnType of environ * int * I.term list * I.term
-| Data of ident * value list
-| DataType of environ * I.term list IdentMap.t
+(*| Switch of ...*)
+| App of value * value array
+| Fn of I.fn closure
+| FnType of I.fntype closure
+| Data of Tag.t * value array
+| DataType of I.datatype closure
| Nat of Z.t
-| Arb of level
-| Intrin of I.intrin
-and environ = value list
+| Arb of Uniq.t
+(*| Meta of Uniq.t*)
+| Type
+| Builtin of I.builtin
+| Lazy of value Lazy.t (* TODO: See comment in eval.ml about optimization. *)
+| Future of Uniq.t * int
+
+(* TODO: Does this belong in the Env module? *)
+and 'a closure = Closure of value Env.environment * 'a
+
+(*
+let rec to_string m e =
+ let s = to_string in
+ let (c, s) = match e with
+ | App (fn, args) -> (6, (s 7 fn) String.concat " " (List.map (s 7) (f :: args)))
+ | Fn (arity, body) -> (1, "fn " ^ string_of_int arity ^ " => " ^ s 1 body)
+ | FnType (_, doms, cod) ->
+ let sdoms = match doms with
+ | dom :: [] -> s 4 dom
+ | doms -> "(" ^ String.concat ", " (List.map (s 1) doms) ^ ")" in
+ (3, sdoms ^ " -> " ^ s 3 cod)
+ | Data (tag, ts) ->
+ (6, "@" ^ String.concat " " (tag :: List.map (s 7) ts))
+ | DataType ctors ->
+ let string_of_ctor (tag, ts) =
+ "@" ^ String.concat " " (tag :: List.map (s 7) ts) in
+ let ctors = List.map string_of_ctor (IdentMap.to_list ctors) in
+ (8, "#[" ^ String.concat "; " ctors ^ "]")
+ | Nat n -> (8, Z.to_string n)
+ | Var i -> (8, "?" ^ string_of_int i)
+ | Intrin Type -> (8, "`Type")
+ | Intrin NatType -> (8, "`Nat")
+ | Intrin NatOpAdd -> (8, "`+")
+ | Intrin NatOpMul -> (8, "`*") in
+ if c < m then "(" ^ s ^ ")" else s
+*)