commit 796bd77fe16a3575fa41d22b26510dbd470f4acd
parent 3f68337b4bd42addfa6f5f3b1c49a9324f4ab45e
Author: Robert Russell <robertrussell.72001@gmail.com>
Date: Mon, 8 Jan 2024 17:58:29 -0800
Rework some stuff; bring back data and datatype support
Diffstat:
| M | Makefile | | | 1 | + |
| M | abstract.ml | | | 51 | +++++++++++++++++---------------------------------- |
| M | ctx.ml | | | 135 | +++++++++++++++++++++++++++++++++++++++++++------------------------------------ |
| M | desugar.ml | | | 35 | +++++++++++++++++++++-------------- |
| M | elab.ml | | | 609 | ++++++++++++++++++++++++++++++++++++------------------------------------------- |
| M | env.ml | | | 30 | ++++++++++++------------------ |
| M | eval.ml | | | 194 | ++++++++++++++++++++++++++++++++++++------------------------------------------- |
| M | global.ml | | | 16 | +++++++++++++++- |
| M | index.ml | | | 6 | +++++- |
| M | internal.ml | | | 130 | ++++++++++++++++++++++++++++++++++++++++++++++--------------------------------- |
| M | main.ml | | | 27 | +++++++++++---------------- |
| A | modules.ml | | | 72 | ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ |
| M | node.ml | | | 4 | ++-- |
| M | plicity.ml | | | 6 | ++++++ |
| A | pretty.ml | | | 24 | ++++++++++++++++++++++++ |
| D | shadow_stack.ml | | | 31 | ------------------------------- |
| M | tsil.ml | | | 2 | ++ |
| M | uniq.ml | | | 12 | ++++++------ |
| M | value.ml | | | 9 | +++------ |
19 files changed, 716 insertions(+), 678 deletions(-)
diff --git a/Makefile b/Makefile
@@ -1,5 +1,6 @@
SRC = \
tsil.ml \
+ pretty.ml \
name.ml \
tag.ml \
uniq.ml \
diff --git a/abstract.ml b/abstract.ml
@@ -2,53 +2,39 @@ open Global
(* Abstract syntax is "desugared" concrete syntax. See desugar.ml. *)
-(*
-module type Definer = sig
- type definiendum
- type definiens
-
- type t =
- | NonRec of definiendum * definiens
- | Rec of definiendum * definiens
-
- val to_string : t -> string
-end
-
-module type Binder = sig
- type definiendum
- type definiens
- type t =
- | NonRec of definiendum * definiens
- | Rec of definiendum * definiens
-end
-*)
-
type term = term' node
and term' =
| Let of definer array * term
| Match of term * case array
| Annot of term * term
| App of term * term array
-| Fn of param array * term
-| FnType of domain array * term
+| Fn of binder array * term
+| FnType of binder array * term
| Data of Tag.t * term array
-| DataType of domain array Tag.Map.t
+| DataType of ctor Tag.Map.t
| Nat of Z.t
| Var of Name.t * int
| Type (* Impossible to write in concrete syntax. Arises during desugaring. *)
and definer = definer' node
-and definer' =
-| Definer of patt * term
-| RecDefiner of patt * term
+and definer' = {
+ recursive: bool;
+ lhs: patt;
+ rhs: term;
+}
and case = case' node
and case' = Case of patt * term
-and 'a binder = 'a binder' node
-and 'a binder' = Binder of Plicity.t * patt * 'a
-and param = unit binder
-and domain = term binder
+and binder = binder' node
+and binder' = {
+ plicity: Plicity.t;
+ patt: patt;
+ typ: term option;
+}
+
+and ctor = ctor' node
+and ctor' = Ctor of binder array
and patt = patt' node
and patt' =
@@ -56,6 +42,3 @@ and patt' =
| PBind of Name.t
| PAnnot of patt * term
| PData of Tag.t * patt array
-
-let param_annot {data = Binder (_, _, ()); _} = None
-let domain_annot {data = Binder (_, _, (a : term)); _} = Some a
diff --git a/ctx.ml b/ctx.ml
@@ -1,68 +1,75 @@
open Global
-(* TODO: Rename to bindee. *)
-type 'a prebinding = {
- names: Name.t array;
- vtyp: 'a;
-}
+(* TODO: Somewhere we need to forbid multi-binders and multi-definers from
+ * declaring the same name twice. *)
-(* TODO: Rename to binding. *)
-type 'a postbinding = {
- var: Uniq.t;
- mutable vtyp: 'a;
+type 'a assignment = {
+ mutable typ: 'a;
+ mutable value: 'a;
mutable active: bool;
}
-(* We decouple names in the surface language from variables in the core
- * language/semantic domain. This means we can have multiple surface names
- * that refer to the same variable (which happens with AND patterns), or we
- * can have no surface names that refer to a variable.
- *
- * Since we have multi-binders (n-ary lambdas and mutually recursive lets), we
- * use pairs of de Bruijn indices in core syntax. The first index refers to the
- * multi-binder, and the second index refers to the binding in that
- * multi-binder. We have
- * index_to_uniq: Uniq.t array Index.Map.t
- * instead of
- * index_to_uniq: Uniq.t array <some efficient but mutable stack type>
- * because index_to_uniq gets saved in closures, so it must not be mutable. *)
type 'a context = {
- names: 'a postbinding tsil Name.Map.t; (* XXX: tsil! Yuck! *)
- indices: Index.UniqTsil.t;
- values: 'a Uniq.Map.t;
+ assignments: 'a assignment tsil Name.Map.t; (* XXX: tsil! Yuck! *)
+ bindings: Index.UniqTsil.t;
+ definitions: 'a Uniq.Map.t;
}
let empty = {
- names = Name.Map.empty;
- indices = Index.UniqTsil.lin;
- values = Uniq.Map.empty;
+ assignments = Name.Map.empty;
+ bindings = Index.UniqTsil.lin;
+ definitions = Uniq.Map.empty;
}
-let to_env {indices; values; _} = {Env.indices; Env.values}
+let to_env {bindings; definitions; _} = {Env.bindings; Env.definitions}
-let extend ctx vars = {ctx with indices = Index.UniqTsil.snoc ctx.indices vars}
+let assign ctx names typ value =
+ let asn = {typ; value; active = true} in
+ let shadow ctx name =
+ let add_to_tsil = function
+ | None -> Some (Snoc (Lin, asn))
+ | Some t -> Some (Snoc (t, asn)) in
+ let assignments = Name.Map.update name add_to_tsil ctx.assignments in
+ {ctx with assignments} in
+ (Array.fold_left shadow ctx names, asn)
-let extend_fresh ctx n =
- let vars = Uniq.fresh_array n in
- (extend ctx vars, vars)
+let bind ctx vars =
+ {ctx with bindings = Index.UniqTsil.snoc ctx.bindings vars}
-(* TODO: Somewhere we need to forbid multi-binders and multi-definers from
- * declaring the same name twice. *)
+let define ctx var value =
+ {ctx with definitions = Uniq.Map.add var value ctx.definitions}
-let bind_one ctx {names; vtyp} var =
- let postb = {var; vtyp; active = true} in
- let ctx = ref ctx in
- for i = 0 to Array.length names - 1 do
- let shadow = function
- | None -> Some (Snoc (Lin, postb))
- | Some l -> Some (Snoc (l, postb)) in
- ctx := {!ctx with names = Name.Map.update names.(i) shadow !ctx.names}
- done;
- (!ctx, postb)
+let lookup ctx name i =
+ match Name.Map.find_opt name ctx.assignments with
+ | Some t ->
+ let rec go i t = match i, t with
+ | 0, Snoc (_, ({active = true; _} as asn)) -> Some asn
+ | i, Snoc (t, {active = true; _}) -> go (i - 1) t
+ | i, Snoc (t, {active = false; _}) -> go i t
+ | _, Lin -> None in
+ go i t
+ | None -> None
+
+let index ctx ij = Index.UniqTsil.var_of ctx.bindings ij
+
+let quote ctx var = Index.UniqTsil.index_of ctx.bindings var
+
+let eval ctx var = Uniq.Map.find_opt var ctx.definitions
-(* TODO: Rename to extend_fresh_and_bind_blah? That's pretty long. Pick a
- * more reasonable naming scheme. *)
+
+
+
+
+
+(*
+type 'a assignee = {
+ names: Name.t array;
+ typ: 'a;
+}
+*)
+
+(*
let extend_and_bind_one ctx preb =
let (ctx, vars) = extend_fresh ctx 1 in
let (ctx, postb) = bind_one ctx preb vars.(0) in
@@ -70,7 +77,7 @@ let extend_and_bind_one ctx preb =
let extend_and_bind_all ctx prebs =
let n = Array.length prebs in
- let postbs = Array.make n (Obj.magic 0) in
+ let postbs = Array.make n garbage in
let (ctx, vars) = extend_fresh ctx n in
let ctx = ref ctx in
for i = 0 to n - 1 do
@@ -79,17 +86,23 @@ let extend_and_bind_all ctx prebs =
postbs.(i) <- postb
done;
(!ctx, postbs)
+*)
-let define ctx var value =
- {ctx with values = Uniq.Map.add var value ctx.values}
-
-let lookup ctx x i =
- match Name.Map.find_opt x ctx.names with
- | Some l ->
- let rec go i l = match i, l with
- | 0, Snoc (_, ({active = true; _} as preb)) -> Some preb
- | i, Snoc (l, {active = true; _}) -> go (i + 1) l
- | i, Snoc (l, {active = false; _}) -> go i l
- | _, Lin -> None in
- go i l
- | None -> None
+(*
+let extend_fresh ctx n =
+ let vars = Uniq.fresh_array n in
+ (extend ctx vars, vars)
+*)
+
+(*
+let bind_one ctx {names; vtyp} var =
+ let postb = {var; vtyp; active = true} in
+ let ctx = ref ctx in
+ for i = 0 to Array.length names - 1 do
+ let shadow = function
+ | None -> Some (Snoc (Lin, postb))
+ | Some l -> Some (Snoc (l, postb)) in
+ ctx := {!ctx with names = Name.Map.update names.(i) shadow !ctx.names}
+ done;
+ (!ctx, postb)
+*)
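
The reworked Ctx API above splits the old extend/bind_one into three orthogonal operations: assign records surface names with a type and a value, bind pushes a multibinder of fresh variables, and define records a definition for evaluation. A minimal sketch of how shadowing interacts with lookup, using only functions introduced in this commit (the bindings themselves are illustrative, not part of the change):

let _shadowing_sketch () =
  let vars = Uniq.fresh 2 in
  let ctx = Ctx.bind Ctx.empty vars in
  (* Two assignments of the same name: the later one shadows the earlier. *)
  let (ctx, _) = Ctx.assign ctx [|Name.Of "x"|] Value.Type (Value.Arb vars.(0)) in
  let (ctx, _) = Ctx.assign ctx [|Name.Of "x"|] Value.Type (Value.Arb vars.(1)) in
  (* lookup's index argument counts how many active assignments of the same
   * name to skip: index 0 resolves to vars.(1), index 1 to vars.(0). *)
  (Ctx.lookup ctx (Name.Of "x") 0, Ctx.lookup ctx (Name.Of "x") 1)
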
diff --git a/desugar.ml b/desugar.ml
@@ -8,13 +8,18 @@ let error loc msg = raise (Error (loc, msg))
let rec term = function
(* Handle parens specially, so that we use the inner loc. *)
| {data = C.Paren t; _} -> term t
-| n -> Node.map term' n
-and term' = function
+| {loc; data} -> loc @$ term' loc data
+and term' loc = function
| C.Let (definers, body) -> A.Let (Array.map definer definers, term body)
(* TODO: Maybe Seq should desugar to a unit pattern instead, so that
* non-unit results can't be ignored. *)
| C.Seq (t, u) ->
- A.Let ([|Loc.None @$ A.Definer (Loc.None @$ A.PWild, term t)|], term u)
+ let definer = Loc.None @$ {
+ A.recursive = false;
+ A.lhs = Loc.None @$ A.PWild;
+ A.rhs = term t;
+ } in
+ A.Let ([|definer|], term u)
| C.Match (scrutinee, cases) -> A.Match (term scrutinee, Array.map case cases)
| C.If (c, t, f) ->
A.Match (term c, [|
@@ -33,21 +38,23 @@ and term' = function
* built-in operations. *)
| C.Add (t, u) -> A.App (Loc.None @$ A.Var (Name.Of "`+", 0), [|term t; term u|])
| C.Mul (t, u) -> A.App (Loc.None @$ A.Var (Name.Of "`*", 0), [|term t; term u|])
-| C.App (f, args) -> A.App (term f, Array.map term args)
+| C.App (fn, args) -> A.App (term fn, Array.map term args)
| C.Fn (params, body) -> A.Fn (Array.map param params, term body)
| C.FnType (doms, cod) -> A.FnType (Array.map domain doms, term cod)
| C.Variant (tag, fields) -> A.Data (tag, Array.map term fields)
| C.VariantType ctors ->
- let insert m {loc; data = C.Ctor (tag, doms)} =
+ let insert m {loc = ctor_loc; data = C.Ctor (tag, doms)} =
if Tag.Map.mem tag m then
- error loc "duplicate constructor"
+ error ctor_loc "duplicate constructor"
else
- Tag.Map.add tag (Array.map domain doms) m in
+ let ctor = ctor_loc @$ A.Ctor (Array.map domain doms) in
+ Tag.Map.add tag ctor m in
let ctors = Array.fold_left insert Tag.Map.empty ctors in
A.DataType ctors
| C.Tuple fields -> A.Data (Tag.Of "", Array.map term fields)
| C.TupleType doms ->
- A.DataType (Tag.Map.singleton (Tag.Of "") (Array.map domain doms))
+ let ctor = loc @$ A.Ctor (Array.map domain doms) in
+ A.DataType (Tag.Map.singleton (Tag.Of "") ctor)
| C.Unit -> A.Data (Tag.Of "", [||])
| C.Nat n -> A.Nat n
| C.Var (x, Some i) -> A.Var (x, i)
@@ -56,20 +63,20 @@ and term' = function
and definer d = Node.map definer' d
and definer' = function
-| C.Definer (p, t) -> A.Definer (patt p, term t)
-| C.RecDefiner (p, t) -> A.RecDefiner (patt p, term t)
+| C.Definer (lhs, rhs) -> {recursive = false; lhs = patt lhs; rhs = term rhs}
+| C.RecDefiner (lhs, rhs) -> {recursive = true; lhs = patt lhs; rhs = term rhs}
and case c = Node.map case' c
and case' (C.Case (p, t)) = A.Case (patt p, term t)
and param par = Node.map param' par
-and param' (C.Param (plicity, p)) = A.Binder (plicity, patt p, ())
+and param' (C.Param (plicity, p)) = {plicity; patt = patt p; typ = None}
and domain dom = Node.map domain' dom
and domain' = function
-| C.DExplicit t -> A.Binder (Plicity.Ex, Loc.None @$ A.PWild, term t)
-| C.DImplicitType p -> A.Binder (Plicity.Im, patt p, Loc.None @$ A.Type)
-| C.DAnnot (plicity, p, t) -> A.Binder (plicity, patt p, term t)
+| C.DExplicit t -> {plicity = Plicity.Ex; patt = Loc.None @$ A.PWild; typ = Some (term t)}
+| C.DImplicitType p -> {plicity = Plicity.Im; patt = patt p; typ = Some (Loc.None @$ A.Type)}
+| C.DAnnot (plicity, p, t) -> {plicity; patt = patt p; typ = Some (term t)}
and patt = function
(* Handle parens specially, so that we use the inner loc. *)
diff --git a/elab.ml b/elab.ml
@@ -17,12 +17,13 @@ and error' =
| ErrUnifyFail of V.value * V.value
| ErrDataPattRefutable
| ErrBinderCycle of Name.t list
-| ErrAppArityMismatch
+| ErrArityMismatch
| ErrUnboundVar of Name.t * int
| ErrExpectedFnType of V.value
| ErrExpectedDataType of V.value
| ErrFnArityMismatch
| ErrFnPlicityMismatch
+| ErrUnexpectedCtor of Tag.t
exception Exn of error
@@ -41,8 +42,8 @@ let rec infer ctx t =
and infer' ctx {loc; data = t} = match t with
| A.Let (definers, body) ->
let (ctx, definers) = infer_definers' ctx definers in
- let (body, va) = infer' ctx body in
- (I.Let (definers, body), va)
+ let (body, vtyp) = infer' ctx body in
+ (I.Let (definers, body), vtyp)
| A.Match _ -> no_impl "infer match"
@@ -53,272 +54,268 @@ and infer' ctx {loc; data = t} = match t with
(t, va)
| A.App (fn, explicit_args) ->
- let explicit_count = Array.length explicit_args in
-
let (fn, va) = infer' ctx fn in
- begin match force va with
- | V.FnType (V.Closure (cloenv, {sorted_binders; codomain})) ->
- let I.SortedBinders (binders, order) = sorted_binders in
- let arity = Array.length binders in
- let implicit_count = ref 0 in
-
- (* Elaborate explicits to Some and implicits to None. *)
- let args = Array.make arity (Obj.magic 0) in
- for i = 0 to arity - 1 do
- (* TODO: Once we allow passing an implicit argument explicitly,
- * this logic will need to change a bit. *)
- args.(i) <- match binders.(i) with
- | I.Binder (Plicity.Ex, _) ->
- let j = i - !implicit_count in
- begin if explicit_count <= j then
- abort (loc @$ ErrAppArityMismatch)
- end;
- Some (explicit_args.(j))
- | I.Binder (Plicity.Im, _) ->
- incr implicit_count;
- None
- done;
-
- (* We had enough explicit args, but do we have too many? *)
- begin if explicit_count > arity - !implicit_count then
- abort (loc @$ ErrAppArityMismatch)
- end;
-
- (* Check arguments. *)
- let args' = Array.make arity (Obj.magic 0) in
- let (cloenv, vars) = Env.extend_fresh cloenv arity in
- let cloenv = ref cloenv in
- for i = 0 to arity - 1 do
- let j = order.(i) in
- let I.Binder (_, typ) = binders.(j) in
- let vtyp = eval !cloenv typ in
- let (arg, varg) = match args.(j) with
- | Some arg ->
- let arg = check' ctx arg vtyp in
- let varg = eval (Ctx.to_env ctx) arg in
- (arg, varg)
- | None -> no_impl "implict args" in
- args'.(j) <- arg;
- cloenv := Env.define !cloenv vars.(j) varg
- done;
-
- let vcodomain = eval !cloenv codomain in
- (I.App (fn, args'), vcodomain)
+ begin match force (Ctx.to_env ctx) va with
+ | V.FnTypeClosure (clo, {binders; codomain}) ->
+ let (args, clo) = check_args' ctx loc explicit_args clo binders in
+ let vcodomain = eval clo codomain in
+ (I.App (fn, args), vcodomain)
| va -> abort (loc @$ ErrApplyNonFunction va)
end
| A.Fn (params, body) ->
- no_impl "ahh"
- (*
- let (ctx', sorted_binders) = infer_binders' ctx loc A.param_annot params in
+ let (ctx', binders) = infer_binders' ctx loc params in
let (body, vcodomain) = infer' ctx' body in
let codomain = quote (Ctx.to_env ctx') vcodomain in
- let fn = I.Fn {sorted_binders; body; codomain} in
- let fntype = V.FnType (V.Closure (Ctx.to_env ctx, {sorted_binders; codomain})) in
- (fn, fntype)
- *)
+ let fn = I.Fn {binders; body; codomain} in
+ let vfntype = V.FnTypeClosure (Ctx.to_env ctx, {binders; codomain}) in
+ (fn, vfntype)
| A.FnType (domains, codomain) ->
- let (ctx, sorted_binders) = infer_binders' ctx loc A.domain_annot domains in
+ let (ctx, binders) = infer_binders' ctx loc domains in
let codomain = check' ctx codomain V.Type in
- (I.FnType {sorted_binders; codomain}, V.Type)
+ (I.FnType {binders; codomain}, V.Type)
| A.Data _ -> abort (loc @$ ErrCantInferData)
-| A.DataType ctors -> no_impl "infer datatype"
- (*
- let rec go ctx = function
- | Node.Of (_, ann) :: anns ->
- let (p, a) = match ann with
- | A.AType a -> (Loc.None @$ A.PWild, a)
- | A.APatt (p, a) -> (p, a) in
- let a = check ctx a (V.Intrin I.Type) in
- let va = eval ctx.values a in
- let x = check_patt ctx p va in
- a :: go (bind_arb ctx x va) anns
- | [] -> [] in
- (I.DataType (IdentMap.map (go ctx) ctors), V.Intrin I.Type)
- *)
+| A.DataType ctors ->
+ let ctors = Tag.Map.map (infer_ctor' ctx) ctors in
+ (I.DataType ctors, V.Type)
| A.Nat n -> (I.Nat n, V.Builtin I.NatType)
-| A.Var (x, i) ->
- begin match Ctx.lookup ctx x i with
- | Some {vtyp = V.Future (id, j); _} -> raise (BlockedOnFuture (id, j, x))
- | Some {var; vtyp; _} -> (quote (Ctx.to_env ctx) (V.Arb var), vtyp)
- | None -> abort (loc @$ ErrUnboundVar (x, i))
+| A.Var (name, i) ->
+ begin match Ctx.lookup ctx name i with
+ | Some {typ = V.Future (id, j); _} -> raise (BlockedOnFuture (id, j, name))
+ | Some {typ; value; _} -> (quote (Ctx.to_env ctx) value, typ)
+ | None -> abort (loc @$ ErrUnboundVar (name, i))
end
| A.Type -> (I.Type, V.Type)
-and check ctx t va =
- try Ok (check' ctx t va) with
+and check ctx t vtyp =
+ try Ok (check' ctx t vtyp) with
| Exn e -> Error e
-and check' ctx ({loc; data = t'} as t) va = match t', force va with
-| A.Let (definers, body), va ->
+and check' ctx ({loc; data = t} as node) vtyp = match t, force (Ctx.to_env ctx) vtyp with
+| A.Let (definers, body), vtyp ->
let (ctx, definers) = infer_definers' ctx definers in
- let body = check' ctx body va in
+ let body = check' ctx body vtyp in
I.Let (definers, body)
| A.Match _, _ -> no_impl "check match"
-| A.Fn (params, body), V.FnType (V.Closure (cloenv, {sorted_binders; codomain})) ->
- let I.SortedBinders (domains, order) = sorted_binders in
+| A.Fn (params, body), V.FnTypeClosure (clo, fntype) ->
+ let {I.binders = {binders = domains; order}; I.codomain} = fntype in
begin if Array.length params <> Array.length domains then
abort (loc @$ ErrFnArityMismatch)
end;
let arity = Array.length params in
- (* TODO: Should the domain(s) of V.FnTypes (and similarly V.DataTypes) be
- * stored already applied to variables? This would avoid the need to extend
- * here (and elsewhere?), but it may require quoting elsewhere. *)
-
- (* Weaken both ctx and cloenv with the same variables. *)
- let vars = Uniq.fresh_array arity in
- let ctx = Ctx.extend ctx vars in
- let cloenv = Env.extend cloenv vars in
+ let vars = Uniq.fresh arity in
+ let ctx = Ctx.bind ctx vars in
+ let clo = Env.bind clo vars in
let ctx = ref ctx in
- let params' = Array.make arity (Obj.magic 0) in
+ let params' = Array.make arity garbage in
for i = 0 to arity - 1 do
let j = order.(i) in
- let A.Binder (param_plicity, patt, ()) = params.(j).data in
- let I.Binder (domain_plicity, domain) = domains.(j) in
+ let param = params.(j).data in
+ let domain = domains.(j) in
- begin if param_plicity <> domain_plicity then
+ begin if Option.is_some param.typ then
+ impossible ();
+ end;
+
+ begin if param.plicity <> domain.plicity then
abort (loc @$ ErrFnPlicityMismatch)
end;
- let plicity = param_plicity in
+ let plicity = param.plicity in
- let vdomain = eval cloenv domain in
- let names = check_irrefutable_patt' !ctx patt vdomain in
- params'.(j) <- I.Binder (plicity, quote (Ctx.to_env !ctx) vdomain);
+ let vtyp = eval clo domain.typ in
+ let typ = quote (Ctx.to_env !ctx) vtyp in
+ let names = check_irrefutable_patt' !ctx param.patt vtyp in
- (* Bind the names in ctx, since the other binders may refer to them. *)
- let (ctx', _) = Ctx.bind_one !ctx {names; vtyp = vdomain} vars.(j) in
+ (* Assign the names in ctx. The other params may refer to them. *)
+ let (ctx', _) = Ctx.assign !ctx names vtyp (V.Arb vars.(j)) in
ctx := ctx';
+ params'.(j) <- {I.plicity; I.typ}
done;
- let vcodomain = eval cloenv codomain in
+
+ let vcodomain = eval clo codomain in
+ let codomain = quote (Ctx.to_env !ctx) vcodomain in
let body = check' !ctx body vcodomain in
- I.Fn {
- sorted_binders = I.SortedBinders (params', order);
- body;
- codomain = quote (Ctx.to_env !ctx) vcodomain;
- }
-
-| A.Fn _, va -> abort (loc @$ ErrExpectedFnType va)
-
-| A.Data (tag, fields), V.DataType (V.Closure (cloenv, ctors)) ->
- no_impl "check data"
- (*
- begin match IdentMap.find_opt tag ctors with
- | Some bs ->
- if List.length args <> List.length bs then
- error loc "arity mismatch"
- else
- (* TODO: this is similar to Fn checking code. *)
- let rec go args env bs = match args, bs with
- | arg :: args, b :: bs ->
- let vb = eval env b in
- let arg = check ctx arg vb in
- let varg = eval ctx.values arg in
- arg :: go args (varg :: env) bs
- | [], [] -> []
- | _, _ -> failwith "impossible" in
- I.Data (tag, go args env bs)
- | None -> error loc ("expected data type with constructor \"" ^ tag ^ "\"")
+
+ I.Fn {binders = {binders = params'; order}; body; codomain}
+
+| A.Fn _, vtyp -> abort (loc @$ ErrExpectedFnType vtyp)
+
+| A.Data (tag, explicit_args), V.DataTypeClosure (clo, ctors) ->
+ begin match Tag.Map.find_opt tag ctors with
+ | Some binders ->
+ let (args, _) = check_args' ctx loc explicit_args clo binders in
+ I.Data (tag, args)
+ | None ->
+ abort (loc @$ ErrUnexpectedCtor tag)
end
- *)
-| A.Data _, va -> abort (loc @$ ErrExpectedDataType va)
+| A.Data _, vtyp -> abort (loc @$ ErrExpectedDataType vtyp)
| (A.Annot _ | A.App _ | A.FnType _ | A.DataType _ | A.Nat _ | A.Var _ | A.Type), vexpected ->
- let (t, vinferred) = infer' ctx t in
+ let (t, vinferred) = infer' ctx node in
if conv (Ctx.to_env ctx) vexpected vinferred then
t
else
abort (loc @$ ErrUnifyFail (vexpected, vinferred))
+and check_args' ctx loc explicit_args clo {binders; order} =
+ let arity = Array.length binders in
+ let explicit_count = Array.length explicit_args in
+ let implicit_count = ref 0 in
+
+ (* Elaborate explicits to Some and implicits to None. *)
+ let args = Array.make arity garbage in
+ for i = 0 to arity - 1 do
+ (* TODO: Once we allow passing an implicit argument explicitly,
+ * this logic will need to change a bit. *)
+ args.(i) <- match binders.(i).plicity with
+ | Plicity.Ex ->
+ let j = i - !implicit_count in
+ begin if explicit_count <= j then
+ abort (loc @$ ErrArityMismatch)
+ end;
+ Some (explicit_args.(j))
+ | Plicity.Im ->
+ incr implicit_count;
+ None
+ done;
+
+ (* We had enough explicit args, but now check if we have too many. *)
+ begin if explicit_count > arity - !implicit_count then
+ abort (loc @$ ErrArityMismatch)
+ end;
+
+ let vars = Uniq.fresh arity in
+ let clo = Env.bind clo vars in
+
+ (* Check arguments. *)
+ let args' = Array.make arity garbage in
+ let clo = ref clo in
+ for i = 0 to arity - 1 do
+ let j = order.(i) in
+ let vtyp = eval !clo binders.(j).typ in
+ let (arg, varg) = match args.(j) with
+ | Some arg ->
+ let arg = check' ctx arg vtyp in
+ let varg = eval (Ctx.to_env ctx) arg in
+ (arg, varg)
+    | None -> no_impl "implicit args" in
+ args'.(j) <- arg;
+ clo := Env.define !clo vars.(j) varg
+ done;
+ (args', !clo)
+
(* TODO: We need to check for circularity. Idea for algorithm: Have elaboration
* routines track not only variable usage, but also the least "delay"
 * associated with each variable usage. E.g., a variable x uses x with delay 0
* (it's used immediately), a function uses the variables used in its body with
* their delay + 1, and a β-redex uses the variables in the lambda with their
* delay - 1. *)
-and infer_definers' ctx = function
-| [||] -> impossible ()
-(* Single non-recursive definer. These are handled specially, because
- * the type annotation on the definiendum is optional. *)
-| [|{loc = definer_loc; data = A.Definer (patt, t)}|] ->
- let (ctx, vars) = Ctx.extend_fresh ctx 1 in
- let (xs, t, va) =
- (* First try to infer from definiendum (and check definiens). *)
- match infer_irrefutable_patt ctx patt with
- | Ok {Ctx.names; Ctx.vtyp} ->
- let t = check' ctx t vtyp in
- (names, t, vtyp)
- | Error {data = ErrNonInferablePatt _; _} ->
- (* Next try to infer from definiens (and check definiendum). *)
- let (t, va) = infer' ctx t in
- let xs = check_irrefutable_patt' ctx patt va in
- (xs, t, va)
- | Error e -> abort e in
- let (ctx, postb) = Ctx.bind_one ctx {names = xs; vtyp = va} vars.(0) in
- let vt = lazy_eval (Ctx.to_env ctx) t in
- let ctx = Ctx.define ctx vars.(0) vt in
- (ctx, [|I.Definer t|])
-(* Single recursive definer or many mutually-recursive definers. In either
- * case, a type annotation on the definiendum is mandatory. Probably, we could
- * use meta-variables/unification to permit omitting annotations in some cases
- * (e.g., like OCaml does with let rec ... and ...), but we generally try to
- * avoid meta-variables/unification, because they're unpredictable; inference
- * should only rely on local information. *)
-| definers ->
+and infer_definers' ctx definers =
let ndefiners = Array.length definers in
- let bindings = Array.map (infer_definiendum' ctx) definers in
- let (ctx, postbs) = Ctx.extend_and_bind_all ctx bindings in
- let definers' = Array.make ndefiners (Obj.magic 0) in
- let ctx' = ref ctx in
- for i = 0 to ndefiners - 1 do
- let {Ctx.vtyp; _} : V.value Ctx.prebinding = bindings.(i) in
- let t = match definers.(i).data with
- | A.Definer (_, t) ->
- (* The definiendum of non-recursive definers is not in scope when
- * checking the definiens, so mark the definiendum as inactive.
- * XXX: Using references like this is efficient, but ugly. *)
- postbs.(i).active <- false;
- let t = check' ctx t vtyp in
- postbs.(i).active <- true;
- definers'.(i) <- I.Definer t;
- t
- | A.RecDefiner (_, t) ->
- let t = check' ctx t vtyp in
- definers'.(i) <- I.Definer t;
- t in
- (* Lazily evaluate each definiens in the old context ctx, and
- * accumulate a new context !ctx' with the definitions. *)
- let vt = lazy_eval (Ctx.to_env ctx) t in
- ctx' := Ctx.define !ctx' postbs.(i).var vt
- done;
- (!ctx', definers')
-and infer_definiendum' ctx {loc; data = A.Definer (p, _) | A.RecDefiner (p, _)} =
- match infer_irrefutable_patt ctx p with
- | Ok binding -> binding
+  (* To simplify context handling, we work in a weakened context from the
+ * very beginning, even though the lhs of each definer is not allowed to
+ * refer to these variables. *)
+ let vars = Uniq.fresh ndefiners in
+ let ctx = Ctx.bind ctx vars in
+
+ match definers with
+ | [||] -> impossible ()
+
+ (* Single non-recursive definer. These are handled specially, because
+ * the type annotation on the lhs is optional. *)
+ | [|{loc; data = {recursive = false; lhs; rhs}}|] ->
+ let (names, rhs, vtyp) =
+ (* First try to infer from lhs (and check rhs). *)
+ match infer_irrefutable_patt ctx lhs with
+ | Ok (names, vtyp) ->
+ let rhs = check' ctx rhs vtyp in
+ (names, rhs, vtyp)
+ | Error {data = ErrNonInferablePatt _; _} ->
+ (* Next try to infer from rhs (and check lhs). *)
+ let (rhs, vtyp) = infer' ctx rhs in
+ let names = check_irrefutable_patt' ctx lhs vtyp in
+ (names, rhs, vtyp)
+ | Error e -> abort e in
+ let typ = quote (Ctx.to_env ctx) vtyp in
+ let (ctx, _) = Ctx.assign ctx names vtyp (V.Arb vars.(0)) in
+ let vrhs = lazy_eval (Ctx.to_env ctx) rhs in
+ let ctx = Ctx.define ctx vars.(0) vrhs in
+ (ctx, [|{rhs; typ}|])
+
+ (* Single recursive definer or many mutually-recursive definers. In either
+ * case, a type annotation on the lhs is mandatory. Probably, we could use
+ * meta-variables/unification to permit omitting annotations in some cases
+ * (e.g., like OCaml does with let rec ... and ...), but we generally try
+ * to avoid meta-variables/unification, because they're unpredictable;
+ * inference should only rely on local information. *)
+ | definers ->
+ (* Infer each definer (in the original context, because definitions are
+ * not allowed to have mutually dependent types), and make all
+ * assignments. We later set the active flag to false on assignments
+     * for non-recursive definers, so that the rhs of non-recursive definers
+     * cannot access their own assignment. *)
+ let ctx_with_asns = ref ctx in
+ let asns = Array.make ndefiners garbage in
+ for i = 0 to ndefiners - 1 do
+ let (names, vtyp) = infer_lhs' ctx definers.(i) in
+ let (ctx', asn) = Ctx.assign !ctx_with_asns names vtyp (V.Arb vars.(i)) in
+ ctx_with_asns := ctx';
+ asns.(i) <- asn
+ done;
+ let ctx = !ctx_with_asns in
+
+ (* Check each rhs with the appropriate assignments active, but not any
+ * definitions. This avoids any non-terminating mutual recursion during
+ * elaboration. It also means that each definition is "opaque" during
+ * the elaboration of each rhs, i.e., the let-bound variables behave
+ * like fn-bound variables. *)
+ let ctx_with_defs = ref ctx in
+ let definers' = Array.make ndefiners garbage in
+ for i = 0 to ndefiners - 1 do
+ let rhs = definers.(i).data.rhs in
+ let vtyp = asns.(i).typ in
+
+ let rhs =
+ if definers.(i).data.recursive then
+ check' ctx rhs vtyp
+ else begin
+ (* The lhs of non-recursive definers is not in scope when
+ * checking the rhs, so mark the lhs as inactive. *)
+ asns.(i).active <- false;
+ let rhs = check' ctx rhs vtyp in
+ asns.(i).active <- true;
+ rhs
+ end in
+
+ let vrhs = lazy_eval (Ctx.to_env ctx) rhs in
+ ctx_with_defs := Ctx.define !ctx_with_defs vars.(i) vrhs;
+
+ let typ = quote (Ctx.to_env ctx) vtyp in
+ definers'.(i) <- {I.rhs; I.typ}
+ done;
+
+ (!ctx_with_defs, definers')
+
+and infer_lhs' ctx {loc; data = {lhs; _}} =
+ match infer_irrefutable_patt ctx lhs with
+ | Ok (names, vtyp) -> (names, vtyp)
| Error ({data = ErrNonInferablePatt _; _} as root) ->
abort (loc @$ ErrNoLetAnnot root)
| Error e -> abort e
-and (infer_binders' :
- V.value Ctx.context
- -> Loc.t
- -> ('a A.binder -> A.term option)
- -> 'a A.binder array
- -> V.value Ctx.context * I.sorted_binders)
-= fun ctx loc annot binders ->
- let arity = Array.length binders in
-
+and infer_binders' ctx loc binders =
(* We associate two different orders on the variables of a multibinder.
* The *type dependency ordering* is the order where x <T y iff the type of
* y depends on x. The *representation dependency ordering* is the order
@@ -344,27 +341,40 @@ and (infer_binders' :
* low-level applications (e.g., modelling network packets) that there is
* some way to request a particular memory layout.) This flexibility leads
* to some difficulties here in the implementation, because we can't know
- * <T until we try type-checking each binder. To solve the problem, we
- * introduce a new kind of value, namely a Future, which represents the
- * type of a binder that we haven't type-checked yet. Whenever a future is
+ * <T until we try elaborating each binder. To solve the problem, we
+ * introduce a new kind of value, namely a *future*, which is the type of
+ * a binder that we haven't elaborated yet. Whenever a future is
 * encountered, we raise the BlockedOnFuture exception, and indicate which
* (multibinder, binder) pair we got stuck on. This exception is caught by
* the associated multibinder, which updates a graph-like data structure
* that tracks dependencies between binders. Every time a binder
- * successfully type-checks, we retry type checking each of its dependents.
- * This means that, after trying to type-check each binder at least once,
+ * successfully elaborates, we retry elaborating each of its dependents.
+ * This means that, after trying to elaborate each binder at least once,
* we're guaranteed to have found a topological sorting of <T if there is
* one, and otherwise there is a cycle, which we report as an error. *)
(* We generate a globally unique id for this multibinder so that we can
- * determine which BlockedOnFuture exceptions belong to us (see below). *)
- let id = Uniq.fresh () in
+ * determine which BlockedOnFuture exceptions belong to us (see below).
+ * This uniq is not a variable. *)
+ let id = (Uniq.fresh 1).(0) in
+
+ let arity = Array.length binders in
- (* Add future bindings to ctx. *)
- let future_preb i {data = A.Binder (_, p, _); _} =
- {Ctx.names = irrefutable_patt_names p; Ctx.vtyp = V.Future (id, i)} in
- let prebs = Array.mapi future_preb binders in
- let (ctx, postbs) = Ctx.extend_and_bind_all ctx prebs in
+ let vars = Uniq.fresh arity in
+ let ctx = Ctx.bind ctx vars in
+
+ (* Add future assignments to ctx. *)
+ let ctx = ref ctx in
+ let asns = Array.make arity garbage in
+ for i = 0 to arity - 1 do
+ let names = irrefutable_patt_names binders.(i).data.patt in
+ let typ = V.Future (id, i) in
+ let value = V.Arb vars.(i) in
+ let (ctx', asn) = Ctx.assign !ctx names typ value in
+ ctx := ctx';
+ asns.(i) <- asn
+ done;
+ let ctx = !ctx in
(* The data structure used to track binder states. *)
let open struct
@@ -375,7 +385,7 @@ and (infer_binders' :
dependency: (int * Name.t) option;
dependents: IntSet.t;
}
- | Typed of int
+ | Elaborated of int
let start_state = Pending {
dependency = None;
@@ -384,15 +394,16 @@ and (infer_binders' :
let find_cycle states start =
let rec search cycle i = match states.(i) with
- | Pending {dependency = Some (j, x); _} ->
+ | Pending {dependency = Some (j, name); _} ->
if i = j then
- x :: cycle
+ name :: cycle
else
- search (x :: cycle) j
+ search (name :: cycle) j
| Pending {dependency = None; _} -> impossible ()
- | Typed _ -> impossible () in
+ | Elaborated _ -> impossible () in
search [] start
end in
+
let states = Array.make arity start_state in
(* Next index in the topological sorting of <T. *)
@@ -402,56 +413,54 @@ and (infer_binders' :
let dependents = match states.(i) with
| Pending {dependency = None; dependents} -> dependents
| Pending {dependency = Some _; _} -> impossible ()
- | Typed _ -> impossible () in
- match infer_binder' ctx annot binders.(i) with
- | va ->
- postbs.(i).vtyp <- va;
- states.(i) <- Typed !next;
+ | Elaborated _ -> impossible () in
+ match infer_binder' ctx binders.(i) with
+ | vtyp ->
+ asns.(i).typ <- vtyp;
+ states.(i) <- Elaborated !next;
incr next;
IntSet.iter remove_dependency dependents
- | exception BlockedOnFuture (id', j, x) when id' = id ->
- states.(i) <- Pending {dependency = Some (j, x); dependents};
+ | exception BlockedOnFuture (id', j, name) when id' = id ->
+ states.(i) <- Pending {dependency = Some (j, name); dependents};
states.(j) <- match states.(j) with
| Pending r -> Pending {r with dependents = IntSet.add i r.dependents}
- | Typed _ -> impossible ()
+ | Elaborated _ -> impossible ()
+
and remove_dependency j =
let dependents = match states.(j) with
| Pending {dependency = Some _; dependents} -> dependents
| Pending {dependency = None; _} -> impossible ()
- | Typed _ -> impossible () in
+ | Elaborated _ -> impossible () in
states.(j) <- Pending {dependency = None; dependents};
try_binder j in
- for i = 0 to arity - 1 do try_binder i done;
+ for i = 0 to arity - 1 do
+ try_binder i
+ done;
- (* Put the binders in the topologically sorted (wrt <T) order that we just
- * found, but also keep track of the original order, because we need it for
- * code generation. Abort if a cycle is discovered. *)
- let binders' = Array.make arity (Obj.magic 0) in
+ let binders' = Array.make arity garbage in
let order = Array.make arity (-1) in
for i = 0 to arity - 1 do
match states.(i) with
- | Typed j ->
- let A.Binder (plicity, _, _) = binders.(i).data in
- let typ = quote (Ctx.to_env ctx) postbs.(i).vtyp in
- binders'.(i) <- I.Binder (plicity, typ);
+ | Elaborated j ->
+ let typ = quote (Ctx.to_env ctx) asns.(i).typ in
+ binders'.(i) <- {I.plicity = binders.(i).data.plicity; I.typ};
order.(j) <- i
| Pending _ ->
abort (loc @$ ErrBinderCycle (find_cycle states i))
done;
- (ctx, I.SortedBinders (binders', order))
-and infer_binder' ctx annot binder = match annot binder with
+ (ctx, {binders = binders'; order})
+
+and infer_binder' ctx {loc; data = {plicity; patt; typ}} = match typ with
| Some a ->
- let {data = A.Binder (_, patt, _); _} = binder in
let a = check' ctx a V.Type in
let va = eval (Ctx.to_env ctx) a in
let _ = check_irrefutable_patt' ctx patt va in
va
| None ->
- let {data = A.Binder (plicity, patt, _); loc} = binder in
match infer_irrefutable_patt ctx patt, plicity with
- | Ok {Ctx.vtyp = va; _}, _ -> va
+ | Ok (_, vtyp), _ -> vtyp
| Error {data = ErrNonInferablePatt _; _}, Plicity.Im ->
begin match check_irrefutable_patt ctx patt V.Type with
| Ok _ -> V.Type
@@ -461,98 +470,40 @@ and infer_binder' ctx annot binder = match annot binder with
abort (loc @$ ErrNoBinderAnnot root)
| Error e, _ -> abort e
-(* TODO: Should this really return a binding? *)
-and infer_irrefutable_patt ctx p =
- try Ok (infer_irrefutable_patt' ctx p) with
+and infer_ctor' ctx {loc; data = A.Ctor binders} =
+ let (_, binders) = infer_binders' ctx loc binders in
+ binders
+
+and infer_irrefutable_patt ctx patt =
+ try Ok (infer_irrefutable_patt' ctx patt) with
| Exn e -> Error e
-and infer_irrefutable_patt' ctx ({loc; data = p} as n) = match p with
-| A.PWild -> abort (loc @$ ErrNonInferablePatt n)
-| A.PBind _ -> abort (loc @$ ErrNonInferablePatt n)
-| A.PAnnot (p, a) ->
- let a = check' ctx a V.Type in
- let vtyp = eval (Ctx.to_env ctx) a in
- let names = check_irrefutable_patt' ctx p vtyp in
- {names; vtyp}
+and infer_irrefutable_patt' ctx ({loc; data = patt} as node) = match patt with
+| A.PWild -> abort (loc @$ ErrNonInferablePatt node)
+| A.PBind _ -> abort (loc @$ ErrNonInferablePatt node)
+| A.PAnnot (patt, typ) ->
+ let typ = check' ctx typ V.Type in
+ let vtyp = eval (Ctx.to_env ctx) typ in
+ let names = check_irrefutable_patt' ctx patt vtyp in
+ (names, vtyp)
| A.PData _ -> abort (loc @$ ErrDataPattRefutable)
-and check_irrefutable_patt ctx p va =
- try Ok (check_irrefutable_patt' ctx p va) with
+and check_irrefutable_patt ctx patt vtyp =
+ try Ok (check_irrefutable_patt' ctx patt vtyp) with
| Exn e -> Error e
-and check_irrefutable_patt' ctx {loc; data = p} va = match p with
+and check_irrefutable_patt' ctx {loc; data = patt} vtyp = match patt with
| A.PWild -> [||]
-| A.PBind x -> [|x|]
-| A.PAnnot (p, b) ->
- let b = check' ctx b V.Type in
- let vb = eval (Ctx.to_env ctx) b in
- if conv (Ctx.to_env ctx) va vb then
- check_irrefutable_patt' ctx p va
+| A.PBind name -> [|name|]
+| A.PAnnot (patt, typ') ->
+ let typ' = check' ctx typ' V.Type in
+ let vtyp' = eval (Ctx.to_env ctx) typ' in
+ if conv (Ctx.to_env ctx) vtyp vtyp' then
+ check_irrefutable_patt' ctx patt vtyp
else
- abort (loc @$ ErrUnifyFail (va, vb))
+ abort (loc @$ ErrUnifyFail (vtyp, vtyp'))
| A.PData _ -> abort (loc @$ ErrDataPattRefutable)
-and irrefutable_patt_names {loc; data = p} = match p with
+and irrefutable_patt_names {loc; data = patt} = match patt with
| A.PWild -> [||]
-| A.PBind x -> [|x|]
-| A.PAnnot (p, _) -> irrefutable_patt_names p
+| A.PBind name -> [|name|]
+| A.PAnnot (patt, _) -> irrefutable_patt_names patt
| A.PData _ -> abort (loc @$ ErrDataPattRefutable)
-
-
-
-
-
-
-
-
-
-
-
-
-
-(* Currently, we can only infer types from annotation patterns, but in the
- * future we will be able to infer type T from p & q if we can infer it from
- * p (resp., q) and check q (resp., p) against T. *)
-(* TODO: Consistency issue: infer_patt returns option, whereas infer raises an
- * exception. *)
- (*
-and infer_patt ctx (Node.Of (_, p)) = match p with
-| A.PWild -> None
-| A.PBind _ -> None
-| A.PAnnot (p, a) ->
- let a = check ctx a V.Type in
- let va = eval (Ctx.to_env ctx) a in
- let x = check_patt ctx p va in
- Some (x, va)
-| A.PData _ -> None
-
-and check_patt ctx (Node.Of (loc, p)) va = match p, va with
-| A.PWild, _ -> None (*[]*)
-| A.PBind x, va -> Some x (*[(x, va)]*)
-| A.PAnnot (p, b), va ->
- let b = check ctx b (V.Intrin I.Type) in
- let vb = eval ctx.values b in
- if conv ctx.len va vb then
- check_patt ctx p va
- else
- (* TODO: pretty-print types *)
- error loc "pattern does not unify with inferred type"
-| A.PData _, _ -> failwith "no impl"
-| A.PData (tag, ps), V.DataType (env, ctors) ->
- (* TODO: this is similar to checking code for Data. *)
- begin match IdentMap.find_opt tag ctors with
- | Some bs ->
- if List.length ps <> List.length bs then
- error loc "arity mismatch"
- else
- let rec go ctx ps env bs = match ps, bs with
- | p :: ps, b :: bs ->
- let vb = eval env b in
- let binding = check_patt ctx p vb in
- let v = eval ctx.values arg in
- arg :: go args (varg :: env) bs
- | [], [] -> []
- | _, _ -> failwith "impossible" in
- I.Data (tag, go args env bs)
- | None -> error loc ("expected data type with constructor \"" ^ tag ^ "\"")
- end
-| A.PData _, _ -> error loc "expected data type"
-*)
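
The scheduling loop inside infer_binders' above is, at its core, an incremental topological sort: each binder either elaborates or blocks on exactly one other binder, and every success retries the binders that were waiting on it. Below is a self-contained sketch of just that scheduling skeleton, under the simplifying assumption that each binder's single dependency is known up front (in the real code it is discovered by catching BlockedOnFuture, and a Name.t is recorded with each dependency for error reporting):

let topo_order (deps : int option array) : int array option =
  let n = Array.length deps in
  let elaborated = Array.make n false in
  let order = ref [] in
  let rec try_binder i =
    if not elaborated.(i) then
      match deps.(i) with
      | Some j when not elaborated.(j) -> ()  (* blocked; retried when j succeeds *)
      | _ ->
        elaborated.(i) <- true;
        order := i :: !order;
        (* Retry every binder that was waiting on i. *)
        Array.iteri (fun k d -> if d = Some i then try_binder k) deps in
  for i = 0 to n - 1 do try_binder i done;
  if Array.for_all (fun b -> b) elaborated
  then Some (Array.of_list (List.rev !order))  (* a valid ordering was found *)
  else None                                    (* the remaining binders form a cycle *)

(* For example, topo_order [|Some 2; None; Some 1|] = Some [|1; 2; 0|],
 * while topo_order [|Some 1; Some 0|] = None. *)
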
diff --git a/env.ml b/env.ml
@@ -1,32 +1,26 @@
open Global
-(* TODO: Smarter use modules to avoid redundancy between Env and Ctx. *)
+(* TODO: Make smarter use of modules to avoid redundancy between Env and Ctx. Or just
+ * store an env on each ctx instead of duplicating fields. *)
type 'a environment = {
- indices: Index.UniqTsil.t; (* TODO: This is a terrible name. *)
- values: 'a Uniq.Map.t;
+ bindings: Index.UniqTsil.t;
+ definitions: 'a Uniq.Map.t;
}
let empty = {
- indices = Index.UniqTsil.lin;
- values = Uniq.Map.empty;
+ bindings = Index.UniqTsil.lin;
+ definitions = Uniq.Map.empty;
}
-let extend env vars = {env with indices = Index.UniqTsil.snoc env.indices vars}
-
-let extend_fresh env n =
- let vars = Uniq.fresh_array n in
- (extend env vars, vars)
+let bind env vars =
+ {env with bindings = Index.UniqTsil.snoc env.bindings vars}
let define env var value =
- {env with values = Uniq.Map.add var value env.values}
+ {env with definitions = Uniq.Map.add var value env.definitions}
-let get env var = Uniq.Map.find_opt var env.values
+let index env ij = Index.UniqTsil.var_of env.bindings ij
-let var_of env ij = Index.UniqTsil.var_of env.indices ij
+let quote env var = Index.UniqTsil.index_of env.bindings var
-(* TODO temp *)
-let index_of env var =
- match Index.UniqTsil.index_of env.indices var with
- | Some ij -> Some ij
- | None -> failwith "uh oh"
+let eval env var = Uniq.Map.find_opt var env.definitions
diff --git a/eval.ml b/eval.ml
@@ -4,31 +4,31 @@ module V = Value
(* TODO: "Glued" evaluation? I.e., evaluation that only lazily unfolds in some
* cases? *)
+(* TODO: Lazily evaluate args? *)
let rec eval env = function
| I.Let (definers, body) ->
let ndefiners = Array.length definers in
- let (env, vars) = Env.extend_fresh env ndefiners in
+ let vars = Uniq.fresh ndefiners in
+ let env = Env.bind env vars in
let env' = ref env in
for i = 0 to ndefiners - 1 do
- let Definer definiens = definers.(i) in
(* In typical programs, most definitions will not appear inside a type
* and hence won't be tested for conversion. Thus, it's important that
- * we only lazily evaluate each definiens. *)
- let vdefiniens = lazy_eval env definiens in
- env' := Env.define !env' vars.(i) vdefiniens
+ * we only lazily evaluate each rhs. *)
+ let vrhs = lazy_eval env definers.(i).rhs in
+ env' := Env.define !env' vars.(i) vrhs
done;
eval !env' body
-(* TODO: Lazily evaluate args? *)
-| I.App (fn, args) -> apply env (eval env fn) (Array.map (eval env) args)
-| I.Fn fn -> V.Fn (V.Closure (env, fn))
-| I.FnType fntype -> V.FnType (V.Closure (env, fntype))
-| I.Data _ -> no_impl "eval data"
-| I.DataType _ -> no_impl "eval datatype"
+| I.App (fn, args) -> apply (eval env fn) (Array.map (eval env) args)
+| I.Fn fn -> V.FnClosure (env, fn)
+| I.FnType fntype -> V.FnTypeClosure (env, fntype)
+| I.Data (tag, args) -> V.Data (tag, Array.map (eval env) args)
+| I.DataType datatype -> V.DataTypeClosure (env, datatype)
| I.Nat n -> V.Nat n
| I.Var ij ->
- let var = Env.var_of env ij in
- begin match Env.get env var with
+ let var = Env.index env ij in
+ begin match Env.eval env var with
| Some v -> v
| None -> Arb var
end
@@ -41,121 +41,105 @@ let rec eval env = function
* is sensitive to term structure) and call eval. *)
and lazy_eval env t = V.Lazy (lazy (eval env t))
-and apply env fn args = match fn, args with
-| V.Lazy fn, args -> V.Lazy (lazy (apply env (Lazy.force fn) args))
-| V.Fn (V.Closure (cloenv, {sorted_binders; body; _})), args ->
- let I.SortedBinders (binders, _) = sorted_binders in
+and apply fn args = match fn, args with
+| V.Lazy fn, args -> V.Lazy (lazy (apply (Lazy.force fn) args))
+| V.FnClosure (clo, {binders = {binders; _}; body; _}), args ->
let arity = Array.length binders in
- let (cloenv, vars) = Env.extend_fresh cloenv arity in
- let cloenv = fold_left2 Env.define cloenv vars args in
- eval cloenv body
+ let vars = Uniq.fresh arity in
+ let clo = Env.bind clo vars in
+ let clo = fold_left2 Env.define clo vars args in
+ eval clo body
| V.Builtin I.NatOpAdd, [|V.Nat m; V.Nat n|] -> V.Nat (Z.add m n)
| V.Builtin I.NatOpMul, [|V.Nat m; V.Nat n|] -> V.Nat (Z.mul m n)
| fn, args -> V.App (fn, args)
-(* TODO: Quote of eval normalizes terms; in many cases, this does unnecessary
- * work. *)
+(* TODO: This is inefficient. We should do something smarter, like lifting
+ * stuck variables to the outside of neutral terms so we can force them without
+ * needing to recurse. *)
+and force env = function
+| V.App (fn, args) -> apply (force env fn) (Array.map (force env) args)
+| V.Arb var ->
+ begin match Env.eval env var with
+ | Some v -> force env v
+ | None -> Arb var
+ end
+| V.Lazy l -> force env (Lazy.force l)
+| v -> v
+
let rec quote env = function
| V.App (fn, args) -> I.App (quote env fn, Array.map (quote env) args)
-| V.Fn (V.Closure (cloenv, {sorted_binders; body; codomain})) ->
- (* TODO: This is definitely duplicated somewhere. *)
- let I.SortedBinders (binders, order) = sorted_binders in
- let arity = Array.length binders in
- let (cloenv, vars) = Env.extend_fresh cloenv arity in
- let env = Env.extend env vars in
- let binders' = Array.make arity (Obj.magic 0) in
- for i = 0 to arity - 1 do
- let j = order.(i) in
- let I.Binder (plicity, ty) = binders.(j) in
- let ty = quote env (eval cloenv ty) in
- binders'.(j) <- I.Binder (plicity, ty)
- done;
- let sorted_binders = I.SortedBinders (binders', order) in
- let body = quote env (eval cloenv body) in
- let codomain = quote env (eval cloenv codomain) in
- I.Fn {sorted_binders; body; codomain}
-| V.FnType (V.Closure (cloenv, {sorted_binders; codomain})) ->
- (* TODO: This is definitely duplicated somewhere. *)
- let I.SortedBinders (binders, order) = sorted_binders in
- let arity = Array.length binders in
- let (cloenv, vars) = Env.extend_fresh cloenv arity in
- let env = Env.extend env vars in
- let binders' = Array.make arity (Obj.magic 0) in
- for i = 0 to arity - 1 do
- let j = order.(i) in
- let I.Binder (plicity, ty) = binders.(j) in
- let ty = quote env (eval cloenv ty) in
- binders'.(j) <- I.Binder (plicity, ty)
- done;
- let sorted_binders = I.SortedBinders (binders', order) in
- let codomain = quote env (eval cloenv codomain) in
- I.FnType {sorted_binders; codomain}
-| V.Data _ -> no_impl "quote data"
- (*
- I.Data (tag, List.map (quote l) args)
- *)
-| V.DataType _ -> no_impl "quote datatype"
- (*
- let rec go l env = function
- | b :: bs ->
- let b = quote l (eval env b) in
- b :: go (l + 1) (V.Arb l :: env) bs
- | [] -> [] in
- I.DataType (IdentMap.map (go l env) ctors)
- *)
+| V.FnClosure (clo, {binders; body; codomain}) ->
+ let (env, clo, binders) = quote_binders env clo binders in
+ let body = quote env (eval clo body) in
+ let codomain = quote env (eval clo codomain) in
+ I.Fn {binders; body; codomain}
+| V.FnTypeClosure (clo, {binders; codomain}) ->
+ let (env, clo, binders) = quote_binders env clo binders in
+ let codomain = quote env (eval clo codomain) in
+ I.FnType {binders; codomain}
+| V.Data (tag, args) -> I.Data (tag, Array.map (quote env) args)
+| V.DataTypeClosure (clo, ctors) ->
+ let quote_ctor binders =
+ let (_, _, binders) = quote_binders env clo binders in
+ binders in
+ I.DataType (Tag.Map.map quote_ctor ctors)
| V.Nat n -> I.Nat n
| V.Arb var ->
- begin match Env.index_of env var with
+ begin match Env.quote env var with
| Some ij -> I.Var ij
| None -> impossible () (* XXX: Is it? *)
end
| V.Type -> I.Type
| V.Builtin b -> I.Builtin b
| V.Lazy l -> quote env (Lazy.force l)
-| V.Future _ -> impossible () (* XXX: Is it? *)
+| V.Future _ -> impossible ()
-let force = function
-| V.Lazy l -> Lazy.force l
-| v -> v
+and quote_binders env clo {binders; order} =
+ let arity = Array.length binders in
+ let vars = Uniq.fresh arity in
+ let env = Env.bind env vars in
+ let clo = Env.bind clo vars in
+ let binders' = Array.make arity garbage in
+ for i = 0 to arity - 1 do
+ let j = order.(i) in
+ let typ = quote env (eval clo binders.(j).typ) in
+ binders'.(j) <- {binders.(j) with typ}
+ done;
+ (env, clo, {binders = binders'; order})
-let rec conv env v0 v1 = match force v0, force v1 with
+let rec conv env v0 v1 = match force env v0, force env v1 with
(* TODO: η-equality? *)
(* TODO: Switch *)
| V.App (fn0, args0), V.App (fn1, args1) ->
  (* Note that args0 and args1 aren't necessarily the same length, but they
* are if fn0 and fn1 are convertible, so the short-circuiting of (&&)
- * ensures that all2 can't fail. *)
- conv env fn0 fn1 && all2 (conv env) args0 args1
-| V.Fn (V.Closure (cloenv0, fn0)), V.Fn (V.Closure (cloenv1, fn1)) ->
- let I.SortedBinders (binders, _) = fn0.sorted_binders in
- let arity = Array.length binders in
- let {I.body = body0; _}, {I.body = body1; _} = fn0, fn1 in
- let vars = Uniq.fresh_array arity in
- let env = Env.extend env vars in
- let cloenv0, cloenv1 = Env.extend cloenv0 vars, Env.extend cloenv1 vars in
- let vbody0, vbody1 = eval cloenv0 body0, eval cloenv1 body1 in
+ * ensures that for_all2 can't fail. *)
+ conv env fn0 fn1 && Array.for_all2 (conv env) args0 args1
+| V.FnClosure (clo0, fn0), V.FnClosure (clo1, fn1) ->
+ let arity = Array.length fn0.binders.binders in
+ let vars = Uniq.fresh arity in
+ let env = Env.bind env vars in
+ let clo0, clo1 = Env.bind clo0 vars, Env.bind clo1 vars in
+ let vbody0, vbody1 = eval clo0 fn0.body, eval clo1 fn1.body in
conv env vbody0 vbody1
-| V.FnType (V.Closure (cloenv0, fntype0)), V.FnType (V.Closure (cloenv1, fntype1)) ->
- let {I.sorted_binders = sorted_binders0; I.codomain = codomain0} = fntype0 in
- let {I.sorted_binders = sorted_binders1; I.codomain = codomain1} = fntype1 in
- let I.SortedBinders (binders0, order0) = sorted_binders0 in
- let I.SortedBinders (binders1, order1) = sorted_binders1 in
- if Array.length binders0 <> Array.length binders1 then false else
- let arity = Array.length binders0 in
- let vars = Uniq.fresh_array arity in
- let env = Env.extend env vars in
- let cloenv0, cloenv1 = Env.extend cloenv0 vars, Env.extend cloenv1 vars in
- let domain_conv i0 i1 =
- if i0 <> i1 then false else
- let I.Binder (_, ty0) = binders0.(i0) in
- let I.Binder (_, ty1) = binders1.(i1) in
- let vty0 = eval cloenv0 ty0 in
- let vty1 = eval cloenv1 ty1 in
- conv env vty0 vty1 in
- if not (all2 domain_conv order0 order1) then false else
- let vcodomain0 = eval cloenv0 codomain0 in
- let vcodomain1 = eval cloenv1 codomain1 in
- conv env vcodomain0 vcodomain1
+| V.FnTypeClosure (clo0, fntype0), V.FnTypeClosure (clo1, fntype1) ->
+ let binders0 = fntype0.binders in
+ let binders1 = fntype1.binders in
+ let arity0 = Array.length binders0.binders in
+ let arity1 = Array.length binders1.binders in
+ arity0 = arity1 &&
+ let vars = Uniq.fresh arity0 in
+ let env = Env.bind env vars in
+ let clo0, clo1 = Env.bind clo0 vars, Env.bind clo1 vars in
+ let domain_conv i0 i1 =
+ i0 = i1 &&
+ let vtyp0 = eval clo0 binders0.binders.(i0).typ in
+ let vtyp1 = eval clo1 binders1.binders.(i1).typ in
+ conv env vtyp0 vtyp1 in
+ Array.for_all2 domain_conv binders0.order binders1.order &&
+ let vcodomain0 = eval clo0 fntype0.codomain in
+ let vcodomain1 = eval clo1 fntype1.codomain in
+ conv env vcodomain0 vcodomain1
| V.Nat n, V.Nat m -> Z.equal n m
| V.Arb var0, V.Arb var1 -> var0 = var1
| V.Type, V.Type -> true
diff --git a/global.ml b/global.ml
@@ -4,6 +4,9 @@ include Node.Global
let impossible () = failwith "impossible"
let no_impl feature = failwith ("no impl: " ^ feature)
+(* Dummy value for pre-sizing arrays whose elements are all overwritten before use. *)
+let garbage = Obj.magic 0
+
(* For some reason this isn't in the standard Array module... *)
let rec fold_left2 f acc xs ys =
begin if Array.length xs <> Array.length ys then
@@ -15,7 +18,7 @@ let rec fold_left2 f acc xs ys =
done;
!acc
-(* Short-circuiting AND fold_left2. *)
+(* Short-circuiting pairwise AND. *)
let rec all2 f xs ys =
begin if Array.length xs <> Array.length ys then
invalid_arg "all2"
@@ -23,3 +26,14 @@ let rec all2 f xs ys =
let len = Array.length xs in
let rec go i = (i = len) || (f xs.(i) ys.(i) && go (i + 1)) in
go 0
+
+let concat_string_array sep ss =
+  let seplen = String.length sep * max 0 (Array.length ss - 1) in
+  let lensum = Array.fold_left (fun len s -> len + String.length s) seplen ss in
+  let bytes = Bytes.create lensum in
+  let pos = ref 0 in
+  let blit s =
+    Bytes.blit_string s 0 bytes !pos (String.length s);
+    pos := !pos + String.length s in
+  Array.iteri (fun i s -> if i > 0 then blit sep; blit s) ss;
+  Bytes.to_string bytes
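
As a quick illustration of the helpers above (the assertions are illustrative expectations, not tests from the commit):

let () =
  assert (concat_string_array ", " [|"a"; "b"; "c"|] = "a, b, c");
  assert (concat_string_array ", " [||] = "");
  assert (all2 (=) [|1; 2; 3|] [|1; 2; 3|])
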
diff --git a/index.ml b/index.ml
@@ -1,4 +1,8 @@
-(* Pairs of de Bruijn indices. *)
+(* Pairs of indices, which we usually denote ij or (i, j). The first index i is
+ * a de Bruijn index in the usual sense; it refers to a binder, with the
+ * innermost binder being 0. All our binders are multibinders, i.e., they bind
+ * multiple variables simultaneously, and the second index j refers to the
+ * "subbinder" within the multibinder. *)
type index = Of of int * int
type t = index
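
As a worked example of the scheme described in the comment above (the surface program is hypothetical): in fn (x, y) => fn (z) => x, the x in the body sits under two multibinders, so it refers to the outer multibinder with i = 1 and to its first subbinder with j = 0, i.e. the core term Var (Index.Of (1, 0)); an occurrence of z at the same spot would be Var (Index.Of (0, 0)), and y would be Var (Index.Of (1, 1)).
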
diff --git a/internal.ml b/internal.ml
@@ -1,4 +1,5 @@
open Global
+module P = Pretty
type builtin =
| NatType
@@ -18,20 +19,29 @@ type term =
| Type
| Builtin of builtin
-and definer = Definer of term
+and definer = {
+ rhs: term;
+ typ: term;
+}
-and binder = Binder of Plicity.t * term
+and binder = {
+ plicity: Plicity.t;
+ typ: term;
+}
-and sorted_binders = SortedBinders of binder array * int array
+and sorted_binders = {
+ binders: binder array;
+ order: int array;
+}
and fn = {
- sorted_binders: sorted_binders;
+ binders: sorted_binders;
body: term;
codomain: term;
}
and fntype = {
- sorted_binders: sorted_binders;
+ binders: sorted_binders;
codomain: term;
}
@@ -46,54 +56,66 @@ and patt_discriminant =
| PattDiscData of ident * int
*)
-let rec to_string m t =
- let s = to_string in
- let (c, s) = match t with
- | Let (definers, body) ->
- (0,
- String.concat ", " (Array.to_list (Array.map (definer_to_string 1) definers))
- ^ "; "
- ^ s 0 body
- )
- | App (fn, args) -> (6, String.concat " " (List.map (s 7) (fn :: Array.to_list args)))
- | Fn {sorted_binders = SortedBinders (binders, _); body; codomain} ->
- (1,
- "fn "
- ^ String.concat ", " (Array.to_list (Array.map (binder_to_string 2) binders))
- ^ " : "
- ^ s 2 codomain
- ^ " => "
- ^ s 1 body
- )
- | FnType {sorted_binders = SortedBinders (binders, _); codomain} ->
- (3,
- "["
- ^ String.concat ", " (Array.to_list (Array.map (binder_to_string 3) binders))
- ^ "]"
- ^ " -> "
- ^ s 3 codomain
- )
- | Data _ -> no_impl "print data"
- (*
- (6, "@" ^ String.concat " " (tag :: List.map (s 7) ts))
- *)
- | DataType _ -> no_impl "print datatype"
- (*
- let string_of_ctor (tag, ts) =
- "@" ^ String.concat " " (tag :: List.map (s 7) ts) in
- let ctors = List.map string_of_ctor (IdentMap.to_list ctors) in
- (8, "#[" ^ String.concat "; " ctors ^ "]")
- *)
- | Nat n -> (7, Z.to_string n)
- | Var ij -> (7, "#" ^ Index.to_string ij)
- | Type -> (7, "`Type")
- | Builtin NatType -> (7, "`Nat")
- | Builtin NatOpAdd -> (7, "`+")
- | Builtin NatOpMul -> (7, "`*") in
- if c < m then "(" ^ s ^ ")" else s
+let rec write_term buf = function
+| Let (definers, body) ->
+ P.parens buf 0 @@ fun () ->
+ P.at 1 P.fold_left buf ", " write_definer definers;
+ P.string buf "; ";
+ P.at 0 write_term buf body
+| App (fn, args) ->
+ P.parens buf 6 @@ fun () ->
+ P.at 7 write_term buf fn;
+ P.string buf " ";
+ P.at 7 P.fold_left buf " " write_term args
+| Fn {binders = {binders; _}; body; codomain} ->
+ P.parens buf 1 @@ fun () ->
+ P.string buf "fn ";
+ P.at 2 P.fold_left buf ", " write_binder binders;
+ P.string buf " : ";
+ P.at 2 write_term buf codomain;
+ P.string buf " => ";
+ P.at 1 write_term buf body
+| FnType {binders = {binders; _}; codomain} ->
+ P.parens buf 3 @@ fun () ->
+ P.string buf "[";
+ P.at 3 P.fold_left buf ", " write_binder binders;
+ P.string buf "] -> ";
+ P.at 3 write_term buf codomain
+| Data _ -> no_impl "write data"
+| DataType _ -> no_impl "write datatype"
+| Nat n ->
+ P.parens buf 7 @@ fun () ->
+ P.string buf (Z.to_string n)
+| Var ij ->
+ P.parens buf 7 @@ fun () ->
+ P.string buf "`";
+ P.string buf (Index.to_string ij) (* TODO: writer for indices *)
+| Type ->
+ P.parens buf 7 @@ fun () ->
+ P.string buf "`Type"
+| Builtin NatType ->
+ P.parens buf 7 @@ fun () ->
+ P.string buf "`Nat"
+| Builtin NatOpAdd ->
+ P.parens buf 7 @@ fun () ->
+ P.string buf "`+"
+| Builtin NatOpMul ->
+ P.parens buf 7 @@ fun () ->
+ P.string buf "`*"
+
+and write_definer buf {rhs; typ} =
+ P.at 3 write_term buf rhs;
+ P.string buf " : ";
+ P.at 2 write_term buf typ
-and definer_to_string m (Definer t) = to_string m t
+and write_binder buf = function
+| {plicity = Plicity.Im; typ} ->
+ P.string buf "?";
+ write_term buf typ
+| {plicity = Plicity.Ex; typ} ->
+ write_term buf typ
-and binder_to_string m = function
-| Binder (Plicity.Ex, t) -> to_string m t
-| Binder (Plicity.Im, t) -> "?" ^ to_string m t
+let to_string t =
+ let buf = P.make 128 in
+ write_term buf t;
+ P.to_string buf
diff --git a/main.ml b/main.ml
@@ -23,21 +23,16 @@ let abstract = Desugar.term concrete
 * need a pseudo-binder to create the top-level/default context. *)
let ctx =
- let nat_binder = I.Binder (Plicity.Ex, I.Builtin I.NatType) in
- let nat_op_type = V.FnType (V.Closure (Env.empty, {
- sorted_binders = I.SortedBinders ([|nat_binder; nat_binder|], [|0; 1|]);
+ let nat_binder = {I.plicity = Plicity.Ex; I.typ = I.Builtin I.NatType} in
+ let nat_op_typ = V.FnTypeClosure (Env.empty, {
+ binders = {binders = [|nat_binder; nat_binder|]; order = [|0; 1|]};
codomain = I.Builtin I.NatType;
- })) in
- let (ctx, postbs) = Ctx.extend_and_bind_all Ctx.empty [|
- {names = [|Name.Of "Type"|]; vtyp = V.Type};
- {names = [|Name.Of "Nat"|]; vtyp = V.Type};
- {names = [|Name.Of "`+"|]; vtyp = nat_op_type};
- {names = [|Name.Of "`*"|]; vtyp = nat_op_type};
- |] in
- let ctx = Ctx.define ctx postbs.(0).var V.Type in
- let ctx = Ctx.define ctx postbs.(1).var (V.Builtin I.NatType) in
- let ctx = Ctx.define ctx postbs.(2).var (V.Builtin I.NatOpAdd) in
- let ctx = Ctx.define ctx postbs.(3).var (V.Builtin I.NatOpMul) in
+ }) in
+ let ctx = Ctx.empty in
+ let (ctx, _) = Ctx.assign ctx [|Name.Of "Type"|] V.Type V.Type in
+ let (ctx, _) = Ctx.assign ctx [|Name.Of "Nat"|] V.Type (V.Builtin I.NatType) in
+ let (ctx, _) = Ctx.assign ctx [|Name.Of "`+"|] nat_op_typ (V.Builtin I.NatOpAdd) in
+ let (ctx, _) = Ctx.assign ctx [|Name.Of "`*"|] nat_op_typ (V.Builtin I.NatOpMul) in
ctx
(*
@@ -51,5 +46,5 @@ let (internal, ty) = Elab.infer' ctx abstract
let env = Ctx.to_env ctx
let internal' = Eval.quote env (Eval.eval env internal)
-let () = print_endline (Internal.to_string 0 internal)
-let () = print_endline (Internal.to_string 0 internal')
+let () = print_endline (I.to_string internal)
+let () = print_endline (I.to_string internal')
diff --git a/modules.ml b/modules.ml
@@ -0,0 +1,72 @@
+module type SURFACE = sig
+ type term
+end
+
+module type CORE = sig
+ type term
+end
+
+module type DOMAIN = sig
+ type value
+end
+
+module type ENVIRONMENT = sig
+end
+
+module type CONTEXT = sig
+ type ctx
+end
+
+module type ELABORATOR = sig
+ module Ctx: CONTEXT
+ module Surf: SURFACE
+ module Core: CORE
+
+ val infer: Ctx.ctx -> Surf.term -> Core.term
+ val
+end
+
+module type BINDER = sig
+ type term
+
+ type t
+
+ val plicity: t -> Plicity.t
+ val names: t -> Name.t array
+ val annot: t -> term option
+ val to_string: t -> string
+end
+
+module type MULTI_BINDER = sig
+ module B: BINDER
+
+ type t
+
+ val reorder: bool
+end
+
+module type PATTERN = sig
+ type t
+
+ val names: t -> Name.t array
+end
+
+module type DEFINER = sig
+ module P: PATTERN
+ type term
+
+ type t
+
+ val recursive: t -> bool
+ val definiendum: t -> P.t
+ val annotation: t -> term option
+ val to_string: t -> string
+end
+
+module type MULTI_DEFINER = sig
+ module D: DEFINER
+
+ type t
+
+ val mutual: bool
+end
diff --git a/node.ml b/node.ml
@@ -1,10 +1,10 @@
-(* TODO: I think I've been inconsistent about the order of loc and data when
- * pattern matching on nodes... Check this. *)
+(* Nodes are simply data together with a location. *)
type 'a t = {loc: Loc.t; data: 'a}
let (@$) loc data = {loc; data}
+(* TODO: Remove uses of this in favor of pattern matching. *)
let map f {loc; data} = {loc; data = f data}
module Global = struct
diff --git a/plicity.ml b/plicity.ml
@@ -1,2 +1,8 @@
+(* Implicit or explicit. *)
+
type plicity = Im | Ex
type t = plicity
+
+let to_string = function
+| Im -> "Im"
+| Ex -> "Ex"
diff --git a/pretty.ml b/pretty.ml
@@ -0,0 +1,24 @@
+(* Pretty printing. *)
+
+let make n = (Buffer.create n, 0)
+
+let to_string (b, _) = Buffer.contents b
+
+let at c f (b, _) = f (b, c)
+
+let parens (b, m) c k =
+ begin if c < m then Buffer.add_char b '(' end;
+ k ();
+ begin if c < m then Buffer.add_char b ')' end
+
+let string (b, _) s =
+ Buffer.add_string b s
+
+let fold_left (b, m) sep f = function
+| [||] -> ()
+| ss ->
+ f (b, m) ss.(0);
+ for i = 1 to Array.length ss - 1 do
+ Buffer.add_string b sep;
+ f (b, m) ss.(i)
+ done
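
The combinators above thread a precedence level alongside the buffer: at runs a writer with a given surrounding level, and parens wraps its continuation in parentheses only when the construct's own level c is below the surrounding level m. A minimal usage sketch, assuming module P = Pretty as in internal.ml (write_sum is illustrative, not part of the commit):

let write_sum buf (x, y) =
  P.parens buf 5 @@ fun () ->   (* treat a "sum" as binding at level 5 *)
  P.string buf x;
  P.string buf " + ";
  P.string buf y

let () =
  let buf = P.make 16 in
  P.at 0 write_sum buf ("a", "b");   (* surrounding level 0: prints  a + b *)
  P.string buf " vs ";
  P.at 9 write_sum buf ("a", "b");   (* surrounding level 9: prints  (a + b) *)
  print_endline (P.to_string buf)    (* a + b vs (a + b) *)
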
diff --git a/shadow_stack.ml b/shadow_stack.ml
@@ -1,31 +0,0 @@
-type 'a t = {len: int; mutable arr: 'a array}
-
-let length s = s.len
-
-let capacity s = Array.length s.arr
-
-let make n = {len = 0; arr = Array.make n (Obj.magic 0)}
-
-let singleton v = {len = 1; arr = [|v|]}
-
-let get s i =
- if i < 0 || i >= s.len then
- invalid_arg "Shadow_stack.get"
- else
- s.arr.(len - i - 1)
-
-let reserve s n =
- let cap = capacity s in
- let rem = cap - s.len in
- if n > rem then begin
- let need = n - rem in
- let cap = cap + max cap need in (* Ensure cap at least doubles. *)
- let arr' = Array.make cap (Obj.magic 0) in
- Array.blit s.arr 0 arr' 0 s.len;
- s.arr <- arr'
- end
-
-let push s v =
- reserve s 1;
- s.arr.(len) <- v;
- Ss (s.len + 1, s.arr)
diff --git a/tsil.ml b/tsil.ml
@@ -8,6 +8,8 @@
* if a tsil represents a context and a list represents a reversed context,
* then it is impossible to accidentally reverse a context, because that would
* amount to an OCaml type error. *)
+(* TODO: This comment overstates how much we use tsils. We've mostly switched
+ * to arrays. *)
type 'a t = Lin | Snoc of 'a t * 'a
diff --git a/uniq.ml b/uniq.ml
@@ -4,14 +4,14 @@ type t = uniq
exception Exhaustion
let fresh =
- let i = ref (-1) in
- fun () ->
- if !i = max_int then
+ let k = ref (-1) in
+ fun n ->
+ if !k > max_int - n then
raise Exhaustion (* This should never happen in practice. *)
else
- (incr i; Uniq !i)
-
-let fresh_array n = Array.init n (fun _ -> fresh ())
+ let arr = Array.init n (fun i -> Uniq (!k + i)) in
+ k := !k + n;
+ arr
module Map = Map.Make(struct
type t = uniq
diff --git a/value.ml b/value.ml
@@ -4,10 +4,10 @@ module I = Internal
type value =
(*| Switch of ...*)
| App of value * value array
-| Fn of I.fn closure
-| FnType of I.fntype closure
+| FnClosure of value Env.environment * I.fn
+| FnTypeClosure of value Env.environment * I.fntype
| Data of Tag.t * value array
-| DataType of I.datatype closure
+| DataTypeClosure of value Env.environment * I.datatype
| Nat of Z.t
| Arb of Uniq.t
(*| Meta of Uniq.t*)
@@ -16,9 +16,6 @@ type value =
| Lazy of value Lazy.t (* TODO: See comment in eval.ml about optimization. *)
| Future of Uniq.t * int
-(* TODO: Does this belong in the Env module? *)
-and 'a closure = Closure of value Env.environment * 'a
-
(*
let rec to_string m e =
let s = to_string in