commit c5310a3a45f9e009878f3244b461931fdd547542
parent b6e9ede1270d59ca6bde8e6e1cd1885550586527
Author: Robert Russell <robertrussell.72001@gmail.com>
Date: Tue, 16 Jan 2024 21:19:31 -0800
Get some stuff in history before deleting it
Diffstat:
| M | Makefile | | | 17 | ++++++++++------- |
| A | TODO.txt | | | 0 | |
| M | abstract.ml | | | 39 | ++++++++++++++++++++++++++++----------- |
| M | concrete.ml | | | 42 | +++++++++++++++++++++++++++++------------- |
| M | ctx.ml | | | 22 | +++++++++++----------- |
| M | desugar.ml | | | 273 | +++++++++++++++++++++++++++++++++++++++++++++++++++---------------------------- |
| M | elab.ml | | | 788 | ++++++++++++++++++++++++++++++++++++++++++++++++++++++------------------------- |
| M | env.ml | | | 59 | +++++++++++++++++++++++++++++++++++++++++++++++++---------- |
| A | error.ml | | | 96 | +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ |
| M | eval.ml | | | 107 | ++++++++++++++----------------------------------------------------------------- |
| D | global.ml | | | 39 | --------------------------------------- |
| A | inc_array.ml | | | 16 | ++++++++++++++++ |
| M | index.ml | | | 49 | ++++++------------------------------------------- |
| M | internal.ml | | | 264 | +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++---------- |
| M | lexer.mll | | | 71 | +++++++++++++++++++++++++++++++++++++---------------------------------- |
| M | loc.ml | | | 27 | ++++++++++++++++++++------- |
| M | main.ml | | | 80 | +++++++++++++++++++++++++++++++++++++++++++++++++------------------------------ |
| M | name.ml | | | 37 | ++++++++++++++++++++++++++++++------- |
| M | node.ml | | | 10 | +--------- |
| M | parser.mly | | | 272 | +++++++++++++++++++++++++++++++++++++++++++++---------------------------------- |
| M | plicity.ml | | | 4 | ---- |
| M | pretty.ml | | | 23 | +++++++++++++++++------ |
| A | quote.ml | | | 61 | +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ |
| M | repr.ml | | | 40 | +++++++++++++++++++++++++++++++--------- |
| D | tag.ml | | | 9 | --------- |
| M | tsil.ml | | | 3 | +-- |
| A | unify.ml | | | 76 | ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ |
| M | uniq.ml | | | 74 | +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++------- |
| A | util.ml | | | 16 | ++++++++++++++++ |
| M | value.ml | | | 53 | ++++++++++++++++++----------------------------------- |
30 files changed, 1790 insertions(+), 877 deletions(-)
diff --git a/Makefile b/Makefile
@@ -1,24 +1,27 @@
SRC = \
+ util.ml \
tsil.ml \
- pretty.ml \
+ inc_array.ml \
name.ml \
- tag.ml \
uniq.ml \
index.ml \
+ plicity.ml \
loc.ml \
node.ml \
- plicity.ml \
- global.ml \
+ pretty.ml \
env.ml \
ctx.ml \
concrete.ml \
- parser.ml \
- lexer.ml \
abstract.ml \
- desugar.ml \
internal.ml \
value.ml \
+ error.ml \
+ parser.ml \
+ lexer.ml \
+ desugar.ml \
eval.ml \
+ quote.ml \
+ unify.ml \
elab.ml \
main.ml
diff --git a/TODO.txt b/TODO.txt
diff --git a/abstract.ml b/abstract.ml
@@ -1,7 +1,13 @@
-open Global
-
(* Abstract syntax is "desugared" concrete syntax. See desugar.ml. *)
+open Node
+
+type var_name = var_name' node
+and var_name' = Name.Var.t
+
+type ctor_name = ctor_name' node
+and ctor_name' = Name.Ctor.t
+
type term = term' node
and term' =
| Let of definer array * term
@@ -10,10 +16,10 @@ and term' =
| App of term * term array
| Fn of binder array * term
| FnType of binder array * term
-| Data of Tag.t * term array
-| DataType of ctor Tag.Map.t
+| Data of ctor_name * term array
+| DataType of ctor Name.Ctor.Map.t
| Nat of Z.t
-| Var of Name.t * int
+| Var of var_name * int
| Type (* Impossible to write in concrete syntax. Arises during desugaring. *)
and definer = definer' node
@@ -24,7 +30,10 @@ and definer' = {
}
and case = case' node
-and case' = Case of patt * term
+and case' = {
+ patt: patt;
+ body: term;
+}
and binder = binder' node
and binder' = {
@@ -37,8 +46,16 @@ and ctor = ctor' node
and ctor' = Ctor of binder array
and patt = patt' node
-and patt' =
-| PWild
-| PBind of Name.t
-| PAnnot of patt * term
-| PData of Tag.t * patt array
+and patt' = {
+ names: var_name array;
+ typs: term array;
+ disc: discriminant;
+}
+
+and discriminant = discriminant' node
+and discriminant' =
+| DWild
+| DData of {
+ tag: ctor_name;
+ subpatts: patt array;
+}
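+
+(* Illustrative sketch of this encoding (the concrete syntax and names below
+ * are only examples): an annotated bind such as `x : Nat` desugars to
+ *   {names = [|x|]; typs = [|Nat|]; disc = DWild}
+ * while a constructor pattern such as `Cons h t` desugars to
+ *   {names = [||]; typs = [||]; disc = DData {tag = Cons; subpatts = ...}},
+ * each subpattern carrying its own names/typs arrays.
+ * See the patt and discriminant functions in desugar.ml. *)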
diff --git a/concrete.ml b/concrete.ml
@@ -1,25 +1,36 @@
-open Global
+open Node
+
+type binop =
+| LOr | LAnd
+| Lt | Le | Eq | Ge | Gt | Ne
+| Add | Sub | Mul | Div | Mod
+
+type var_name = var_name' node
+and var_name' = Name.Var.t
+
+type ctor_name = ctor_name' node
+and ctor_name' = Name.Ctor.t
type term = term' node
and term' =
| Let of definer array * term
| Seq of term * term
| Match of term * case array
-| If of term * term * term
-| Iff of term * term
+| If of cond * term * term
+| Iff of cond * term
| Annot of term * term
-| Add of term * term
-| Mul of term * term
+| BinOp of term * binop * term
| App of term * term array
| Fn of param array * term
| FnType of domain array * term
-| Variant of Tag.t * term array
+| Variant of ctor_name * term array
| VariantType of ctor array
| Tuple of term array
| TupleType of domain array
| Unit
| Nat of Z.t
-| Var of Name.t * int option
+| IndexedVar of var_name * int
+| Var of var_name
| Paren of term
and definer = definer' node
@@ -30,24 +41,29 @@ and definer' =
and case = case' node
and case' = Case of patt * term
+and cond = cond' node
+and cond' =
+| TrueCond of term
+| PattCond of patt * term
+
and param = param' node
and param' = Param of Plicity.t * patt
and domain = domain' node
and domain' =
-| DExplicit of term
-| DImplicitType of patt
-| DAnnot of Plicity.t * patt * term
+| ExplicitDomain of term
+| ImplicitTypeDomain of patt
+| AnnotDomain of Plicity.t * patt * term
and ctor = ctor' node
-and ctor' = Ctor of Tag.t * domain array
+and ctor' = Ctor of ctor_name * domain array
and patt = patt' node
and patt' =
| PWild
-| PBind of Name.t
+| PBind of var_name
| PAnnot of patt * term
-| PVariant of Tag.t * patt array
+| PVariant of ctor_name * patt array
| PTuple of patt array
| PUnit
| PNat of Z.t
diff --git a/ctx.ml b/ctx.ml
@@ -1,4 +1,4 @@
-open Global
+open Node
(* TODO: Somewhere we need to forbid multi-binders and multi-definers from
* declaring the same name twice. *)
@@ -10,12 +10,12 @@ type 'a assignment = {
}
type 'a context = {
- assignments: 'a assignment tsil Name.Map.t; (* XXX: tsil! Yuck! *)
+ assignments: 'a assignment Tsil.t Name.Var.Map.t; (* XXX: tsil! Yuck! *)
env: 'a Env.environment;
}
let empty = {
- assignments = Name.Map.empty;
+ assignments = Name.Var.Map.empty;
env = Env.empty;
}
@@ -23,9 +23,9 @@ let assign ctx names typ value =
let asn = {typ; value; active = true} in
let shadow ctx {data = name; _} =
let add_to_tsil = function
- | None -> Some (Snoc (Lin, asn))
- | Some t -> Some (Snoc (t, asn)) in
- let assignments = Name.Map.update name add_to_tsil ctx.assignments in
+ | None -> Some (Tsil.Snoc (Tsil.Lin, asn))
+ | Some t -> Some (Tsil.Snoc (t, asn)) in
+ let assignments = Name.Var.Map.update name add_to_tsil ctx.assignments in
{ctx with assignments} in
(Array.fold_left shadow ctx names, asn)
@@ -34,13 +34,13 @@ let bind ctx vars = {ctx with env = Env.bind ctx.env vars}
let define ctx var value = {ctx with env = Env.define ctx.env var value}
let lookup ctx name i =
- match Name.Map.find_opt name ctx.assignments with
+ match Name.Var.Map.find_opt name ctx.assignments with
| Some t ->
let rec go i t = match i, t with
- | 0, Snoc (_, ({active = true; _} as asn)) -> Some asn
- | i, Snoc (t, {active = true; _}) -> go (i - 1) t
- | i, Snoc (t, {active = false; _}) -> go i t
- | _, Lin -> None in
+ | 0, Tsil.Snoc (_, ({active = true; _} as asn)) -> Some asn
+ | i, Tsil.Snoc (t, {active = true; _}) -> go (i - 1) t
+ | i, Tsil.Snoc (t, {active = false; _}) -> go i t
+ | _, Tsil.Lin -> None in
go i t
| None -> None
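
(* Illustrative sketch of the lookup semantics: if a name has assignments
 * a0 (inactive), a1, a2, snoc'd in that order, then lookup ctx name 0
 * returns a2, lookup ctx name 1 returns a1, and the inactive a0 is skipped
 * without consuming an index, so lookup ctx name 2 returns None. *)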
diff --git a/desugar.ml b/desugar.ml
@@ -1,113 +1,194 @@
-open Global
+open Util
+open Node
+open Error
module C = Concrete
module A = Abstract
-exception Error of Loc.t * string
-let error loc msg = raise (Error (loc, msg))
-
-let patt_wild = Loc.None @$ A.PWild
-let patt_true = Loc.None @$ A.PData (Tag.Of "true", [||])
-let term_unit = Loc.None @$ A.Data (Tag.Of "", [||])
-let term_unit_typ =
- Loc.None @$ A.DataType (Tag.Map.singleton (Tag.Of "") (Loc.None @$ A.Ctor [||]))
-let patt_wild_unit =
- Loc.None @$ A.PAnnot (patt_wild, term_unit_typ)
-
-let rec term = function
-(* Handle parens specially, so that we use the inner loc. *)
-| {data = C.Paren t; _} -> term t
-| {loc; data} -> loc @$ term' loc data
-and term' loc = function
-| C.Let (definers, body) -> A.Let (Array.map definer definers, term body)
-(* TODO: Maybe Seq should desugar to a unit pattern instead, so that
- * non-unit results can't be ignored. *)
+let ensure_distinct names =
+ let declare name_set {loc; data = name} =
+ if Name.Var.Set.mem name name_set then
+ abort loc (EDesugarDuplicateBinderName name)
+ else
+ Name.Var.Set.add name name_set in
+ ignore @@ Array.fold_left declare Name.Var.Set.empty names
+
+let add2 (x0, y0) (x1, y1) = (x0 + x1, y0 + y1)
+
+let rec bind_and_annot_count {data = p; _} = match p with
+| C.PWild -> (0, 0)
+| C.PBind _ -> (1, 0)
+| C.PAnnot (p, _) -> add2 (bind_and_annot_count p) (0, 1)
+| C.PVariant _ -> (0, 0)
+| C.PTuple _ -> (0, 0)
+| C.PUnit -> (0, 0)
+| C.PNat _ -> (0, 0)
+| C.PAnd (p, q) -> add2 (bind_and_annot_count p) (bind_and_annot_count q)
+| C.POr _ -> no_impl "or patterns"
+| C.PParen p -> bind_and_annot_count p
+
+let synth_var_name name = Loc.Nowhere @$ Name.Var.Of name
+
+let synth_ctor_name name = Loc.Nowhere @$ Name.Ctor.Of name
+
+let synth_ctor binders = Loc.Nowhere @$ A.Ctor binders
+
+let synth_term_var name i = Loc.Nowhere @$ A.Var (synth_var_name name, i)
+let synth_term_unit_type =
+ let ctor = synth_ctor [||] in
+ let ctors = Name.Ctor.Map.singleton (Name.Ctor.Of "0") ctor in
+ Loc.Nowhere @$ A.DataType ctors
+let synth_term_unit = Loc.Nowhere @$ A.Data (synth_ctor_name "0", [||])
+let synth_term_true = Loc.Nowhere @$ A.Data (synth_ctor_name "true", [||])
+let synth_term_false = Loc.Nowhere @$ A.Data (synth_ctor_name "false", [||])
+
+let synth_case patt body = Loc.Nowhere @$ {A.patt; A.body}
+
+let synth_binder plicity patt typ = Loc.Nowhere @$ {A.plicity; A.patt; A.typ}
+
+let synth_patt names typs disc =
+ Loc.Nowhere @$ {A.names; A.typs; A.disc = Loc.Nowhere @$ disc}
+let synth_patt_wild = synth_patt [||] [||] A.DWild
+let synth_patt_wild_unit = synth_patt [||] [|synth_term_unit_type|] A.DWild
+let synth_patt_data names typs tag subpatts =
+  synth_patt names typs @@ A.DData {
+ tag = synth_ctor_name tag;
+ subpatts;
+ }
+let synth_patt_true = synth_patt_data [||] [||] "true" [||]
+let synth_patt_false = synth_patt_data [||] [||] "false" [||]
+
+let binop = function
+| C.LOr | C.LAnd ->
+  impossible "Desugar.binop: LOr and LAnd are handled specially"
+| C.Lt -> "`<"
+| C.Le -> "`<="
+| C.Eq -> "`="
+| C.Ge -> "`>="
+| C.Gt -> "`>"
+| C.Ne -> "`<>"
+| C.Add -> "`+"
+| C.Sub -> "`-"
+| C.Mul -> "`*"
+| C.Div -> "`/"
+| C.Mod -> "`%"
+
+let rec term {loc; data = t} = match t with
+| C.Let (definers, body) ->
+ loc @$ A.Let (Array.map definer definers, term body)
| C.Seq (t, u) ->
- let definer = Loc.None @$ {
+ let definer = Loc.Nowhere @$ {
A.recursive = false;
- A.lhs = patt_wild;
+ A.lhs = synth_patt_wild_unit;
A.rhs = term t;
} in
- A.Let ([|definer|], term u)
-| C.Match (scrutinee, cases) -> A.Match (term scrutinee, Array.map case cases)
-| C.If (c, t, f) ->
- A.Match (term c, [|
- Loc.None @$ A.Case (patt_true, term t);
- Loc.None @$ A.Case (patt_wild, term f);
- |])
-| C.Iff (c, t) ->
- A.Match (term c, [|
- Loc.None @$ A.Case (patt_true, term t);
- Loc.None @$ A.Case (patt_wild, term_unit);
- |])
-| C.Annot (t, u) -> A.Annot (term t, term u)
-(* TODO: Idea: Modify variables in abstract syntax to have a level instead of
- * an index (encoded as a negative index) and desugar binary operators to
- * level 0 (i.e., index -1), so that they always refer to the top-level/
- * built-in operations. *)
-| C.Add (t, u) -> A.App (Loc.None @$ A.Var (Name.Of "`+", 0), [|term t; term u|])
-| C.Mul (t, u) -> A.App (Loc.None @$ A.Var (Name.Of "`*", 0), [|term t; term u|])
-| C.App (fn, args) -> A.App (term fn, Array.map term args)
+ loc @$ A.Let ([|definer|], term u)
+| C.Match (scrutinee, cases) ->
+ loc @$ A.Match (term scrutinee, Array.map case cases)
+| C.If ({data = C.TrueCond s; _}, t, f) ->
+ loc @$ term_if synth_patt_true (term s) (term t) (term f)
+| C.If ({data = C.PattCond (p, s); _}, t, f) ->
+ loc @$ term_if (patt p) (term s) (term t) (term f)
+| C.Iff ({data = C.TrueCond s; _ }, t) ->
+ loc @$ term_if synth_patt_true (term s) (term t) synth_term_unit
+| C.Iff ({data = C.PattCond (p, s); _}, t) ->
+ loc @$ term_if (patt p) (term s) (term t) synth_term_unit
+| C.Annot (t, u) -> loc @$ A.Annot (term t, term u)
+| C.BinOp (t, C.LOr, u) ->
+ loc @$ term_if synth_patt_true (term t) synth_term_true (term u)
+| C.BinOp (t, C.LAnd, u) ->
+ loc @$ term_if synth_patt_true (term t) (term u) synth_term_false
+| C.BinOp (t, o, u) ->
+ loc @$ A.App (synth_term_var (binop o) 0, [|term t; term u|])
+| C.App (fn, args) -> loc @$ A.App (term fn, Array.map term args)
| C.Fn (params, body) ->
let params =
if Array.length params = 0 then
- [|Loc.None @$ {
- A.plicity = Plicity.Ex;
- A.patt = patt_wild_unit;
- A.typ = None;
- }|]
+ [|synth_binder Plicity.Ex synth_patt_wild_unit None|]
else
Array.map param params in
- A.Fn (params, term body)
-| C.FnType (doms, cod) -> A.FnType (Array.map domain doms, term cod)
-| C.Variant (tag, fields) -> A.Data (tag, Array.map term fields)
+ loc @$ A.Fn (params, term body)
+| C.FnType (doms, cod) -> loc @$ A.FnType (Array.map domain doms, term cod)
+| C.Variant (tag, fields) -> loc @$ A.Data (tag, Array.map term fields)
| C.VariantType ctors ->
let insert m {loc = ctor_loc; data = C.Ctor (tag, doms)} =
- if Tag.Map.mem tag m then
- error ctor_loc "duplicate constructor"
+ if Name.Ctor.Map.mem tag.data m then
+ abort tag.loc (EDesugarDuplicateCtor tag.data)
else
let ctor = ctor_loc @$ A.Ctor (Array.map domain doms) in
- Tag.Map.add tag ctor m in
- let ctors = Array.fold_left insert Tag.Map.empty ctors in
- A.DataType ctors
-| C.Tuple fields -> A.Data (Tag.Of "", Array.map term fields)
+ Name.Ctor.Map.add tag.data ctor m in
+ let ctors = Array.fold_left insert Name.Ctor.Map.empty ctors in
+ loc @$ A.DataType ctors
+| C.Tuple fields ->
+ let tag = synth_ctor_name (string_of_int (Array.length fields)) in
+ loc @$ A.Data (tag, Array.map term fields)
| C.TupleType doms ->
- let ctor = loc @$ A.Ctor (Array.map domain doms) in
- A.DataType (Tag.Map.singleton (Tag.Of "") ctor)
-| C.Unit -> A.Data (Tag.Of "", [||])
-| C.Nat n -> A.Nat n
-| C.Var (x, Some i) -> A.Var (x, i)
-| C.Var (x, None) -> A.Var (x, 0)
-| C.Paren _ -> impossible ()
-
-and definer d = Node.map definer' d
-and definer' = function
-| C.Definer (lhs, rhs) -> {recursive = false; lhs = patt lhs; rhs = term rhs}
-| C.RecDefiner (lhs, rhs) -> {recursive = true; lhs = patt lhs; rhs = term rhs}
-
-and case c = Node.map case' c
-and case' (C.Case (p, t)) = A.Case (patt p, term t)
-
-and param par = Node.map param' par
-and param' (C.Param (plicity, p)) = {plicity; patt = patt p; typ = None}
-
-and domain dom = Node.map domain' dom
-and domain' = function
-| C.DExplicit t -> {plicity = Plicity.Ex; patt = Loc.None @$ A.PWild; typ = Some (term t)}
-| C.DImplicitType p -> {plicity = Plicity.Im; patt = patt p; typ = Some (Loc.None @$ A.Type)}
-| C.DAnnot (plicity, p, t) -> {plicity; patt = patt p; typ = Some (term t)}
-
-and patt = function
-(* Handle parens specially, so that we use the inner loc. *)
-| {data = C.PParen p; _} -> patt p
-| n -> Node.map patt' n
-and patt' = function
-| C.PWild -> A.PWild
-| C.PBind x -> A.PBind x
-| C.PAnnot (p, t) -> A.PAnnot (patt p, term t)
-| C.PVariant (tag, ps) -> A.PData (tag, Array.map patt ps)
-| C.PTuple ps -> A.PData (Tag.Of "", Array.map patt ps)
-| C.PUnit -> A.PData (Tag.Of "", [||])
-| C.PNat _ -> failwith "no impl: nat patterns"
-| C.PAnd _ -> failwith "no impl: and patterns"
-| C.POr _ -> failwith "no impl: or patterns"
-| C.PParen _ -> impossible ()
+ let tag = Name.Ctor.Of (string_of_int (Array.length doms)) in
+ let ctor = synth_ctor (Array.map domain doms) in
+ loc @$ A.DataType (Name.Ctor.Map.singleton tag ctor)
+| C.Unit -> loc @$ A.Data (synth_ctor_name "0", [||])
+| C.Nat n -> loc @$ A.Nat n
+| C.IndexedVar (x, i) -> loc @$ A.Var (x, i)
+| C.Var x -> loc @$ A.Var (x, 0)
+| C.Paren t -> term t
+
+and term_if p s t f =
+ A.Match (s, [|
+ synth_case p t;
+ synth_case synth_patt_wild f;
+ |])
+
+and definer {loc; data = d} = loc @$ match d with
+| C.Definer (lhs, rhs) ->
+ {A.recursive = false; A.lhs = patt lhs; A.rhs = term rhs}
+| C.RecDefiner (lhs, rhs) ->
+ {A.recursive = true; A.lhs = patt lhs; A.rhs = term rhs}
+
+and case {loc; data = C.Case (p, t)} =
+ loc @$ {A.patt = patt p; A.body = term t}
+
+and param {loc; data = C.Param (plicity, p)} =
+ loc @$ {A.plicity; A.patt = patt p; A.typ = None}
+
+and domain {loc; data = dom} = loc @$ match dom with
+| C.ExplicitDomain t ->
+ {A.plicity = Plicity.Ex; A.patt = synth_patt_wild; A.typ = Some (term t)}
+| C.ImplicitTypeDomain p ->
+ {A.plicity = Plicity.Im; A.patt = patt p; A.typ = Some (Loc.Nowhere @$ A.Type)}
+| C.AnnotDomain (plicity, p, t) ->
+ {A.plicity; A.patt = patt p; A.typ = Some (term t)}
+
+and patt p =
+ let (bind_count, annot_count) = bind_and_annot_count p in
+ let names = Inc_array.make bind_count in
+ let typs = Inc_array.make annot_count in
+ let disc = discriminant names typs p in
+ let names = Inc_array.to_array names in
+ let typs = Inc_array.to_array typs in
+ ensure_distinct names;
+ p.loc @$ {A.names; A.typs; A.disc}
+
+and discriminant names typs {loc; data = p} = match p with
+| C.PWild -> loc @$ A.DWild
+| C.PBind name ->
+ Inc_array.add names name;
+ loc @$ A.DWild
+| C.PAnnot (p, typ) ->
+ Inc_array.add typs (term typ);
+ discriminant names typs p
+| C.PVariant (tag, ps) -> loc @$ A.DData {tag; subpatts = Array.map patt ps}
+| C.PTuple ps ->
+ let tag = synth_ctor_name (string_of_int (Array.length ps)) in
+ loc @$ A.DData {tag; subpatts = Array.map patt ps}
+| C.PUnit -> loc @$ A.DData {tag = synth_ctor_name "0"; subpatts = [||]}
+| C.PNat _ -> no_impl "nat patterns"
+| C.PAnd (p, q) ->
+ let pdisc = discriminant names typs p in
+ let qdisc = discriminant names typs q in
+ begin match pdisc.data, qdisc.data with
+ | A.DWild, A.DWild -> loc @$ A.DWild
+ | A.DWild, A.DData _ -> qdisc
+ | A.DData _, A.DWild -> pdisc
+ | A.DData _, A.DData _ -> abort loc EDesugarDataAndData
+ end
+| C.POr _ -> no_impl "or patterns"
+| C.PParen p -> discriminant names typs p
diff --git a/elab.ml b/elab.ml
@@ -1,143 +1,174 @@
-open Global
+open Util
+open Node
+open Error
open Eval
+open Quote
+open Unify
module A = Abstract
module I = Internal
module V = Value
(*module R = Porig.NoneOneTons*)
-type error = error' node
-and error' =
-| ErrNonInferablePatt of A.patt
-| ErrCantInferData
-| ErrNoLetAnnot of error
-| ErrNoBinderAnnot of error
-| ErrUnannotImplicitIsType of error
-| ErrApplyNonFunction of V.value
-| ErrUnifyFail of V.value * V.value
-| ErrDataPattRefutable
-| ErrBinderCycle of Name.t list
-| ErrArityMismatch
-| ErrUnboundVar of Name.t * int
-| ErrExpectedFnType of V.value
-| ErrExpectedDataType of V.value
-| ErrFnArityMismatch
-| ErrFnPlicityMismatch
-| ErrUnexpectedCtor of Tag.t
-| ErrDuplicateName of Name.t
-
-let rec string_of_error {loc; data = e} =
- Loc.to_string loc ^ " " ^ string_of_error' e
-and string_of_error' = function
-| ErrNonInferablePatt _ -> "ErrNonInferablePatt"
-| ErrCantInferData -> "ErrCantInferData"
-| ErrNoLetAnnot _ -> "ErrNoLetAnnot"
-| ErrNoBinderAnnot _ -> "ErrNoBinderAnnot"
-| ErrUnannotImplicitIsType _ -> "ErrUnannotImplicitIsType"
-| ErrApplyNonFunction _ -> "ErrApplyNonFunction"
-| ErrUnifyFail _ -> "ErrUnifyFail"
-| ErrDataPattRefutable -> "ErrDataPattRefutable"
-| ErrBinderCycle _ -> "ErrBinderCycle"
-| ErrArityMismatch -> "ErrArityMismatch"
-| ErrUnboundVar _ -> "ErrUnboundVar"
-| ErrExpectedFnType _ -> "ErrExpectedFnType"
-| ErrExpectedDataType _ -> "ErrExpectedDataType"
-| ErrFnArityMismatch -> "ErrFnArityMismatch"
-| ErrFnPlicityMismatch -> "ErrFnPlicityMismatch"
-| ErrUnexpectedCtor _ -> "ErrUnexpectedCtor"
-| ErrDuplicateName _ -> "ErrDuplicateName"
-
-exception Exn of error
-
-let abort e = raise (Exn e)
-
-exception BlockedOnFuture of Uniq.t * int * Name.t
+exception BlockedOnFuture of Uniq.uniq * int * Name.Var.t
+
+type context = V.value Ctx.context
+
+type mode =
+| Infer
+| Check of V.value
(* Convention: An elaboration-related function can have two versions, one whose
* name ends with a prime, which can throw an Exn, and one whose name does not
* end with a prime, which returns in the result monad.
* TODO: Make sure every function has both versions, even if unused? *)
-let declare_name' name_set {loc; data = name} =
- if Name.Set.mem name name_set then
- abort (loc @$ ErrDuplicateName name)
+(* TODO: We check for name distinctness in two places; unify them. *)
+let declare_name'
+: Name.Var.Set.t -> Name.Var.t node -> Name.Var.Set.t
+= fun name_set {loc; data = name} ->
+ if Name.Var.Set.mem name name_set then
+ abort loc @@ EElabDuplicateName name
else
- Name.Set.add name name_set
-
-let rec infer ctx t =
- try Ok (infer' ctx t) with
- | Exn e -> Error e
-and infer' (ctx: 'a Ctx.context) {loc; data = t} = match t with
-| A.Let (definers, body) ->
- let (ctx, definers) = infer_definers' ctx definers in
- let (body, vtyp) = infer' ctx body in
- (I.Let (definers, body), vtyp)
-
-| A.Match (scrutinee, cases) -> no_impl ""
- (*let (scrutinee, vtyp) = infer' ctx scrutinee in *)
+ Name.Var.Set.add name name_set
+
+(* XXX: Misleading name. This also binds and defines things. *)
+let rec assign_patt
+: context -> I.patt -> V.value -> Uniq.uniq -> context
+= fun ctx {names; disc} vtyp var ->
+ let (ctx, _) = Ctx.assign ctx names vtyp (V.Arb var) in
+
+ match disc with
+ | I.DWild -> ctx
+ | I.DData {tag; tagi; subpatts; datatype} ->
+ let arity = Array.length subpatts in
+ let subvars = Uniq.fresh arity in
+ let data = V.Data {
+ tag;
+ tagi;
+ args = Array.init arity (fun i -> V.Arb (Uniq.get subvars i));
+ clo = ctx.env; (* Environment before binding *)
+ datatype;
+ } in
+ let ctx = Ctx.bind ctx subvars in
+ let ctx = Ctx.define ctx var data in
+
+ let ctx = ref ctx in
+ for i = 0 to arity - 1 do
+ (* XXX: Do we need to do this in type-checking order? *)
+ let subtyp = datatype.ctors.(tagi).binders.(i).typ in
+ let vsubtyp = eval !ctx.env subtyp in
+ ctx := assign_patt !ctx subpatts.(i) vsubtyp (Uniq.get subvars i)
+ done;
-| A.Annot (t, a) ->
- let a = check' ctx a V.Type in
- let va = eval ctx.env a in
- let t = check' ctx t va in
- (t, va)
+ !ctx
-| A.App (fn, explicit_args) ->
- let (fn, va) = infer' ctx fn in
- begin match force ctx.env va with
- | V.FnTypeClosure (clo, {binders; codomain}) ->
- let (args, clo) = check_args' ctx loc explicit_args clo binders in
- let vcodomain = eval clo codomain in
- (I.App (fn, args), vcodomain)
- | va -> abort (loc @$ ErrApplyNonFunction va)
- end
+(* Elaborate explicits to Some and implicits to None. *)
+let elaborate_args'
+: 'a. Loc.t -> 'a array -> I.binder array -> 'a option array
+= fun loc explicit_args binders ->
+ let arity = Array.length binders in
+ let explicit_count = Array.length explicit_args in
+ let implicit_count = ref 0 in
-| A.Fn (params, body) ->
- let (ctx', binders) = infer_binders' ctx loc params in
- let (body, vcodomain) = infer' ctx' body in
- let codomain = quote ctx'.env vcodomain in
- let fn = I.Fn {binders; body; codomain} in
- let vfntype = V.FnTypeClosure (ctx.env, {binders; codomain}) in
- (fn, vfntype)
+ let args = Array.make arity garbage in
+ for i = 0 to arity - 1 do
+ (* TODO: Once we allow passing an implicit argument explicitly,
+ * this logic will need to change a bit. *)
+ args.(i) <- match binders.(i).plicity with
+ | Plicity.Ex ->
+ let j = i - !implicit_count in
+ begin if explicit_count <= j then
+ abort loc EElabArityMismatch
+ end;
+ Some (explicit_args.(j))
+ | Plicity.Im ->
+ incr implicit_count;
+ None
+ done;
-| A.FnType (domains, codomain) ->
- let (ctx, binders) = infer_binders' ctx loc domains in
- let codomain = check' ctx codomain V.Type in
- (I.FnType {binders; codomain}, V.Type)
+ (* We had enough explicit args, but now ensure we don't have too many. *)
+ begin if explicit_count > arity - !implicit_count then
+ abort loc EElabArityMismatch
+ end;
-| A.Data _ -> abort (loc @$ ErrCantInferData)
+ args
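+
+(* Worked example (illustrative): for binders with plicities [|Im; Ex; Ex|]
+ * and two explicit arguments a and b, the result is [|None; Some a; Some b|];
+ * a third explicit argument would make explicit_count exceed
+ * arity - implicit_count and trigger EElabArityMismatch. *)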
-| A.DataType ctors ->
- let ctors = Tag.Map.map (infer_ctor' ctx) ctors in
- (I.DataType ctors, V.Type)
+let rec infer
+: context -> A.term -> (I.term * V.value, error) result
+= fun ctx t ->
+ try Ok (infer' ctx t) with
+ | Exn e -> Error e
-| A.Nat n -> (I.Nat n, V.Builtin I.NatType)
+and infer'
+: context -> A.term -> I.term * V.value
+= fun ctx t -> elaborate' ctx t Infer
-| A.Var (name, i) ->
- begin match Ctx.lookup ctx name i with
- | Some {typ = V.Future (id, j); _} -> raise (BlockedOnFuture (id, j, name))
- | Some {typ; value; _} -> (quote ctx.env value, typ)
- | None -> abort (loc @$ ErrUnboundVar (name, i))
- end
+and check
+: context -> A.term -> V.value -> (I.term, error) result
+= fun ctx t vtyp ->
+ try Ok (check' ctx t vtyp) with
+ | Exn e -> Error e
-| A.Type -> (I.Type, V.Type)
+and check'
+: context -> A.term -> V.value -> I.term
+= fun ctx t vtyp ->
+ let (t, _) = elaborate' ctx t (Check (force ctx.env vtyp)) in
+ t
-and check ctx t vtyp =
- try Ok (check' ctx t vtyp) with
+and elaborate
+: context -> A.term -> mode -> (I.term * V.value, error) result
+= fun ctx t mode ->
+ try Ok (elaborate' ctx t mode) with
| Exn e -> Error e
-and check' ctx ({loc; data = t} as node) vtyp = match t, force ctx.env vtyp with
-| A.Let (definers, body), vtyp ->
+
+and elaborate'
+: context -> A.term -> mode -> I.term * V.value
+= fun ctx {loc; data = t} mode -> match t, mode with
+| A.Let (definers, body), mode ->
let (ctx, definers) = infer_definers' ctx definers in
- let body = check' ctx body vtyp in
- I.Let (definers, body)
+ let (body, vtyp) = elaborate' ctx body mode in
+ (I.Let (definers, body), vtyp)
-| A.Match _, _ -> no_impl "check match"
+| A.Match (scrutinee, cases), mode -> no_impl "elaborate match"
+ (*
+ let (scrutinee, scrutinee_vtyp) = infer' ctx scrutinee in
+ let ncases = Array.length cases in
+ let cases' = Array.make ncases garbage in
+ for i = 0 to ncases - 1 do
+ let (names, disc) = check_patt' ctx patt scrutinee_vtyp in
+ done
+ *)
+
+| A.Annot (tm, typ), Infer ->
+ let typ = check' ctx typ V.Type in
+ let vtyp = eval ctx.env typ in
+ let tm = check' ctx tm vtyp in
+ (tm, vtyp)
+
+| A.App (fn, explicit_args), Infer ->
+ let (fn, vtyp) = infer' ctx fn in
+ begin match force ctx.env vtyp with
+ | V.FnTypeClosure (clo, {multibinder; codomain}) ->
+ let (args, clo) = check_args' ctx loc explicit_args clo multibinder in
+ let vcodomain = eval clo codomain in
+ (I.App (fn, args), vcodomain)
+ | vtyp -> abort loc @@ EElabApplyNonFunction (quote ctx.env vtyp)
+ end
+
+| A.Fn (params, body), Infer ->
+ let (ctx', multibinder) = infer_multibinder' ctx loc params in
+ let (body, vcodomain) = infer' ctx' body in
+ let codomain = quote ctx'.env vcodomain in
+ let fn = I.Fn {multibinder; body; codomain} in
+ let vfntype = V.FnTypeClosure (ctx.env, {multibinder; codomain}) in
+ (fn, vfntype)
-| A.Fn (params, body), V.FnTypeClosure (clo, fntype) ->
- let {I.binders = {binders = domains; order}; I.codomain} = fntype in
+| A.Fn (params, body), Check (V.FnTypeClosure (clo, fntype) as vfntype) ->
+ let {I.multibinder = {binders = domains; order}; I.codomain} = fntype in
begin if Array.length params <> Array.length domains then
- abort (loc @$ ErrFnArityMismatch)
+ abort loc EElabFnArityMismatch
end;
let arity = Array.length params in
@@ -153,11 +184,12 @@ and check' ctx ({loc; data = t} as node) vtyp = match t, force ctx.env vtyp with
let domain = domains.(j) in
begin if Option.is_some param.typ then
- impossible ();
+ impossible "Elab.elaborate': params shouldn't have guaranteed \
+ type annotations";
end;
begin if param.plicity <> domain.plicity then
- abort (loc @$ ErrFnPlicityMismatch)
+ abort loc EElabFnPlicityMismatch
end;
let plicity = param.plicity in
@@ -165,92 +197,111 @@ and check' ctx ({loc; data = t} as node) vtyp = match t, force ctx.env vtyp with
let typ = quote !ctx.env vtyp in
let names = check_irrefutable_patt' !ctx param.patt vtyp in
- (* Assign the names in ctx. The other params may refer to them. *)
- let (ctx', _) = Ctx.assign !ctx names vtyp (V.Arb vars.(j)) in
+ let (ctx', _) = Ctx.assign !ctx names vtyp (V.Arb (Uniq.get vars j)) in
ctx := ctx';
- params'.(j) <- {I.plicity; I.typ}
+ params'.(j) <- {I.plicity; I.names; I.typ}
done;
let vcodomain = eval clo codomain in
let codomain = quote !ctx.env vcodomain in
let body = check' !ctx body vcodomain in
- I.Fn {binders = {binders = params'; order}; body; codomain}
+ (I.Fn {multibinder = {binders = params'; order}; body; codomain}, vfntype)
-| A.Fn _, vtyp -> abort (loc @$ ErrExpectedFnType vtyp)
+(*| A.Fn _, Check vtyp -> abort loc @@ ErrExpectedFnType vtyp*)
-| A.Data (tag, explicit_args), V.DataTypeClosure (clo, ctors) ->
- begin match Tag.Map.find_opt tag ctors with
- | Some binders ->
- let (args, _) = check_args' ctx loc explicit_args clo binders in
- I.Data (tag, args)
- | None ->
- abort (loc @$ ErrUnexpectedCtor tag)
+| A.FnType (domains, codomain), Infer ->
+ let (ctx, multibinder) = infer_multibinder' ctx loc domains in
+ let codomain = check' ctx codomain V.Type in
+ (I.FnType {multibinder; codomain}, V.Type)
+
+| A.Data _, Infer -> abort loc EElabCantInferData
+
+| A.Data ({data = tag; _}, explicit_args),
+ Check (V.DataTypeClosure (clo, datatype) as vdatatype) ->
+ begin match Name.Ctor.Map.find_opt tag datatype.names with
+ | Some tagi ->
+ let (args, _) = check_args' ctx loc explicit_args clo
+ datatype.ctors.(tagi) in
+ let datatype = quote_datatype ctx.env clo datatype in
+ (I.Data {tag; tagi; args; datatype}, vdatatype)
+ | None -> abort loc @@ EElabUnexpectedCtor tag
+ end
+
+(*| A.Data _, Check vtyp -> abort loc @@ ErrExpectedDataType vtyp*)
+
+| A.DataType ctors, Infer ->
+ let next = ref 0 in
+ let ctors' = Array.make (Name.Ctor.Map.cardinal ctors) garbage in
+ let go {data = A.Ctor binders; _} =
+ let i = !next in
+ incr next;
+ let (_, multibinder) = infer_multibinder' ctx loc binders in
+ ctors'.(i) <- multibinder;
+ i in
+ let names = Name.Ctor.Map.map go ctors in
+ (I.DataType {ctors = ctors'; names}, V.Type)
+
+| A.Nat n, Infer -> (I.Nat n, V.Builtin I.NatType)
+
+| A.Var ({data = name; _}, i), Infer ->
+ begin match Ctx.lookup ctx name i with
+ | Some {typ = V.Future (id, j); _} -> raise (BlockedOnFuture (id, j, name))
+ | Some {typ; value; _} -> (quote ctx.env value, typ)
+ | None -> abort loc @@ EElabUnboundVar (name, i)
end
-| A.Data _, vtyp -> abort (loc @$ ErrExpectedDataType vtyp)
+| A.Type, Infer -> (I.Type, V.Type)
-| (A.Annot _ | A.App _ | A.FnType _ | A.DataType _ | A.Nat _ | A.Var _ | A.Type), vexpected ->
- let (t, vinferred) = infer' ctx node in
+| (A.Annot _ | A.App _ | A.Fn _ | A.FnType _ | A.Data _ | A.DataType _
+ | A.Nat _ | A.Var _ | A.Type) as t, Check vexpected ->
+ let (t, vinferred) = infer' ctx {loc; data = t} in
if conv ctx.env vexpected vinferred then
- t
+ (t, vexpected)
else
- abort (loc @$ ErrUnifyFail (vexpected, vinferred))
-
-and check_args' ctx loc explicit_args clo {binders; order} =
+ let expected = quote ctx.env vexpected in
+ let inferred = quote ctx.env vinferred in
+ abort loc @@ EElabUnifyFail (expected, inferred)
+
+(* Check the given explicit arguments against the given multibinder.
+ * Return the elaborated terms and the multibinder's closure extended with
+ * fresh variables defined to the arguments' values. *)
+and check_args'
+: context -> Loc.t -> A.term array -> V.environment -> I.multibinder
+ -> I.term array * V.environment
+= fun ctx loc explicit_args clo {binders; order} ->
let arity = Array.length binders in
- let explicit_count = Array.length explicit_args in
- let implicit_count = ref 0 in
-
- (* Elaborate explicits to Some and implicits to None. *)
- let args = Array.make arity garbage in
- for i = 0 to arity - 1 do
- (* TODO: Once we allow passing an implicit argument explicitly,
- * this logic will need to change a bit. *)
- args.(i) <- match binders.(i).plicity with
- | Plicity.Ex ->
- let j = i - !implicit_count in
- begin if explicit_count <= j then
- abort (loc @$ ErrArityMismatch)
- end;
- Some (explicit_args.(j))
- | Plicity.Im ->
- incr implicit_count;
- None
- done;
-
- (* We had enough explicit args, but now check if we have too many. *)
- begin if explicit_count > arity - !implicit_count then
- abort (loc @$ ErrArityMismatch)
- end;
+ let args = elaborate_args' loc explicit_args binders in
let vars = Uniq.fresh arity in
let clo = Env.bind clo vars in
- (* Check arguments. *)
let args' = Array.make arity garbage in
let clo = ref clo in
for i = 0 to arity - 1 do
let j = order.(i) in
let vtyp = eval !clo binders.(j).typ in
- let (arg, varg) = match args.(j) with
- | Some arg ->
- let arg = check' ctx arg vtyp in
- let varg = eval ctx.env arg in
- (arg, varg)
- | None -> no_impl "implict args" in
+ let arg = match args.(j) with
+ | Some arg -> check' ctx arg vtyp
+ | None -> no_impl "implicit args" in
args'.(j) <- arg;
- clo := Env.define !clo vars.(j) varg
+ let varg = eval ctx.env arg in
+ clo := Env.define !clo (Uniq.get vars j) varg
done;
+
(args', !clo)
+(* Infer the given definers. Return the elaborated definers and the context
+ * extended with fresh variables for the definers defined to each rhs. *)
(* TODO: We need to check for circularity. Idea for algorithm: Have elaboration
* routines track not only variable usage, but also the least "delay"
* associated with each variable usage. E.g., a variable x uses x with delay 0
* (it's used immediately), a function uses the variables used in its body with
* their delay + 1, and a β-redex uses the variables in the lambda with their
* delay - 1. *)
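(* (Illustrative sketch of that idea, in OCaml-style notation: in
 * `let rec f = fun _ -> g () and g = fun _ -> f ()` each rhs uses the other
 * variable only under a lambda, hence with delay 1, and there is no cycle;
 * in `let rec x = y and y = x` each rhs uses the other with delay 0, which
 * is a genuine cycle.) *)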
-and infer_definers' ctx definers =
+and infer_definers'
+: context -> A.definer array -> context * I.definer array
+= fun ctx definers ->
let ndefiners = Array.length definers in
(* To simplify context handling, we work in a weakened context from the
@@ -260,7 +311,7 @@ and infer_definers' ctx definers =
let ctx = Ctx.bind ctx vars in
match definers with
- | [||] -> impossible ()
+ | [||] -> impossible "Elab.infer_definers': no definers"
(* Single non-recursive definer. These are handled specially, because
* the type annotation on the lhs is optional. *)
@@ -271,19 +322,19 @@ and infer_definers' ctx definers =
| Ok (names, vtyp) ->
let rhs = check' ctx rhs vtyp in
(names, rhs, vtyp)
- | Error {data = ErrNonInferablePatt _; _} ->
+ | Error {data = EElabNonInferablePatt; _} ->
(* Next try to infer from rhs (and check lhs). *)
let (rhs, vtyp) = infer' ctx rhs in
let names = check_irrefutable_patt' ctx lhs vtyp in
(names, rhs, vtyp)
- | Error e -> abort e in
+ | Error e -> abort e.loc e.data in
let typ = quote ctx.env vtyp in
(* XXX: vrhs doesn't need to be a Rec. We could eagerly evaluate it, or
* add some other mechanism for laziness (like glued eval). *)
let vrhs = V.Rec {env = ref ctx.env; rhs} in
- let (ctx, _) = Ctx.assign ctx names vtyp (V.Arb vars.(0)) in
- let ctx = Ctx.define ctx vars.(0) vrhs in
- (ctx, [|{rhs; typ}|])
+ let (ctx, _) = Ctx.assign ctx names vtyp (V.Arb (Uniq.get vars 0)) in
+ let ctx = Ctx.define ctx (Uniq.get vars 0) vrhs in
+ (ctx, [|{names; rhs; typ}|])
(* Single recursive definer or many mutually-recursive definers. In either
* case, a type annotation on the lhs is mandatory. Probably, we could use
@@ -297,15 +348,21 @@ and infer_definers' ctx definers =
* assignments. We later set the active flag to false on assignments
* for non-recursive definers, so that the rhs of non-recursive definers
* can not access their own assignment. *)
- let name_set = ref Name.Set.empty in
+ let name_set = ref Name.Var.Set.empty in
let ctx_with_asns = ref ctx in
let asns = Array.make ndefiners garbage in
for i = 0 to ndefiners - 1 do
- let (names, vtyp) = infer_lhs' ctx definers.(i) in
- name_set := Array.fold_left declare_name' !name_set names;
- let (ctx', asn) = Ctx.assign !ctx_with_asns names vtyp (V.Arb vars.(i)) in
- ctx_with_asns := ctx';
- asns.(i) <- asn
+ let {loc; data = {A.lhs; _}} = definers.(i) in
+ match infer_irrefutable_patt ctx lhs with
+ | Ok (names, vtyp) ->
+ name_set := Array.fold_left declare_name' !name_set names;
+ let (ctx', asn) = Ctx.assign !ctx_with_asns
+ names vtyp (V.Arb (Uniq.get vars i)) in
+ ctx_with_asns := ctx';
+ asns.(i) <- asn
+ | Error ({data = EElabNonInferablePatt; _} as root) ->
+ abort loc @@ EElabNoLetAnnot root
+ | Error e -> abort e.loc e.data
done;
let ctx = !ctx_with_asns in
@@ -314,7 +371,9 @@ and infer_definers' ctx definers =
* elaboration of each rhs, i.e., the let-bound variables behave like
* fn-bound variables. It's hard to get around this. It might be useful
* to elaborate each rhs with every other rhs defined in context, but
- * that would require every rhs to already be elaborated! *)
+ * that would require every rhs to already be elaborated! Of course,
+ * after elaboration each rhs can see every other rhs, and we achieve this
+ * by lazily evaluating each rhs using V.Rec. *)
let env = ref ctx.env in
let definers' = Array.make ndefiners garbage in
for i = 0 to ndefiners - 1 do
@@ -335,8 +394,9 @@ and infer_definers' ctx definers =
end in
let vrhs = V.Rec {env; rhs} in
- env := Env.define !env vars.(i) vrhs;
- definers'.(i) <- {I.rhs; I.typ}
+ env := Env.define !env (Uniq.get vars i) vrhs;
+ let names = definers.(i).data.lhs.data.names in
+ definers'.(i) <- {I.names; I.rhs; I.typ}
done;
(* TODO: For this situation, it might be nice to abstract the surface
@@ -344,14 +404,9 @@ and infer_definers' ctx definers =
* environment + a surface name assignment. *)
({ctx with env = !env}, definers')
-and infer_lhs' ctx {loc; data = {lhs; _}} =
- match infer_irrefutable_patt ctx lhs with
- | Ok (names, vtyp) -> (names, vtyp)
- | Error ({data = ErrNonInferablePatt _; _} as root) ->
- abort (loc @$ ErrNoLetAnnot root)
- | Error e -> abort e
-
-and infer_binders' ctx loc binders =
+and infer_multibinder'
+: context -> Loc.t -> A.binder array -> context * I.multibinder
+= fun ctx loc binders ->
(* We associate two different orders on the variables of a multibinder.
* The *type dependency ordering* is the order where x <T y iff the type of
* y depends on x. The *representation dependency ordering* is the order
@@ -392,23 +447,24 @@ and infer_binders' ctx loc binders =
(* We generate a globally unique id for this multibinder so that we can
* determine which BlockedOnFuture exceptions belong to us (see below).
* This uniq is not a variable. *)
- let id = (Uniq.fresh 1).(0) in
+ let id = Uniq.get (Uniq.fresh 1) 0 in
let arity = Array.length binders in
let vars = Uniq.fresh arity in
let ctx = Ctx.bind ctx vars in
-
(* Add future assignments to ctx. *)
- let name_set = ref Name.Set.empty in
+ let name_set = ref Name.Var.Set.empty in
let ctx = ref ctx in
let asns = Array.make arity garbage in
for i = 0 to arity - 1 do
- let names = irrefutable_patt_names binders.(i).data.patt in
+ (* TODO: We should probably check that the pattern is irrefutable
+ * here. *)
+ let names = binders.(i).data.patt.data.names in
name_set := Array.fold_left declare_name' !name_set names;
let typ = V.Future (id, i) in
- let value = V.Arb vars.(i) in
+ let value = V.Arb (Uniq.get vars i) in
let (ctx', asn) = Ctx.assign !ctx names typ value in
ctx := ctx';
asns.(i) <- asn
@@ -421,7 +477,7 @@ and infer_binders' ctx loc binders =
type state =
| Pending of {
- dependency: (int * Name.t) option;
+ dependency: (int * Name.Var.t) option;
dependents: IntSet.t;
}
| Elaborated of int
@@ -438,8 +494,12 @@ and infer_binders' ctx loc binders =
name :: cycle
else
search (name :: cycle) j
- | Pending {dependency = None; _} -> impossible ()
- | Elaborated _ -> impossible () in
+ | Pending {dependency = None; _} ->
+ impossible "Elab.infer_multibinder': find_cycle called with \
+ pending binder with no dependency"
+ | Elaborated _ ->
+ impossible "Elab.infer_multibinder': find_cycle called with \
+ elaborated binder" in
search [] start
end in
@@ -451,8 +511,12 @@ and infer_binders' ctx loc binders =
let rec try_binder i =
let dependents = match states.(i) with
| Pending {dependency = None; dependents} -> dependents
- | Pending {dependency = Some _; _} -> impossible ()
- | Elaborated _ -> impossible () in
+ | Pending {dependency = Some _; _} ->
+ impossible "Elab.infer_multibinder': try_binder called with \
+ pending binder with a dependency"
+ | Elaborated _ ->
+ impossible "Elab.infer_multibinder': try_binder called with \
+ elaborated binder" in
match infer_binder' ctx binders.(i) with
| vtyp ->
asns.(i).typ <- vtyp;
@@ -462,14 +526,21 @@ and infer_binders' ctx loc binders =
| exception BlockedOnFuture (id', j, name) when id' = id ->
states.(i) <- Pending {dependency = Some (j, name); dependents};
states.(j) <- match states.(j) with
- | Pending r -> Pending {r with dependents = IntSet.add i r.dependents}
- | Elaborated _ -> impossible ()
+ | Pending r ->
+ Pending {r with dependents = IntSet.add i r.dependents}
+ | Elaborated _ ->
+ impossible "Elab.infer_multibinder': BlockedOnFuture raised \
+ for elaborated binder"
and remove_dependency j =
let dependents = match states.(j) with
| Pending {dependency = Some _; dependents} -> dependents
- | Pending {dependency = None; _} -> impossible ()
- | Elaborated _ -> impossible () in
+ | Pending {dependency = None; _} ->
+ impossible "Elab.infer_multibinder': remove_dependency called \
+ with pending binder with no dependency"
+ | Elaborated _ ->
+ impossible "Elab.infer_multibinder': remove_dependency called \
+ with elaborated binder" in
states.(j) <- Pending {dependency = None; dependents};
try_binder j in
@@ -478,77 +549,294 @@ and infer_binders' ctx loc binders =
done;
let binders' = Array.make arity garbage in
- let order = Array.make arity (-1) in
+ let order = Array.make arity 0 in
for i = 0 to arity - 1 do
match states.(i) with
| Elaborated j ->
let typ = quote ctx.env asns.(i).typ in
- binders'.(i) <- {I.plicity = binders.(i).data.plicity; I.typ};
+ binders'.(i) <- {
+ I.plicity = binders.(i).data.plicity;
+ I.names = binders.(i).data.patt.data.names;
+ I.typ;
+ };
order.(j) <- i
| Pending _ ->
- abort (loc @$ ErrBinderCycle (find_cycle states i))
+ abort loc @@ EElabBinderCycle (find_cycle states i)
done;
(ctx, {binders = binders'; order})
-and infer_binder' ctx {loc; data = {plicity; patt; typ}} = match typ with
-| Some a ->
- let a = check' ctx a V.Type in
- let va = eval ctx.env a in
- let _ = check_irrefutable_patt' ctx patt va in
- va
+and infer_binder'
+: context -> A.binder -> V.value
+= fun ctx {loc; data = {plicity; patt; typ}} -> match typ with
+| Some typ ->
+ let typ = check' ctx typ V.Type in
+ let vtyp = eval ctx.env typ in
+ ignore @@ check_irrefutable_patt' ctx patt vtyp;
+ vtyp
| None ->
match infer_irrefutable_patt ctx patt, plicity with
| Ok (_, vtyp), _ -> vtyp
- | Error {data = ErrNonInferablePatt _; _}, Plicity.Im ->
+ | Error {data = EElabNonInferablePatt; _}, Plicity.Im ->
begin match check_irrefutable_patt ctx patt V.Type with
| Ok _ -> V.Type
- | Error e -> abort (loc @$ ErrUnannotImplicitIsType e)
+ | Error root -> abort loc @@ EElabUnannotImplicitIsType root
end
- | Error ({data = ErrNonInferablePatt _; _} as root), Plicity.Ex ->
- abort (loc @$ ErrNoBinderAnnot root)
- | Error e, _ -> abort e
-
-and infer_ctor' ctx {loc; data = A.Ctor binders} =
- let (_, binders) = infer_binders' ctx loc binders in
- binders
-
-(* XXX: We're assuming in at least one place (the single non-recursive definer
- * case in infer_definers') that these functions that operate on patterns
- * never return an array with duplicated names. (Actually, duplicate names in
- * multibinders is no problem, but we want to forbid them to avoid programmer
- * errors.) *)
+ | Error ({data = EElabNonInferablePatt; _} as root), Plicity.Ex ->
+ abort loc @@ EElabNoBinderAnnot root
+ | Error e, _ -> abort e.loc e.data
+
+(* Check that each term in the array starting at the given index is a type
+ * convertible to the given value. *)
+and check_types'
+: context -> A.term array -> int -> V.value -> unit
+= fun ctx typs start vtyp ->
+ for i = start to Array.length typs - 1 do
+ let typ' = check' ctx typs.(i) V.Type in
+ let vtyp' = eval ctx.env typ' in
+ begin if not (conv ctx.env vtyp vtyp') then
+ let typ = quote ctx.env vtyp in
+ let typ' = quote ctx.env vtyp' in
+ abort typs.(i).loc @@ EElabUnifyFail (typ, typ')
+ end
+ done
-and infer_irrefutable_patt ctx patt =
+and infer_irrefutable_patt
+: context -> A.patt -> (Name.Var.t node array * V.value, error) result
+= fun ctx patt ->
try Ok (infer_irrefutable_patt' ctx patt) with
| Exn e -> Error e
-and infer_irrefutable_patt' ctx ({loc; data = patt} as node) = match patt with
-| A.PWild -> abort (loc @$ ErrNonInferablePatt node)
-| A.PBind _ -> abort (loc @$ ErrNonInferablePatt node)
-| A.PAnnot (patt, typ) ->
- let typ = check' ctx typ V.Type in
+
+(* TODO: Reimplement with infer_patt'? *)
+and infer_irrefutable_patt'
+: context -> A.patt -> Name.Var.t node array * V.value
+= fun ctx {loc; data = patt} -> match patt with
+| {typs = [||]; _} -> abort loc EElabNonInferablePatt
+| {disc = {loc; data = A.DData _}; _} -> abort loc EElabDataPattRefutable
+| {names; typs; disc = {data = A.DWild; _}} ->
+ let typ = check' ctx typs.(0) V.Type in
let vtyp = eval ctx.env typ in
- let names = check_irrefutable_patt' ctx patt vtyp in
+ check_types' ctx typs 1 vtyp;
(names, vtyp)
-| A.PData _ -> abort (loc @$ ErrDataPattRefutable)
-and check_irrefutable_patt ctx patt vtyp =
+and check_irrefutable_patt
+: context -> A.patt -> V.value -> (Name.Var.t node array, error) result
+= fun ctx patt vtyp ->
try Ok (check_irrefutable_patt' ctx patt vtyp) with
| Exn e -> Error e
-and check_irrefutable_patt' ctx {loc; data = patt} vtyp = match patt with
-| A.PWild -> [||]
-| A.PBind name -> [|loc @$ name|]
-| A.PAnnot (patt, typ') ->
- let typ' = check' ctx typ' V.Type in
- let vtyp' = eval ctx.env typ' in
- if conv ctx.env vtyp vtyp' then
- check_irrefutable_patt' ctx patt vtyp
+
+(* TODO: Reimplement with check_patt'? *)
+and check_irrefutable_patt'
+: context -> A.patt -> V.value -> Name.Var.t node array
+= fun ctx {loc; data = patt} vtyp -> match patt with
+| {disc = {loc; data = A.DData _}; _} -> abort loc EElabDataPattRefutable
+| {names; typs; disc = {data = A.DWild; _}} ->
+ check_types' ctx typs 0 vtyp;
+ names
+
+and infer_patt'
+: context -> A.patt -> I.patt * V.value
+= fun ctx {loc; data = patt} -> match patt with
+| {typs = [||]; _} -> abort loc EElabNonInferablePatt
+| {names; typs; disc} ->
+ let typ = check' ctx typs.(0) V.Type in
+ let vtyp = eval ctx.env typ in
+ check_types' ctx typs 1 vtyp;
+ let disc = check_disc' ctx disc vtyp in
+ ({names; disc}, vtyp)
+
+and check_patt'
+: context -> A.patt -> V.value -> I.patt
+= fun ctx {loc; data = {names; typs; disc}} vtyp ->
+ check_types' ctx typs 0 vtyp;
+ let disc = check_disc' ctx disc vtyp in
+ {names; disc}
+
+and check_disc'
+: context -> A.discriminant -> V.value -> I.discriminant
+= fun ctx {loc; data = disc} vtyp -> match disc, force ctx.env vtyp with
+| A.DWild, _ -> I.DWild
+| A.DData {tag = {data = tag; _}; subpatts = explicit_subpatts},
+ V.DataTypeClosure (clo, vdatatype) ->
+ begin match Name.Ctor.Map.find_opt tag vdatatype.names with
+ | Some tagi -> (* XXX: This is very similar to check_args'. *)
+ let datatype = quote_datatype ctx.env clo vdatatype in
+ let {I.binders; I.order} = vdatatype.ctors.(tagi) in
+ let arity = Array.length binders in
+
+ let vars = Uniq.fresh arity in
+ let ctx = Ctx.bind ctx vars in
+ let clo = Env.bind clo vars in
+
+ let subpatts = elaborate_args' loc explicit_subpatts binders in
+ let subpatts' = Array.make arity garbage in
+ let ctx = ref ctx in
+ for i = 0 to arity - 1 do
+ let j = order.(i) in
+ let vtyp = eval clo binders.(j).typ in
+ let subpatt = match subpatts.(j) with
+ | Some patt -> check_patt' !ctx patt vtyp
+        | None -> no_impl "implicit args" in
+ subpatts'.(j) <- subpatt;
+ (* XXX: I think this is a quadratic time algorithm. We should
+ * thread a context through the check_disc calls instead of
+ * repeatedly reassigning/rebinding/redefining stuff. *)
+ ctx := assign_patt !ctx subpatt vtyp (Uniq.get vars j)
+ done;
+
+ I.DData {tag; tagi; subpatts = subpatts'; datatype}
+
+ | None -> abort loc @@ EElabUnexpectedCtor tag
+ end
+| A.DData _, vtyp -> abort loc @@ EElabExpectedDataType (quote ctx.env vtyp)
+
+
+
+
+(*
+Use during evaluation of Match (belongs in eval.ml probably):
+ type var_tree =
+ | Wild of Uniq.t
+ | Data of Uniq.t * var_tree array
+or
+ type bindee =
+ | Var of Uniq.t
+ | Tree of Uniq.t option * bindee array
+ ^^^^^^
+ because top-level of multibinder has no variable
+or, to deal with the option issue above,
+ type bindee = Bindee of subbindee array
+ and subbindee =
+ | Var of Uniq.t
+ | Tree of Uniq.t * subbindee array
+Note that Var is not the same as Tree with an empty subbindee array, because
+the former establishes no definitions, whereas the latter defines the variable
+to equal a V.Data with no fields.
+
+
+
+
+*)
+
+(*
+ 1. Check each A.patt against inferred type of scrutinee, resulting in
+ an array of I.discriminant.
+ 2. For each case, bind the discriminant (which involves (a) "linearizing"
+ the discriminant into an array of variables in a depth-first
+ left-to-right fashion and (b) establishing definitions between variables
+ corresponding to subpatterns and the evaluated subpatterns themselves)
+ and infer/check the body of the case (according to the ambient mode) in
+ this extended context. This produces an I.case corresponding to each
+ A.case.
+ 3. Produce the switch tree for efficient evaluation with the following
+ algorithm: We're given (clause * int) array, where each clause is
+*)
+
+(* A clause is a conjunction of tests, and we view an array of clauses
+ * disjunctively. The case:int is the index of the body that the match
+ * expression evaluates to if the clause matches. *)
+(*
+type clause = {
+ tests: I.data_discriminant Uniq.Map.t;
+ case: int;
+}
+
+let rec elaborate_match loc scrutinee cases =
+ let vars = Uniq.fresh 1 in
+ let env = Env.bind Env.empty vars in
+
+ let to_clause i case =
+ let tests = match case with
+ | I.Case (I.DWild, _) -> Uniq.Map.empty
+ | I.Case (I.DData data_disc, _) ->
+ Uniq.Map.singleton vars.(0) data_disc in
+ {tests; case = i} in
+ let clauses = Array.mapi to_clause cases in
+
+ let used_cases = Array.make (Array.length cases) false in
+ let switch = elaborate_switch loc used_cases env clauses in
+ begin match Array.find_index not used_cases with
+ (* TODO: Use loc of the redundant case, not the entire match. *)
+ | Some i -> abort loc ErrRedundantCase
+ | None -> ()
+ end;
+
+ I.Match {scrutinee; cases; switch}
+
+and elaborate_switch match_loc used_cases env clauses =
+ let clause_count = Array.length clauses in
+
+ if clause_count > 0 then
+ match Uniq.Map.min_binding_opt clauses.(0).tests with
+ (* Base case: An empty clause is a vacuous conjunction and hence always
+ * matches. *)
+ | None ->
+ let case = clauses.(0).case in
+ used_cases.(case) <- true;
+ I.Goto case
+
+ (* Inductive case: Switch on an arbitrary variable in the first clause.
+ * (Actually, here we switch on the least variable (i.e., such that
+ * discriminants are analyzed breadth-first), but that's just so
+   * that the output is predictable when debugging.)
+ * TODO: We may want to use some heuristic to pick the variable. *)
+ | Some (var, {datatype = {ctors; }; _}) ->
+ (* Recursively create a switch tree for each constructor. *)
+ let ctor_count = Array.length ctors in
+ let subswitches = Array.make ctor_count garbage in
+ for i = 0 to ctor_count - 1 do
+ let arity = Array.length ctors.(i).binders in
+ subswitches.(i) <- elaborate_switch_branch
+ match_loc used_cases env clauses var arity i
+ done;
+
+ let index = match Env.quote env var with
+ | Some ij -> ij
+ | None -> impossible () in
+ I.Switch (index, subswitches)
else
- abort (loc @$ ErrUnifyFail (vtyp, vtyp'))
-| A.PData _ -> abort (loc @$ ErrDataPattRefutable)
-
-and irrefutable_patt_names {loc; data = patt} = match patt with
-| A.PWild -> [||]
-| A.PBind name -> [|loc @$ name|]
-| A.PAnnot (patt, _) -> irrefutable_patt_names patt
-| A.PData _ -> abort (loc @$ ErrDataPattRefutable)
+ (* TODO: Give an example of a missing case. *)
+ abort match_loc ErrNonExhaustiveMatch
+
+and elaborate_switch_branch match_loc used_cases env clauses var arity i =
+ let clause_count = Array.length clauses in
+ let subvars = Uniq.fresh arity in
+
+ (* Count the number of clauses in the subproblem. *)
+ let subclause_count = ref 0 in
+ for j = 0 to clause_count - 1 do
+ match Uniq.Map.find_opt var clauses.(j).tests with
+ | Some {tag = Tag.Index.Of (i', _); _} when i' = i ->
+ (* Clause j has a test of form "var = C ...", where C is ctor. *)
+ incr subclause_count
+ | None ->
+ (* Clause j has no opinion on var. *)
+ incr subclause_count
+ done;
+
+ (* Collect the subclauses. *)
+ let subclauses = Inc_array.make !subclause_count in
+ for j = 0 to clause_count - 1 do
+ let {tests; case} = clauses.(j) in
+ match Uniq.Map.find_opt var tests with
+ | Some {tag = Tag.Index.Of (i', _); subdiscs; _} when i' = i ->
+ (* Clause j has a test of form "var = C (d_1,...,d_n)", where C is
+ * ctor. In the subproblem, replace this clause by clauses
+ * "subvar_i = d_i" for each i such that d_i is a data
+ * discriminant. *)
+ let tests = Uniq.Map.remove var tests in
+ let add_test tests subvar = function
+ | I.DWild -> tests
+ | I.DData data_disc -> Uniq.Map.add subvar data_disc tests in
+ let tests = fold_left2 add_test tests subvars subdiscs in
+ Inc_array.add subclauses {tests; case}
+ | None ->
+ (* Clause j has no opinion on var, so add this clause unchanged
+ * into the subproblem. *)
+ Inc_array.add subclauses {tests; case}
+ done;
+ let subclauses = Inc_array.to_array subclauses in
+
+ let env = Env.bind env subvars in
+ elaborate_switch match_loc used_cases env subclauses
+*)
diff --git a/env.ml b/env.ml
@@ -1,23 +1,62 @@
-open Global
+module UniqMap = Uniq.UniqMap
+
+module UniqTsil = struct
+ module IndexMap = Map.Make(Int)
+
+ module VarMap = Uniq.RangeMap.Make(struct
+ type v = int
+ type v' = int * int
+ let offset i j = (i, j)
+ end)
+
+ type t = {
+ length: int;
+ index_to_var: Uniq.range IndexMap.t;
+ var_to_index: VarMap.t;
+ }
+
+ let lin = {
+ length = 0;
+ index_to_var = IndexMap.empty;
+ var_to_index = VarMap.empty;
+ }
+
+ let snoc {length; index_to_var; var_to_index} vars = {
+ length = length + 1;
+ index_to_var = IndexMap.add length vars index_to_var;
+ var_to_index = VarMap.add vars length var_to_index;
+ }
+
+ let var_of {length; index_to_var; _} (Index.Of (i, j)) =
+ let l = length - i - 1 in (* Convert index i to level l. *)
+ Uniq.get (IndexMap.find l index_to_var) j
+
+ let index_of {length; var_to_index; _} var =
+ match VarMap.find_opt var var_to_index with
+ | Some (l, j) ->
+ let i = length - l - 1 in (* Convert level l to index i. *)
+ Some (Index.Of (i, j))
+ | None -> None
+end
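+
+(* Worked example (illustrative): after three snocs, length = 3 and the range
+ * added by the most recent snoc sits at level 2, so its j-th variable is
+ * reached via Index.Of (3 - 2 - 1, j) = Index.Of (0, j); the range from the
+ * first snoc (level 0) is reached via Index.Of (2, j). *)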
type 'a environment = {
- bindings: Index.UniqTsil.t;
- definitions: 'a Uniq.Map.t;
+ bindings: UniqTsil.t;
+ definitions: 'a UniqMap.t;
}
let empty = {
- bindings = Index.UniqTsil.lin;
- definitions = Uniq.Map.empty;
+ bindings = UniqTsil.lin;
+ definitions = UniqMap.empty;
}
let bind env vars =
- {env with bindings = Index.UniqTsil.snoc env.bindings vars}
+ {env with bindings = UniqTsil.snoc env.bindings vars}
let define env var value =
- {env with definitions = Uniq.Map.add var value env.definitions}
+ {env with definitions = UniqMap.add var value env.definitions}
-let index env ij = Index.UniqTsil.var_of env.bindings ij
+let index env ij = UniqTsil.var_of env.bindings ij
-let quote env var = Index.UniqTsil.index_of env.bindings var
+let quote env var = UniqTsil.index_of env.bindings var
-let eval env var = Uniq.Map.find_opt var env.definitions
+let eval env var = UniqMap.find_opt var env.definitions
diff --git a/error.ml b/error.ml
@@ -0,0 +1,96 @@
+module F = Format
+open Node
+module I = Internal
+
+type error = error' node
+and error' =
+| ELexerNameIndexTooBig
+| ELexerInvalidNumberTerminator
+| ELexerNoRule
+
+| EParserInvalidTerm
+| EParserInvalidPatt
+| EParserEmptyDomain
+
+| EDesugarDuplicateCtor of Name.Ctor.t
+| EDesugarDuplicateBinderName of Name.Var.t
+| EDesugarDataAndData
+
+| EElabApplyNonFunction of I.term
+| EElabNonInferablePatt
+| EElabUnifyFail of I.term * I.term (* TODO: Rename to ConvFail? *)
+| EElabUnexpectedCtor of Name.Ctor.t
+| EElabExpectedDataType of I.term
+| EElabNoLetAnnot of error
+| EElabBinderCycle of Name.Var.t list
+| EElabUnannotImplicitIsType of error
+| EElabNoBinderAnnot of error
+| EElabDuplicateName of Name.Var.t
+| EElabArityMismatch
+| EElabCantInferData
+| EElabDataPattRefutable
+| EElabUnboundVar of Name.Var.t * int
+| EElabFnArityMismatch
+| EElabFnPlicityMismatch
+(*| EElabNonExhaustiveMatch*)
+(*| EElabRedundantCase*)
+(*| EElabExpectedFnType of I.term*)
+
+let rec pp_print_error ppf {loc; data = e} =
+ F.fprintf ppf "[%a] %a"
+ Loc.pp_print loc
+ pp_print_error' e
+
+and pp_print_error' ppf = function
+| ELexerNameIndexTooBig ->
+ F.pp_print_string ppf "name index too big"
+| ELexerInvalidNumberTerminator ->
+ F.pp_print_string ppf "invalid character after number literal"
+| ELexerNoRule ->
+ F.pp_print_string ppf "lexical error"
+
+| EParserInvalidTerm ->
+ F.pp_print_string ppf "invalid term"
+| EParserInvalidPatt ->
+ F.pp_print_string ppf "invalid pattern"
+| EParserEmptyDomain ->
+ F.pp_print_string ppf "empty domain"
+
+| EDesugarDuplicateCtor tag ->
+ F.fprintf ppf "%s: %a"
+ "duplicate constructor: "
+ Name.Ctor.pp_print tag
+| EDesugarDuplicateBinderName name ->
+ F.fprintf ppf "%s: %a"
+ "duplicate binder name: "
+ Name.Var.pp_print name
+| EDesugarDataAndData ->
+ F.pp_print_string ppf "conjunction of two data patterns"
+
+| EElabApplyNonFunction typ ->
+ F.fprintf ppf "@[<4>%s@ %a@]"
+ "attempt to apply non-function of inferred type"
+ (I.pp_print_term 0) typ
+| EElabNonInferablePatt ->
+ F.pp_print_string ppf "non-inferable pattern"
+
+(* TODO *)
+| EElabUnifyFail _ -> F.pp_print_string ppf "ErrUnifyFail"
+| EElabCantInferData -> F.pp_print_string ppf "ErrCantInferData"
+| EElabNoLetAnnot _ -> F.pp_print_string ppf "ErrNoLetAnnot"
+| EElabNoBinderAnnot _ -> F.pp_print_string ppf "ErrNoBinderAnnot"
+| EElabUnannotImplicitIsType _ -> F.pp_print_string ppf "ErrUnannotImplicitIsType"
+| EElabDataPattRefutable -> F.pp_print_string ppf "ErrDataPattRefutable"
+| EElabBinderCycle _ -> F.pp_print_string ppf "ErrBinderCycle"
+| EElabArityMismatch -> F.pp_print_string ppf "ErrArityMismatch"
+| EElabUnboundVar _ -> F.pp_print_string ppf "ErrUnboundVar"
+| EElabExpectedDataType _ -> F.pp_print_string ppf "ErrExpectedDataType"
+| EElabFnArityMismatch -> F.pp_print_string ppf "ErrFnArityMismatch"
+| EElabFnPlicityMismatch -> F.pp_print_string ppf "ErrFnPlicityMismatch"
+| EElabUnexpectedCtor _ -> F.pp_print_string ppf "ErrUnexpectedCtor"
+| EElabDuplicateName _ -> F.pp_print_string ppf "ErrDuplicateName"
+(*| EElabExpectedFnType _ -> "ErrExpectedFnType"*)
+
+exception Exn of error
+
+let abort loc e = raise (Exn (loc @$ e))
diff --git a/eval.ml b/eval.ml
@@ -1,4 +1,3 @@
-open Global
module I = Internal
module V = Value
@@ -16,7 +15,7 @@ let rec eval env = function
let env = ref env in
for i = 0 to ndefiners - 1 do
let vrhs = V.Rec {env; rhs = definers.(i).rhs} in
- env := Env.define !env vars.(i) vrhs
+ env := Env.define !env (Uniq.get vars i) vrhs
done;
eval !env body
@@ -26,7 +25,14 @@ let rec eval env = function
| I.FnType fntype -> V.FnTypeClosure (env, fntype)
-| I.Data (tag, args) -> V.Data (tag, Array.map (eval env) args)
+| I.Data {tag; tagi; args; datatype} ->
+ V.Data {
+ tag;
+ tagi;
+ args = Array.map (eval env) args;
+ clo = env;
+ datatype;
+ }
| I.DataType datatype -> V.DataTypeClosure (env, datatype)
@@ -45,12 +51,14 @@ let rec eval env = function
and apply fn args = match fn, args with
| V.Rec {env; rhs}, args -> apply (eval !env rhs) args
-| V.FnClosure (clo, {binders = {binders; _}; body; _}), args ->
+| V.FnClosure (clo, {multibinder = {binders; _}; body; _}), args ->
let arity = Array.length binders in
let vars = Uniq.fresh arity in
- let clo = Env.bind clo vars in
- let clo = fold_left2 Env.define clo vars args in
- eval clo body
+ let clo = ref (Env.bind clo vars) in
+ for i = 0 to arity - 1 do
+ clo := Env.define !clo (Uniq.get vars i) args.(i)
+ done;
+ eval !clo body
| V.Builtin I.NatOpAdd, [|V.Nat m; V.Nat n|] -> V.Nat (Z.add m n)
| V.Builtin I.NatOpMul, [|V.Nat m; V.Nat n|] -> V.Nat (Z.mul m n)
| fn, args -> V.App (fn, args)
@@ -60,6 +68,9 @@ and apply fn args = match fn, args with
* needing to recurse. *)
and force env = function
| V.Rec {env = env'; rhs} -> force env (eval !env' rhs)
+(* XXX: There's no reason to force the arguments for "CbNeed redexes" (i.e.,
+ * when fn is a lambda), but we do need to force args in a "CbValue redex"
+ * (i.e., when fn is a builtin with builtin reduction rules). *)
| V.App (fn, args) -> apply (force env fn) (Array.map (force env) args)
| V.Arb var ->
begin match Env.eval env var with
@@ -67,85 +78,3 @@ and force env = function
| None -> Arb var
end
| v -> v
-
-let rec quote env = function
-| V.Rec {env = env'; rhs} -> quote env (eval !env' rhs)
-| V.App (fn, args) -> I.App (quote env fn, Array.map (quote env) args)
-| V.FnClosure (clo, {binders; body; codomain}) ->
- let (env, clo, binders) = quote_binders env clo binders in
- let body = quote env (eval clo body) in
- let codomain = quote env (eval clo codomain) in
- I.Fn {binders; body; codomain}
-| V.FnTypeClosure (clo, {binders; codomain}) ->
- let (env, clo, binders) = quote_binders env clo binders in
- let codomain = quote env (eval clo codomain) in
- I.FnType {binders; codomain}
-| V.Data (tag, args) -> I.Data (tag, Array.map (quote env) args)
-| V.DataTypeClosure (clo, ctors) ->
- let quote_ctor binders =
- let (_, _, binders) = quote_binders env clo binders in
- binders in
- I.DataType (Tag.Map.map quote_ctor ctors)
-| V.Nat n -> I.Nat n
-| V.Arb var ->
- begin match Env.quote env var with
- | Some ij -> I.Var ij
- | None -> impossible ()
- end
-| V.Type -> I.Type
-| V.Builtin b -> I.Builtin b
-| V.Future _ -> impossible ()
-
-and quote_binders env clo {binders; order} =
- let arity = Array.length binders in
- let vars = Uniq.fresh arity in
- let env = Env.bind env vars in
- let clo = Env.bind clo vars in
- let binders' = Array.make arity garbage in
- for i = 0 to arity - 1 do
- let j = order.(i) in
- let typ = quote env (eval clo binders.(j).typ) in
- binders'.(j) <- {binders.(j) with typ}
- done;
- (env, clo, {binders = binders'; order})
-
-let rec conv env v0 v1 = match force env v0, force env v1 with
-(* TODO: η-equality? *)
-(* TODO: Switch *)
-| V.App (fn0, args0), V.App (fn1, args1) ->
- (* Note that aargs and bargs aren't necessarily the same length, but they
- * are if fn0 and fn1 are convertible, so the short-circuiting of (&&)
- * ensures that for_all2 can't fail. *)
- conv env fn0 fn1 && Array.for_all2 (conv env) args0 args1
-| V.FnClosure (clo0, fn0), V.FnClosure (clo1, fn1) ->
- let arity = Array.length fn0.binders.binders in
- let vars = Uniq.fresh arity in
- let env = Env.bind env vars in
- let clo0, clo1 = Env.bind clo0 vars, Env.bind clo1 vars in
- let vbody0, vbody1 = eval clo0 fn0.body, eval clo1 fn1.body in
- conv env vbody0 vbody1
-| V.FnTypeClosure (clo0, fntype0), V.FnTypeClosure (clo1, fntype1) ->
- let binders0 = fntype0.binders in
- let binders1 = fntype1.binders in
- let arity0 = Array.length binders0.binders in
- let arity1 = Array.length binders1.binders in
- arity0 = arity1 &&
- let vars = Uniq.fresh arity0 in
- let env = Env.bind env vars in
- let clo0, clo1 = Env.bind clo0 vars, Env.bind clo1 vars in
- let domain_conv i0 i1 =
- i0 = i1 &&
- let vtyp0 = eval clo0 binders0.binders.(i0).typ in
- let vtyp1 = eval clo1 binders1.binders.(i1).typ in
- conv env vtyp0 vtyp1 in
- Array.for_all2 domain_conv binders0.order binders1.order &&
- let vcodomain0 = eval clo0 fntype0.codomain in
- let vcodomain1 = eval clo1 fntype1.codomain in
- conv env vcodomain0 vcodomain1
-| V.Nat n, V.Nat m -> Z.equal n m
-| V.Arb var0, V.Arb var1 -> var0 = var1
-| V.Type, V.Type -> true
-| V.Builtin b0, V.Builtin b1 -> b0 = b1
-| Future _, _ -> impossible ()
-| _, Future _ -> impossible ()
-| _, _ -> false
diff --git a/global.ml b/global.ml
@@ -1,39 +0,0 @@
-include Tsil.Global
-include Node.Global
-
-let impossible () = failwith "impossible"
-let no_impl feature = failwith ("no impl: " ^ feature)
-
-(* For creating uninitialized arrays *)
-let garbage = Obj.magic 0
-
-(* For some reason this isn't in the standard Array module... *)
-let rec fold_left2 f acc xs ys =
- begin if Array.length xs <> Array.length ys then
- invalid_arg "fold_left2"
- end;
- let acc = ref acc in
- for i = 0 to Array.length xs - 1 do
- acc := f !acc xs.(i) ys.(i)
- done;
- !acc
-
-(* Short-circuiting pairwise AND. *)
-let rec all2 f xs ys =
- begin if Array.length xs <> Array.length ys then
- invalid_arg "all2"
- end;
- let len = Array.length xs in
- let rec go i = (i = len) || (f xs.(i) ys.(i) && go (i + 1)) in
- go 0
-
-let concat_string_array sep ss =
- let lensum = Array.fold_left (fun len s -> len + String.length s) 0 ss in
- let bytes = Bytes.create lensum in
- let pos = ref 0 in
- for i = 0 to Array.length ss - 1 do
- let len = String.length ss.(i) in
- Bytes.blit_string ss.(i) 0 bytes !pos len;
- pos := !pos + len
- done;
- Bytes.to_string bytes
diff --git a/inc_array.ml b/inc_array.ml
@@ -0,0 +1,16 @@
+(* Arrays that are initialized incrementally. *)
+
+open Util
+
+type 'a t = {
+ mutable length: int;
+ data: 'a array;
+}
+
+let make n = {length = 0; data = Array.make n garbage}
+
+let add a v =
+ a.data.(a.length) <- v;
+ a.length <- a.length + 1
+
+let to_array a = a.data
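
Inc_array supports the count-then-fill pattern used by the subclause collection in the elaborate_switch sketch above: size the array in a first pass, add elements one at a time, then freeze it. A minimal standalone usage (evens_upto is just an illustrative example, not code from this commit):

    (* Sketch: count in a first pass, fill incrementally, then freeze. *)
    let evens_upto n =
      let count = ref 0 in
      for i = 0 to n - 1 do
        if i mod 2 = 0 then incr count
      done;
      let acc = Inc_array.make !count in
      for i = 0 to n - 1 do
        if i mod 2 = 0 then Inc_array.add acc i
      done;
      Inc_array.to_array acc   (* evens_upto 7 = [|0; 2; 4; 6|] *)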
diff --git a/index.ml b/index.ml
@@ -1,48 +1,12 @@
(* Pairs of indices, which we usually denote ij or (i, j). The first index i is
* a de Bruijn index in the usual sense; it refers to a binder, with the
- * innermost binder being 0. All our binders are multibinders, i.e., they bind
- * multiple variables simultaneously, and the second index j refers to the
- * "subbinder" within the multibinder. *)
+ * innermost binder being 0. All our binders are multibinders (i.e., they bind
+ * multiple variables simultaneously), so each entry in an environment is
+ * actually a range of variables, and the second index j refers to a variable
+ * within such a range. *)
type index = Of of int * int
type t = index
-let to_string (Of (i, j)) = string_of_int i ^ "." ^ string_of_int j
-
-module UniqTsil = struct
- module IntMap = Map.Make(Int)
-
- type t = {
- length: int;
- index_to_var: Uniq.t array IntMap.t;
- var_to_index: (int * int) Uniq.Map.t;
- }
-
- let lin = {
- length = 0;
- index_to_var = IntMap.empty;
- var_to_index = Uniq.Map.empty;
- }
-
- let snoc {length; index_to_var; var_to_index} vars =
- let var_to_index = ref var_to_index in
- for j = 0 to Array.length vars - 1 do
- var_to_index := Uniq.Map.add vars.(j) (length, j) !var_to_index
- done;
- {
- length = length + 1;
- index_to_var = IntMap.add length vars index_to_var;
- var_to_index = !var_to_index
- }
-
- let var_of {length; index_to_var; _} (Of (i, j)) =
- let l = length - i - 1 in (* Convert index i to level l. *)
- (IntMap.find l index_to_var).(j)
-
- let index_of {length; var_to_index; _} var =
- match Uniq.Map.find_opt var var_to_index with
- | Some (l, j) ->
- let i = length - l - 1 in (* Convert level l to index i. *)
- Some (Of (i, j))
- | None -> None
-end
-\ No newline at end of file
+let pp_print ppf (Of (i, j)) =
+ Format.fprintf ppf "%d.%d" i j
diff --git a/internal.ml b/internal.ml
@@ -1,5 +1,37 @@
-open Global
-module P = Pretty
+open Node
+module F = Format
+
+(* Ideas for implicit args/metavariables/unification: Unlike in Andras's
+ * elaboration-zoo, we use globally fresh names in the semantic domain, so
+ * when checking that all variables that occur in a candidate solution for a
+ * metavariable are in scope at the metavariable's site of creation, we don't
+ * need to do any index/level manipulation. Rather, it suffices to simply
+ * lookup the uniq in the metavariable's environment: if it's there, then the
+ * variable is in scope and all is well; otherwise, the variable is out of
+ * scope and we produce a unification error.
+ *
+ * In the elaboration-zoo, metas are instead viewed as closed terms
+ * lambda-abstracted over the entire ambient environment (i.e., roughly
+ * speaking, such that a solution to a meta Γ ⊢ m has form ⋅ ⊢ λ Γ. t for some
+ * term t), which has the disadvantage that forming a meta takes time linear in
+ * the size of the environment.
+ *
+ * When actually solving a metavariable (which happens during unification
+ * between two values, namely the metavariable and its candidate solution), we
+ * optimistically try to quote the candidate solution inside the metavariable's
+ * environment, which will (rightfully) fail if the candidate solution depends
+ * on out-of-scope variables.
+ *
+ * Maybe we can represent metavariables in values as simply a value option ref.
+ * Then when checking an argument list, we keep track of all these refs (one
+ * per implicit argument), and ensure after elaborating every argument that the
+ * metas have been solved to Some solution. Better: Use an inline record so
+ * that there's 1 indirection instead of 2. I.e., V.value becomes
+ * ... | Meta of {mutable soln: V.value}
+ *)
+
+(* TODO: When we actually start compiling to low-level code, we need to
+ * consider where types should be stored on internal terms. *)
type builtin =
| NatType
@@ -8,90 +40,255 @@ type builtin =
type term =
| Let of definer array * term
-(*| Switch of term * case array*)
+(*| Match of {
+ scrutinee: term;
+ cases: case array;
+ switch: switch;
+}*)
| App of term * term array
| Fn of fn
| FnType of fntype
-| Data of Tag.t * term array
+| Data of {
+ tag: Name.Ctor.t;
+ tagi: int;
+ args: term array;
+ datatype: datatype;
+}
| DataType of datatype
| Nat of Z.t
-| Var of Index.t
+| Var of Index.t (* TODO: Store type on here or in env? *)
| Type
| Builtin of builtin
and definer = {
+ names: Name.Var.t node array;
rhs: term;
typ: term;
}
+and case = {
+ patt: patt;
+ body: term;
+}
+
+(* Decision tree of match. Switch is an internal node that switches on a
+ * non-empty datatype, and Goto is a leaf node that jumps to the indicated
+ * case. *)
+and switch =
+| Switch of Index.t * switch array
+| Goto of int
+
+(* TODO: Generalize binders/multibinders to include discriminants, i.e.,
+ * generalize from linear to tree structure. When we bind such a binder,
+ * we also establish some definitions according to the tree structure. *)
+
and binder = {
plicity: Plicity.t;
+ names: Name.Var.t node array;
typ: term;
}
-and sorted_binders = {
+and multibinder = {
binders: binder array;
order: int array;
}
and fn = {
- binders: sorted_binders;
+ multibinder: multibinder;
body: term;
codomain: term;
}
and fntype = {
- binders: sorted_binders;
+ multibinder: multibinder;
codomain: term;
}
-and datatype = sorted_binders Tag.Map.t
+and datatype = {
+ ctors: multibinder array;
+ names: int Name.Ctor.Map.t;
+}
-(*
-and case = patt * term
-and patt = patt_discriminant list
-and patt_discriminant =
-| PattDiscTrue
-| PattDiscFalse
-| PattDiscData of ident * int
-*)
+and patt = {
+ names: Name.Var.t node array;
+ disc: discriminant;
+}
+
+and discriminant =
+| DWild
+| DData of data_discriminant
+
+and data_discriminant = {
+ tag: Name.Ctor.t;
+ tagi: int;
+ subpatts: patt array;
+ datatype: datatype;
+}
+
+let rec pp_print_with_parens
+: 'a. int -> int -> F.formatter -> ('a, F.formatter, unit) format -> 'a
+= fun m c ppf ->
+ if c < m then begin
+ F.pp_print_string ppf "(";
+ F.kfprintf (fun ppf -> F.pp_print_string ppf ")") ppf
+ end else
+ F.fprintf ppf
+
+and pp_print_term
+: int -> F.formatter -> term -> unit
+= fun m ppf -> function
+| Let (definers, body) ->
+ pp_print_with_parens m 0 ppf "@[<v 0>%a;@,%a@]"
+ (F.pp_print_array ~pp_sep:pp_print_comma pp_print_definer) definers
+ (pp_print_term 0) body
+
+(* TODO: Match *)
+
+| App (fn, args) ->
+ pp_print_with_parens m 5 ppf "@[<4>%a%a@]"
+ (pp_print_term 6) fn
+ pp_print_args args
+
+| Fn {multibinder = {binders; _}; body; codomain} ->
+ pp_print_with_parens m 1 ppf "@[fn@[<hv 4>@ %a@]%a=>@[<4>@ %a: %a@]@]"
+ pp_print_binders binders
+ pp_print_trailing_delim (1, ",")
+ (pp_print_term 3) body
+ (pp_print_term 2) codomain
+
+| FnType {multibinder = {binders; _}; codomain} ->
+ pp_print_with_parens m 3 ppf "@[[@[<hv 4>@,%a@]%a] -> %a@]"
+ pp_print_binders binders
+ pp_print_trailing_delim (0, ",")
+ (pp_print_term 3) codomain
+
+| Data {tag; args; datatype; _} ->
+ pp_print_with_parens m 5 ppf "@[<4>%a%a: %a@]"
+ Name.Ctor.pp_print tag
+ pp_print_args args
+ (pp_print_term 2) (DataType datatype)
+| DataType {ctors; names} ->
+ pp_print_with_parens m 6 ppf "@[#{@[<hv 4>@,%a@]%a}@]"
+ pp_print_ctors (ctors, names)
+ pp_print_trailing_delim (0, ";")
+
+| Nat n ->
+ pp_print_with_parens m 6 ppf "%a"
+ Z.pp_print n
+
+(* TODO: Print surface names here, possibly in addition to indices. This
+ * requires passing around a context during pretty printing *sigh*. *)
+| Var ij ->
+ pp_print_with_parens m 6 ppf "%a"
+ Index.pp_print ij
+
+| Type ->
+ pp_print_with_parens m 6 ppf "%s"
+ "~Type"
+
+| Builtin b ->
+ pp_print_with_parens m 6 ppf "%a"
+ pp_print_builtin b
+
+and pp_print_definer ppf {names; rhs; typ} =
+ F.fprintf ppf "@[<4>%a: %a :=@ %a@]"
+ pp_print_names names
+ (pp_print_term 2) typ
+ (pp_print_term 1) rhs
+
+and pp_print_args ppf = function
+| [||] -> () (* This only happens for Data, not Fn. *)
+| args ->
+ F.fprintf ppf "@ %a"
+ (F.pp_print_array ~pp_sep:F.pp_print_space (pp_print_term 6)) args
+
+and pp_print_binders ppf binders =
+ F.pp_print_array ~pp_sep:pp_print_comma pp_print_binder ppf binders
+
+and pp_print_binder ppf {plicity; names; typ} =
+ F.fprintf ppf "@[%a%a: %a@]"
+ pp_print_plicity plicity
+ pp_print_names names
+ (pp_print_term 2) typ
+
+and pp_print_ctors ppf (ctors, names) =
+ let first = ref true in
+ Name.Ctor.Map.iter (fun tag tagi ->
+ begin if not !first then
+ F.fprintf ppf ";@ ";
+ first := false
+ end;
+ match ctors.(tagi).binders with
+ | [||] -> Name.Ctor.pp_print ppf tag
+ | binders ->
+ F.fprintf ppf "@[<4>%a%a@]"
+ Name.Ctor.pp_print tag
+ pp_print_binders binders
+ ) names
+
+and pp_print_plicity ppf = function
+| Plicity.Im -> F.pp_print_string ppf "?"
+| Plicity.Ex -> ()
+
+and pp_print_names ppf = function
+| [||] -> F.fprintf ppf "_"
+| names -> F.pp_print_array ~pp_sep:pp_print_and pp_print_name ppf names
+
+and pp_print_name ppf {data = name; _} = Name.Var.pp_print ppf name
+
+and pp_print_trailing_delim ppf (n, d) =
+ F.pp_print_custom_break ppf ~fits:("", n, "") ~breaks:(d, 0, "")
+
+and pp_print_comma ppf () = F.fprintf ppf ",@ "
+
+and pp_print_and ppf () = F.pp_print_string ppf " & "
+
+and pp_print_builtin ppf = function
+| NatType -> F.pp_print_string ppf "~Nat"
+| NatOpAdd -> F.pp_print_string ppf "~+"
+| NatOpMul -> F.pp_print_string ppf "~*"
+
+(*
let rec write_term buf = function
| Let (definers, body) ->
P.parens buf 0 @@ fun () ->
- P.at 1 P.fold_left buf ", " write_definer definers;
+ P.at 1 P.iter buf ", " write_definer definers;
P.string buf "; ";
P.at 0 write_term buf body
| App (fn, args) ->
P.parens buf 6 @@ fun () ->
P.at 7 write_term buf fn;
P.string buf " ";
- P.at 7 P.fold_left buf " " write_term args
-| Fn {binders = {binders; _}; body; codomain} ->
+ P.at 7 P.iter buf " " write_term args
+| Fn {multibinder = {binders; _}; body; codomain} ->
P.parens buf 1 @@ fun () ->
P.string buf "fn ";
- P.at 2 P.fold_left buf ", " write_binder binders;
+ P.at 2 P.iter buf ", " write_binder binders;
P.string buf " : ";
P.at 2 write_term buf codomain;
P.string buf " => ";
P.at 1 write_term buf body
-| FnType {binders = {binders; _}; codomain} ->
+| FnType {multibinder = {binders; _}; codomain} ->
P.parens buf 3 @@ fun () ->
P.string buf "[";
- P.at 3 P.fold_left buf ", " write_binder binders;
+ P.at 3 P.iter buf ", " write_binder binders;
P.string buf "] -> ";
P.at 3 write_term buf codomain
-| Data (tag, args) ->
+| Data {tag; args; datatype} ->
P.parens buf 6 @@ fun () ->
- P.at 7 P.string buf (Tag.to_string tag);
+ P.at 7 P.string buf (Tag.Index.to_string tag);
begin if Array.length args > 0 then
P.string buf " ";
- P.at 7 P.fold_left buf " " write_term args
- end
-| DataType ctors ->
+ P.at 7 P.iter buf " " write_term args
+ end;
+ P.string buf " : ";
+ P.at 2 write_term buf (DataType datatype)
+| DataType {ctors; names} ->
P.parens buf 7 @@ fun () ->
P.string buf "#{";
- P.at 3 P.fold_left buf "; " write_ctor (Array.of_list (Tag.Map.to_list ctors));
+ P.at 3 P.iter2 buf "; " write_ctor
+ (Array.of_list (Tag.Name.Map.to_list names)) ctors;
P.string buf "}"
| Nat n ->
P.parens buf 7 @@ fun () ->
@@ -125,14 +322,15 @@ and write_binder buf = function
| {plicity = Plicity.Ex; typ} ->
write_term buf typ
-and write_ctor buf (tag, {binders; _}) =
- P.string buf (Tag.to_string tag);
+and write_ctor buf (tag, _) {binders; _} =
+ P.string buf (Tag.Name.to_string tag);
begin if Array.length binders > 0 then
P.string buf " ";
- P.at 3 P.fold_left buf ", " write_binder binders
+ P.at 3 P.iter buf ", " write_binder binders
end
let to_string t =
- let buf = P.make 128 in
+ let buf = P.make 1024 in (* Arbitrary initial size *)
write_term buf t;
P.to_string buf
+*)
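
The metavariable note at the top of this file suggests representing a meta as a mutable slot that remembers the environment it was created in, so that solving only needs a scope check against that environment. A minimal sketch of that reading (the value type, Meta record, and solve/check below are hypothetical, not part of this commit):

    (* Sketch only: a meta remembers which uniqs were in scope at creation;
     * solving succeeds only if the candidate mentions no other variables. *)
    type value =
    | Arb of int        (* a globally fresh variable, stand-in for Uniq.uniq *)
    | Meta of meta
    and meta = {
      created_in: int list;        (* uniqs in scope where the meta was made *)
      mutable soln: value option;
    }

    exception Out_of_scope of int

    let rec solve m candidate =
      check m.created_in candidate;
      m.soln <- Some candidate
    and check scope = function
    | Arb x -> if not (List.mem x scope) then raise (Out_of_scope x)
    | Meta {soln = Some v; _} -> check scope v
    | Meta {soln = None; _} -> ()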
diff --git a/lexer.mll b/lexer.mll
@@ -1,27 +1,17 @@
{
-open Global
+open Node
+open Error
open Parser
-type error = error' node
-and error' =
-| ErrIndexTooBig
-| ErrInvalidNumberTerminator
-| ErrNoRule
-
-let rec string_of_error {loc; data = e} =
- Loc.to_string loc ^ " " ^ string_of_error' e
-and string_of_error' = function
-| ErrIndexTooBig -> "name index too big"
-| ErrInvalidNumberTerminator -> "invalid terminator after integer literal"
-| ErrNoRule -> "lexical error"
-
-exception Exn of error
-
let abort lexbuf e =
- let loc = Loc.of_position (Lexing.lexeme_start_p lexbuf) in
- raise (Exn {loc; data = e})
+ let beg_pos = Lexing.lexeme_start_p lexbuf in
+ let end_pos = Lexing.lexeme_end_p lexbuf in
+ let loc = Loc.of_positions beg_pos end_pos in
+ Error.abort loc e
}
+let ascii_horizontal = ['\t' ' '-'~']
+
let dnumeral = ['0'-'9']
let bnumeral = ['0'-'1']
let onumeral = ['0'-'7']
@@ -34,14 +24,18 @@ let latin = ulatin | llatin
(* We allow only those Greek letters that can't be confused with Latin letters
* in most fonts. *)
let ugreek = "Γ" | "Δ" | "Θ" | "Λ" | "Ξ" | "Π" | "Σ" | "Φ" | "Ψ" | "Ω"
-let lgreek = "α" | "β" | "γ" | "δ" | "ε" | "ζ" | "η" | "θ" | "ι" | "κ" | "λ" | "μ" | "ν" | "ξ" | "π" | "ρ" | "σ" | "τ" | "υ" | "φ" | "χ" | "ψ" | "ω"
+let lgreek = "α" | "β" | "γ" | "δ" | "ε" | "ζ" | "η" | "θ" | "ι" | "κ" | "λ"
+ | "μ" | "ν" | "ξ" | "π" | "ρ" | "σ" | "τ" | "υ" | "φ" | "χ" | "ψ" | "ω"
let lgreekalt = "ϵ" | "ϑ" | "ϰ" | "ϖ" | "ϱ" | "ς" | "ϕ"
let greek = ugreek | lgreek | lgreekalt
+let comment = '\\' '\\' (ascii_horizontal | greek)*
+
let ident_start = latin | greek | '_'
let ident_cont = latin | greek | '_' | dnumeral | '\'' | '"'
-let name = ident_start ident_cont*
-let tag = ident_cont+
+let ident_quote = ascii_horizontal | greek
+let name = (ident_start ident_cont* as x) | '`' (ident_quote* as x) '`'
+let tag = (ident_cont+ as x) | '`' (ident_quote* as x) '`'
(* This format is accepted by Z.of_string. *)
let dnumber = dnumeral (dnumeral | '_')*
@@ -56,8 +50,8 @@ let invalid_number_terminator = ident_cont
rule token = parse
| [' ' '\t' '\r'] { token lexbuf }
-| '\n' { Lexing.new_line lexbuf; token lexbuf }
-| eof { EOF }
+| comment? '\n' { Lexing.new_line lexbuf; token lexbuf }
+| comment? eof { EOF }
| ";" { SEMICOLON }
| ":" { COLON }
@@ -78,44 +72,53 @@ rule token = parse
| "{" { LBRACE }
| "}" { RBRACE }
+| "||" { PIPE2 }
+| "&&" { AMPERSAND2 }
+
+| "<" { LT }
+| "<=" { LE }
+| "=" { EQ }
+| ">=" { GE }
+| ">" { GT }
+| "<>" { NE }
+
| "+" { PLUS }
+| "-" { MINUS }
| "*" { ASTERISK }
+| "/" { SLASH }
+| "%" { PERCENT }
| "|" { PIPE }
| "&" { AMPERSAND }
| "rec" { REC }
-
| "match" { MATCH }
-
| "if" { IF }
| "then" { THEN }
| "else" { ELSE }
-
| "iff" { IFF }
| "do" { DO }
-
| "fn" { FN }
-| "@" (tag as t) { TAG t }
-
| "_" { UNDERSCORE }
-| (name as x) "#" (number as i) {
+| "@" tag { TAG x }
+
+| name "#" (number as i) {
let z = Z.of_string i in
(* TODO: Standardize this limit. *)
if Z.fits_int z then
INDEXED_NAME (x, Z.to_int z)
else
- abort lexbuf ErrIndexTooBig
+ abort lexbuf ELexerNameIndexTooBig
}
-| name as x { NAME x }
+| name { NAME x }
| number as n { NAT (Z.of_string n) }
| number invalid_number_terminator
- { abort lexbuf ErrInvalidNumberTerminator }
+ { abort lexbuf ELexerInvalidNumberTerminator }
-| _ { abort lexbuf ErrNoRule }
+| _ { abort lexbuf ELexerNoRule }
{
}
diff --git a/loc.ml b/loc.ml
@@ -1,11 +1,24 @@
+open Lexing
+
type loc =
-| None
-| Loc of int * int
+| Nowhere
+| Loc of {
+ beg_row: int;
+ beg_col: int;
+ end_row: int;
+ end_col: int;
+}
type t = loc
-let of_position (pos: Lexing.position) =
- Loc (pos.pos_lnum, pos.pos_cnum - pos.pos_bol)
+let of_positions beg_pos end_pos =
+ Loc {
+ beg_row = beg_pos.pos_lnum;
+ beg_col = beg_pos.pos_cnum - beg_pos.pos_bol;
+ end_row = end_pos.pos_lnum;
+ end_col = end_pos.pos_cnum - end_pos.pos_bol;
+ }
-let to_string = function
-| None -> "?:?"
-| Loc (row, col) -> string_of_int row ^ ":" ^ string_of_int col
+let pp_print ppf = function
+| Nowhere -> Format.pp_print_string ppf "?"
+| Loc {beg_row; beg_col; end_row; end_col} ->
+ Format.fprintf ppf "%d:%d-%d:%d" beg_row beg_col end_row end_col
diff --git a/main.ml b/main.ml
@@ -1,8 +1,7 @@
-open Global
+open Node
module I = Internal
module V = Value
-(*
let buf = Lexing.from_string "\
N := [A: Type, A, A -> A] -> A;\n\
add: [N, N] -> N := fn m, n => fn A, z, s => m A (n A z s) s;\n\
@@ -11,15 +10,34 @@ let buf = Lexing.from_string "\
five := add two three;\n\
five Nat 0 (fn n => n + 1)\n\
"
+
+(*
+let buf = Lexing.from_string "\
+ T & S := #{@a Nat; @b #(), Nat, Nat, Nat, Nat, Nat, Nat, Nat, Nat, Nat; @c #(); @d #(); @e #(); @f #(); @g #(); @h #(); @i #(); @j #(); @k #(); @l #(); @m #()};\n\
+ @a 3 : S\n\
+"
+*)
+
+(*
+let buf = Lexing.from_string "\
+ f: Nat := g,\n\
+ g: Nat := f;\n\
+ 3\n\
+"
*)
(*
-let buf = Lexing.from_string "T := #{@a Nat; @b #()}; @a 3 : T"
+let buf = Lexing.from_string "\
+ T & S := #{@a Nat; @b #()};\n\
+ @a 3 : S\n\
+"
*)
+(*
let buf = Lexing.from_string "\
(fn => Nat) ()\
"
+*)
(*
let buf = Lexing.from_string "\
@@ -29,43 +47,45 @@ let buf = Lexing.from_string "\
"
*)
-let concrete =
- try Parser.start Lexer.token buf with
- | Lexer.Exn e ->
- print_endline (Lexer.string_of_error e);
- exit 1
- | Parser.Exn e ->
- print_endline (Parser.string_of_error e);
- exit 1
-
-let abstract = Desugar.term concrete
-
-(* TODO: We should allow names to map to values in the context, so we don't
- * need a a psuedo-binder to create the top-level/default context. *)
-
let ctx =
- let nat_binder = {I.plicity = Plicity.Ex; I.typ = I.Builtin I.NatType} in
+ let nat_binder = {
+ I.plicity = Plicity.Ex;
+ I.names = [|
+ Loc.Nowhere @$ Name.Var.Of "a";
+ Loc.Nowhere @$ Name.Var.Of "b";
+ |];
+ I.typ = I.Builtin I.NatType;
+ } in
let nat_op_typ = V.FnTypeClosure (Env.empty, {
- binders = {binders = [|nat_binder; nat_binder|]; order = [|0; 1|]};
+ multibinder = {binders = [|nat_binder; nat_binder|]; order = [|0; 1|]};
codomain = I.Builtin I.NatType;
}) in
let ctx = Ctx.empty in
let (ctx, _) = Ctx.assign ctx
- [|Loc.None @$ Name.Of "Type"|] V.Type V.Type in
+ [|Loc.Nowhere @$ Name.Var.Of "Type"|] V.Type V.Type in
let (ctx, _) = Ctx.assign ctx
- [|Loc.None @$ Name.Of "Nat"|] V.Type (V.Builtin I.NatType) in
+ [|Loc.Nowhere @$ Name.Var.Of "Nat"|] V.Type (V.Builtin I.NatType) in
let (ctx, _) = Ctx.assign ctx
- [|Loc.None @$ Name.Of "`+"|] nat_op_typ (V.Builtin I.NatOpAdd) in
+ [|Loc.Nowhere @$ Name.Var.Of "`+"|] nat_op_typ (V.Builtin I.NatOpAdd) in
let (ctx, _) = Ctx.assign ctx
- [|Loc.None @$ Name.Of "`*"|] nat_op_typ (V.Builtin I.NatOpMul) in
+ [|Loc.Nowhere @$ Name.Var.Of "`*"|] nat_op_typ (V.Builtin I.NatOpMul) in
ctx
-let (internal, ty) =
- match Elab.infer ctx abstract with
- | Ok r -> r
- | Error e -> print_endline (Elab.string_of_error e); exit 1
+let print fmt a =
+ Format.set_margin 80;
+ Format.set_max_indent 0;
+ Format.printf "%a@." fmt a
+
+let (internal, ty) = try
+ let concrete = Parser.start Lexer.token buf in
+ let abstract = Desugar.term concrete in
+ Elab.infer' ctx abstract
+with Error.Exn e ->
+ print Error.pp_print_error e;
+ exit 1
-let internal' = Eval.quote ctx.env (Eval.eval ctx.env internal)
+let internal' = Quote.quote ctx.env (Eval.eval ctx.env internal)
-let () = print_endline (I.to_string internal)
-let () = print_endline (I.to_string internal')
+let () =
+ print (I.pp_print_term 0) internal;
+ print (I.pp_print_term 0) internal'
diff --git a/name.ml b/name.ml
@@ -1,11 +1,34 @@
-type name = Of of string
-type t = name
+module Var = struct
+ type var = Of of string
+ type t = var
-module Ord = struct
- type t = name
- let compare (Of x) (Of y) = String.compare x y
+ module Ord = struct
+ type t = var
+ let compare (Of x) (Of y) = String.compare x y
+ end
+
+ module Map = Map.Make(Ord)
+
+ module Set = Set.Make(Ord)
+
+ (* TODO: Doesn't handle backticked names. *)
+ let pp_print ppf (Of s) = Format.fprintf ppf "%s" s
end
-module Map = Map.Make(Ord)
+module Ctor = struct
+ type ctor = Of of string
+ type t = ctor
+
+ module Ord = struct
+ type t = ctor
+ let compare (Of x) (Of y) = String.compare x y
+ end
+
+ module Map = Map.Make(Ord)
+
+ let equal (Of x) (Of y) = String.equal x y
+
+ (* TODO: Doesn't handle backticked names. *)
+ let pp_print ppf (Of s) = Format.fprintf ppf "%@%s" s
+end
-module Set = Set.Make(Ord)
diff --git a/node.ml b/node.ml
@@ -1,13 +1,5 @@
(* Nodes are simply data together with a location. *)
-type 'a t = {loc: Loc.t; data: 'a}
+type 'a node = {loc: Loc.t; data: 'a}
let (@$) loc data = {loc; data}
-
-(* TODO: Remove uses of this in favor of pattern matching. *)
-let map f {loc; data} = {loc; data = f data}
-
-module Global = struct
- type 'a node = 'a t = {loc: Loc.t; data: 'a}
- let (@$) = (@$)
-end
diff --git a/parser.mly b/parser.mly
@@ -1,26 +1,9 @@
%{
-open Global
+open Util
+open Node
+open Error
module C = Concrete
-type error = error' node
-and error' =
-| ErrInvalidTerm
-| ErrInvalidPatt
-| ErrIndexInPatt
-| ErrEmptyDomain
-
-let rec string_of_error {loc; data = e} =
- Loc.to_string loc ^ " " ^ string_of_error' e
-and string_of_error' = function
-| ErrInvalidTerm -> "invalid term"
-| ErrInvalidPatt -> "invalid pattern"
-| ErrIndexInPatt -> "only unindexed variables can appear in patterns"
-| ErrEmptyDomain -> "empty domain"
-
-exception Exn of error
-
-let abort loc e = raise (Exn (loc @$ e))
-
(* To avoid reduce-reduce conflicts, we need to distinguish between raw
* expressions (which are generally of unknown syntactic category) and concrete
* terms/patterns/etc. In a hand-written parser, we'd have more control over
@@ -35,34 +18,33 @@ and expr' =
| Let of int * C.definer list * expr
| Seq of expr * expr
| Match of expr * int * C.case list
-| If of expr * expr * expr
-| Iff of expr * expr
+| If of C.cond * expr * expr
+| Iff of C.cond * expr
| Annot of expr * expr
-| Add of expr * expr
-| Mul of expr * expr
+| BinOp of expr * C.binop * expr
| Or of expr * expr
| And of expr * expr
| App of expr * int * expr list
| Fn of int * C.param list * expr
| FnType of int * C.domain list * expr
-| Variant of Tag.t * int * expr list
+| Variant of C.ctor_name * int * expr list
| VariantType of int * ctor list
| Tuple of int * expr list
| TupleType of int * C.domain list
| Unit
| Nat of Z.t
| Underscore
-| IndexedVar of Name.t * int
-| Var of Name.t
+| IndexedVar of C.var_name * int
+| Var of C.var_name
| Paren of expr
and ctor = ctor' node
-and ctor' = Ctor of Tag.t * int * C.domain list
+and ctor' = Ctor of C.ctor_name * int * C.domain list
let cons a (len, lst) = (len + 1, a :: lst)
let to_array len lst =
- let arr = Array.make len (Obj.magic 0) in
+ let arr = Array.make len garbage in
List.iteri (Array.set arr) lst;
arr
@@ -70,13 +52,12 @@ let rec term {loc; data = e} = loc @$ match e with
| Let (len, definers, body) -> C.Let (to_array len definers, term body)
| Seq (a, b) -> C.Seq (term a, term b)
| Match (scrutinee, len, cases) -> C.Match (term scrutinee, to_array len cases)
-| If (c, t, f) -> C.If (term c, term t, term f)
-| Iff (c, t) -> C.Iff (term c, term t)
+| If (c, t, f) -> C.If (c, term t, term f)
+| Iff (c, t) -> C.Iff (c, term t)
| Annot (a, b) -> C.Annot (term a, term b)
-| Add (a, b) -> C.Add (term a, term b)
-| Mul (a, b) -> C.Mul (term a, term b)
-| Or _ -> abort loc ErrInvalidTerm
-| And _ -> abort loc ErrInvalidTerm
+| BinOp (a, o, b) -> C.BinOp (term a, o, term b)
+| Or _ -> abort loc EParserInvalidTerm
+| And _ -> abort loc EParserInvalidTerm
| App (f, len, args) -> C.App (term f, Array.map term (to_array len args))
| Fn (len, params, body) -> C.Fn (to_array len params, term body)
| FnType (len, doms, cod) -> C.FnType (to_array len doms, term cod)
@@ -86,9 +67,9 @@ let rec term {loc; data = e} = loc @$ match e with
| TupleType (len, doms) -> C.TupleType (to_array len doms)
| Unit -> C.Unit
| Nat n -> C.Nat n
-| Underscore -> abort loc ErrInvalidTerm
-| IndexedVar (x, i) -> C.Var (x, Some i)
-| Var x -> C.Var (x, None)
+| Underscore -> abort loc EParserInvalidTerm
+| IndexedVar (x, i) -> C.IndexedVar (x, i)
+| Var x -> C.Var x
| Paren e -> C.Paren (term e)
and ctor {loc; data = Ctor (tag, len, doms)} =
@@ -105,14 +86,19 @@ and patt {loc; data = e} = loc @$ match e with
| Underscore -> C.PWild
| Var x -> C.PBind x
| Paren p -> C.PParen (patt p)
-| Let _ | Seq _ | Match _ | If _ | Iff _ | Add _ | Mul _
-| App _ | Fn _ | FnType _ | VariantType _ | TupleType _ ->
- abort loc ErrInvalidPatt
-| IndexedVar _ ->
- abort loc ErrIndexInPatt
-
-let loc_of i = Loc.of_position (Parsing.rhs_start_pos i)
-let (@$) i x = loc_of i @$ x
+| Let _ | Seq _ | Match _ | If _ | Iff _ | BinOp _ | App _
+| Fn _ | FnType _ | VariantType _ | TupleType _ | IndexedVar _ ->
+ abort loc EParserInvalidPatt
+
+let loc_of i =
+ let beg_pos = Parsing.rhs_start_pos i in
+ let end_pos = Parsing.rhs_end_pos i in
+ Loc.of_positions beg_pos end_pos
+
+let loc =
+ let beg_pos = Parsing.symbol_start_pos () in
+ let end_pos = Parsing.symbol_end_pos () in
+ Loc.of_positions beg_pos end_pos
%}
%token EOF
@@ -121,13 +107,11 @@ let (@$) i x = loc_of i @$ x
%token HASH_LPAREN LPAREN RPAREN
%token LBRACK RBRACK
%token HASH_LBRACE LBRACE RBRACE
-%token PLUS ASTERISK
+%token PIPE2 AMPERSAND2
+%token LT LE EQ GE GT NE
+%token PLUS MINUS ASTERISK SLASH PERCENT
%token PIPE AMPERSAND
-%token REC
-%token MATCH
-%token IF THEN ELSE
-%token IFF DO
-%token FN
+%token REC MATCH IF THEN ELSE IFF DO FN
%token UNDERSCORE
%token <string> TAG
%token <string * int> INDEXED_NAME
@@ -145,7 +129,7 @@ let (@$) i x = loc_of i @$ x
* fn ..., p(p1 ... pn), ... => b ---> fn ..., p1, ..., pn, ... => b
* The pattern p in the second rule is restricted to being a composition of
* PParen and PAnnot. Note that application of the first rule may make the
- * second rule applicable, and application of the first rule may need to be
+ * second rule applicable, and application of the second rule may need to be
* iterated until we reach a "normal form". *)
start: expr0 EOF { term $1 }
@@ -153,138 +137,194 @@ start: expr0 EOF { term $1 }
expr0: (* Definitions and sequencing *)
| definer_list SEMICOLON expr0 {
let (len, lst) = $1 in
- 2 @$ Let (len, lst, $3)
+ loc @$ Let (len, lst, $3)
}
- | expr1 SEMICOLON expr0 { 2 @$ Seq ($1, $3) }
+ | expr1 SEMICOLON expr0 { loc @$ Seq ($1, $3) }
| expr1 { $1 }
-expr1: (* Control flow *)
+
+expr1: (* Control flow and functions *)
(* TODO: Probably we should change the match syntax when staging is added,
* so that we can write something like `$match x ...` instead of
* `$(x match ...)` for conditional compilation. *)
| expr2 MATCH LBRACE case_list RBRACE {
let (len, lst) = $4 in
- 2 @$ Match($1, len, lst)
+ loc @$ Match($1, len, lst)
}
- (* TODO: if let: "if p := x then t else f"; and similarly for iff *)
- | IF expr1 THEN expr1 ELSE expr1 { 1 @$ If ($2, $4, $6) }
- | IFF expr1 DO expr1 { 1 @$ Iff ($2, $4) }
+ | IF cond THEN expr1 ELSE expr1 { loc @$ If ($2, $4, $6) }
+ | IFF cond DO expr1 { loc @$ Iff ($2, $4) }
| FN param_list DOUBLE_ARROW expr1 {
let (len, lst) = $2 in
- 1 @$ Fn (len, lst, $4)
+ loc @$ Fn (len, lst, $4)
}
| expr2 { $1 }
+
expr2: (* Type annotations (right-associative) *)
- | expr3 COLON expr2 { 2 @$ Annot ($1, $3) }
+ | expr3 COLON expr2 { loc @$ Annot ($1, $3) }
| expr3 { $1 }
+
expr3: (* Function arrow (right-associative) *)
| expr6 SINGLE_ARROW expr3 {
- 2 @$ FnType (1, [1 @$ C.DExplicit (term $1)], $3)
+ let dom = loc_of 1 @$ C.ExplicitDomain (term $1) in
+ loc @$ FnType (1, [dom], $3)
}
| LBRACK domain_list RBRACK SINGLE_ARROW expr3 {
let (len, lst) = $2 in
if len = 0 then
- abort (loc_of 2) ErrEmptyDomain
+ abort (loc_of 2) EParserEmptyDomain
else
- 4 @$ FnType (len, lst, $5)
+ loc @$ FnType (len, lst, $5)
}
| expr4 { $1 }
-expr4: (* Additive binary operators (left-associative;
- * pairwise incomparable precedence) *)
- | expr4add PLUS expr5 { 2 @$ Add ($1, $3) }
- | expr4or PIPE expr5 { 2 @$ Or ($1, $3) }
- | expr5 { $1 }
-expr4add:
- | expr4add PLUS expr5 { 2 @$ Add ($1, $3) }
- | expr5 { $1 }
-expr4or:
- | expr4or PIPE expr5 { 2 @$ Or ($1, $3) }
- | expr5 { $1 }
-expr5: (* Multiplicative binary operators (left-associative;
- * pairwise incomparable precedence) *)
- | expr5mul ASTERISK expr6 { 2 @$ Mul ($1, $3) }
- | expr5and AMPERSAND expr6 { 2 @$ And ($1, $3) }
- | expr6 { $1 }
-expr5mul:
- | expr5mul ASTERISK expr6 { 2 @$ Mul ($1, $3) }
- | expr6 { $1 }
-expr5and:
- | expr5and AMPERSAND expr6 { 2 @$ And ($1, $3) }
- | expr6 { $1 }
-expr6:
- | expr7 expr7 expr7_apposition_list {
+
+expr4: expr4a { $1 } (* Binary operators and relations *)
+
+expr4a: (* Logical OR (left-associative) *)
+ | expr4a PIPE2 expr4b { loc @$ BinOp ($1, C.LOr, $3) }
+ | expr4b { $1 }
+
+expr4b: (* Logical AND (left-associative) *)
+ | expr4b AMPERSAND2 expr4c { loc @$ BinOp ($1, C.LAnd, $3) }
+ | expr4c { $1 }
+
+expr4c: (* Relations (non-associative) *)
+ (* TODO: Chaining relations? I.e., ability to write x < y < z instead of
+ * x < y && y < z. *)
+ | expr4d rel expr4d { loc @$ BinOp ($1, $2, $3) }
+ | expr4d { $1 }
+rel:
+ | LT { C.Lt }
+ | LE { C.Le }
+ | EQ { C.Eq }
+ | GE { C.Ge }
+ | GT { C.Gt }
+ | NE { C.Ne }
+
+expr4d: (* Additive operators (left-associative) *)
+ | expr4d0 addop expr4e { loc @$ BinOp ($1, $2, $3) }
+ | expr4d1 PIPE expr4e { loc @$ Or ($1, $3) }
+ | expr4e { $1 }
+addop:
+ | PLUS { C.Add }
+ | MINUS { C.Sub }
+expr4d0:
+ | expr4d0 addop expr4e { loc @$ BinOp ($1, $2, $3) }
+ | expr4e { $1 }
+expr4d1:
+ | expr4d1 PIPE expr4e { loc @$ Or ($1, $3) }
+ | expr4e { $1 }
+
+expr4e: (* Multiplicative operators (left-associative) *)
+ | expr4e0 mulop expr4f { loc @$ BinOp ($1, $2, $3) }
+ | expr4e1 AMPERSAND expr4f { loc @$ And ($1, $3) }
+ | expr4f { $1 }
+mulop:
+ | ASTERISK { C.Mul }
+ | SLASH { C.Div }
+ | PERCENT { C.Mod }
+expr4e0:
+ | expr4e0 mulop expr4f { loc @$ BinOp ($1, $2, $3) }
+ | expr4f { $1 }
+expr4e1:
+ | expr4e1 AMPERSAND expr4f { loc @$ And ($1, $3) }
+ | expr4f { $1 }
+
+expr4f: expr5 { $1 }
+
+expr5:
+ | expr6 expr6 expr6_apposition_list {
let (len, lst) = $3 in
- 1 @$ App ($1, len + 1, $2 :: lst)
+ loc @$ App ($1, len + 1, $2 :: lst)
}
- | TAG expr7_apposition_list {
+ | ctor_name expr6_apposition_list {
let (len, lst) = $2 in
- 1 @$ Variant (Tag.Of $1, len, lst)
+ loc @$ Variant ($1, len, lst)
}
- | expr7 { $1 }
-expr7:
- | UNDERSCORE { 1 @$ Underscore }
- | INDEXED_NAME { 1 @$ let (x, i) = $1 in IndexedVar (Name.Of x, i) }
- | NAME { 1 @$ Var (Name.Of $1) }
- | NAT { 1 @$ Nat $1 }
+ | expr6 { $1 }
+
+expr6:
+ | UNDERSCORE { loc @$ Underscore }
+ | indexed_var_name {
+ let (x, i) = $1 in
+ loc @$ IndexedVar (x, i)
+ }
+ | var_name { loc @$ Var $1 }
+ | NAT { loc @$ Nat $1 }
| HASH_LBRACE ctor_list RBRACE {
let (len, lst) = $2 in
- 1 @$ VariantType (len, lst)
+ loc @$ VariantType (len, lst)
}
| HASH_LPAREN domain_list RPAREN {
let (len, lst) = $2 in
- 1 @$ TupleType (len, lst)
+ loc @$ TupleType (len, lst)
}
| LPAREN expr1 COMMA expr1_comma_list RPAREN {
let (len, lst) = $4 in
- 1 @$ Tuple (len + 1, $2 :: lst)
+ loc @$ Tuple (len + 1, $2 :: lst)
}
- | LPAREN RPAREN { 1 @$ Unit }
- | LPAREN expr0 RPAREN { 1 @$ Paren $2 }
+ | LPAREN RPAREN { loc @$ Unit }
+ | LPAREN expr0 RPAREN { loc @$ Paren $2 }
expr1_comma_list:
| expr1 COMMA expr1_comma_list { cons $1 $3 }
| expr1 { (1, [$1]) }
| { (0, []) }
-expr7_apposition_list:
- | expr7 expr7_apposition_list { cons $1 $2 }
+expr6_apposition_list:
+ | expr6 expr6_apposition_list { cons $1 $2 }
| { (0, []) }
definer:
- | expr2 COLON_EQ expr1 { 2 @$ C.Definer (patt $1, term $3) }
- | expr2 COLON_EQ REC expr1 { 2 @$ C.RecDefiner (patt $1, term $4) }
+ | expr2 COLON_EQ expr1 { loc @$ C.Definer (patt $1, term $3) }
+ | expr2 COLON_EQ REC expr1 { loc @$ C.RecDefiner (patt $1, term $4) }
definer_list:
| definer COMMA definer_list { cons $1 $3 }
| definer { (1, [$1]) }
case:
- | expr2 DOUBLE_ARROW expr1 { 2 @$ C.Case (patt $1, term $3) }
+ | expr2 DOUBLE_ARROW expr1 { loc @$ C.Case (patt $1, term $3) }
case_list:
| case COMMA case_list { cons $1 $3 }
| case { (1, [$1]) }
| { (0, []) }
+cond:
+ | expr1 { loc @$ C.TrueCond (term $1) }
+ | expr2 COLON_EQ expr1 { loc @$ C.PattCond (patt $1, term $3) }
+
param:
- | expr2 { 1 @$ C.Param (Plicity.Ex, patt $1) }
- | QUESTION expr2 { 1 @$ C.Param (Plicity.Im, patt $2) }
+ | expr2 { loc @$ C.Param (Plicity.Ex, patt $1) }
+ | QUESTION expr2 { loc @$ C.Param (Plicity.Im, patt $2) }
param_list:
| param COMMA param_list { cons $1 $3 }
| param { (1, [$1]) }
| { (0, []) }
domain:
- | expr3 { 1 @$ C.DExplicit (term $1) }
- | QUESTION expr3 { 1 @$ C.DImplicitType (patt $2) }
- | expr3 COLON expr2 { 2 @$ C.DAnnot (Plicity.Ex, patt $1, term $3) }
- | QUESTION expr3 COLON expr2 { 3 @$ C.DAnnot (Plicity.Im, patt $2, term $4) }
+ | expr3 { loc @$ C.ExplicitDomain (term $1) }
+ | QUESTION expr3 { loc @$ C.ImplicitTypeDomain (patt $2) }
+ | expr3 COLON expr2 { loc @$ C.AnnotDomain (Plicity.Ex, patt $1, term $3) }
+ | QUESTION expr3 COLON expr2 { loc @$ C.AnnotDomain (Plicity.Im, patt $2, term $4) }
domain_list:
| domain COMMA domain_list { cons $1 $3 }
| domain { (1, [$1]) }
| { (0, []) }
+ctor_name:
+ | TAG { loc @$ Name.Ctor.Of $1 }
+
+indexed_var_name:
+ | INDEXED_NAME {
+ let (x, i) = $1 in
+ (loc @$ Name.Var.Of x, i)
+ }
+
+var_name:
+ | NAME { loc @$ Name.Var.Of $1 }
+
ctor:
- | TAG domain_list {
+ | ctor_name domain_list {
let (len, lst) = $2 in
- 1 @$ Ctor (Tag.Of $1, len, lst)
+ loc @$ Ctor ($1, len, lst)
}
ctor_list:
| ctor SEMICOLON ctor_list { cons $1 $3 }
diff --git a/plicity.ml b/plicity.ml
@@ -2,7 +2,3 @@
type plicity = Im | Ex
type t = plicity
-
-let to_string = function
-| Im -> "Im"
-| Ex -> "Ex"
diff --git a/pretty.ml b/pretty.ml
@@ -1,4 +1,5 @@
-(* Pretty printing. *)
+(* "Pretty" printing. All this module does is handle insertion of
+ * parentheses. *)
let make n = (Buffer.create n, 0)
@@ -14,11 +15,21 @@ let parens (b, m) c k =
let string (b, _) s =
Buffer.add_string b s
-let fold_left (b, m) sep f = function
+let iter (b, m) sep f = function
| [||] -> ()
-| ss ->
- f (b, m) ss.(0);
- for i = 1 to Array.length ss - 1 do
+| xs ->
+ f (b, m) xs.(0);
+ for i = 1 to Array.length xs - 1 do
Buffer.add_string b sep;
- f (b, m) ss.(i)
+ f (b, m) xs.(i)
done
+
+let iter2 (b, m) sep f xs ys =
+ if Array.length xs <> Array.length ys then
+ invalid_arg "Pretty.iter2"
+ else if Array.length xs > 0 then
+ f (b, m) xs.(0) ys.(0);
+ for i = 1 to Array.length xs - 1 do
+ Buffer.add_string b sep;
+ f (b, m) xs.(i) ys.(i)
+ done
diff --git a/quote.ml b/quote.ml
@@ -0,0 +1,61 @@
+open Util
+open Eval
+module I = Internal
+module V = Value
+
+let rec quote env = function
+| V.Rec {env = env'; rhs} -> quote env (eval !env' rhs)
+| V.App (fn, args) -> I.App (quote env fn, Array.map (quote env) args)
+| V.FnClosure (clo, {multibinder; body; codomain}) ->
+ let (env, clo, multibinder) =
+ bind_and_quote_multibinder env clo multibinder in
+ let body = quote env (eval clo body) in
+ let codomain = quote env (eval clo codomain) in
+ I.Fn {multibinder; body; codomain}
+| V.FnTypeClosure (clo, {multibinder; codomain}) ->
+ let (env, clo, multibinder) =
+ bind_and_quote_multibinder env clo multibinder in
+ let codomain = quote env (eval clo codomain) in
+ I.FnType {multibinder; codomain}
+| V.Data {tag; tagi; args; clo; datatype} ->
+ I.Data {
+ tag;
+ tagi;
+ args = Array.map (quote env) args;
+ datatype = quote_datatype env clo datatype;
+ }
+| V.DataTypeClosure (clo, datatype) ->
+ I.DataType (quote_datatype env clo datatype)
+| V.Nat n -> I.Nat n
+| V.Arb var ->
+ begin match Env.quote env var with
+ | Some ij -> I.Var ij
+ | None -> impossible "Eval.quote: env does not bind variable in value"
+ end
+| V.Type -> I.Type
+| V.Builtin b -> I.Builtin b
+| V.Future _ -> impossible "Quote.quote: Future cannot be quoted"
+
+and quote_datatype env clo {ctors; names} = {
+ ctors = Array.map (quote_multibinder env clo) ctors;
+ names;
+}
+
+and bind_and_quote_multibinder env clo {binders; order} =
+ let arity = Array.length binders in
+ let vars = Uniq.fresh arity in
+ let env = Env.bind env vars in
+ let clo = Env.bind clo vars in
+ let binders' = Array.make arity garbage in
+ for i = 0 to arity - 1 do
+ (* TODO: This does not need to be done in order. *)
+ let j = order.(i) in
+ let typ = quote env (eval clo binders.(j).typ) in
+ binders'.(j) <- {binders.(j) with typ}
+ done;
+ (env, clo, {binders = binders'; order})
+
+and quote_multibinder env clo multibinder =
+ let (_, _, multibinder) =
+ bind_and_quote_multibinder env clo multibinder in
+ multibinder
diff --git a/repr.ml b/repr.ml
@@ -1,10 +1,32 @@
+(* TODO: functions/closures *)
+(* TODO: packing control *)
+
type repr =
-| RAlign of repr * int
-(*| Pad of repr * int*)
-| RPtr
-| RNat of int
-| RRepeat of repr * term1
-| RUnion of repr list
-| RStruct of repr list (* What about dependent representations? *)
-| RDepPair of repr * term1 * repr (* Left repr, left type, right repr in weakened context *)
-(* E.g., #(n: Nat, a: Arr T n) has repr RDepPair (RNat, ↑Nat, n: ↑Nat ⊢ RRepeat (reprof T) n) *)
+| Constant of constant
+| Uniform of uniform
+| NonUniform of nonuniform
+and constant =
+| CAlign of constant * term1 (* The term has type Nat1. *)
+| CPtr
+| CBits of term1 (* The term has type Nat1. *)
+| CData of constant array
+| CTuple of constant array
+| CArray of constant * term1 (* The term has type Nat1. *)
+and uniform =
+| UConstant of constant
+| UAlign of uniform * term1 (* The term has type Nat1. *)
+| UData of uniform array
+| UTuple of (uniform * term1) array * uniform
+| UArray of uniform * term1 (* The term has type ↑Nat0. *)
+and nonuniform =
+| NUniform of uniform
+| NTuple of (uniform * term1) array * nonuniform
+ (* The array is of (repr, lifted type) pairs, and each repr is formed in a
+ * context with variables of the preceding lifted types bound. E.g., if
+ * type T has uniform repr R and type Nat0 has repr CBits 64, then
+ * #(n: Nat0, a: Arr T n) has repr
+ * NTuple ([|(CBits 64, ↑Nat0)|], n: ↑Nat0 ⊢ UArray (R, n)). *)
+
+(* This isn't quite right, because we also need to allow for representation
+ * variables. The above types need to be embedded in the term1 syntax, just
+ * like how we embed types in terms. *)
diff --git a/tag.ml b/tag.ml
@@ -1,9 +0,0 @@
-type tag = Of of string
-type t = tag
-
-let to_string (Of x) = "@" ^ x
-
-module Map = Map.Make(struct
- type t = tag
- let compare (Of x) (Of y) = String.compare x y
-end)
diff --git a/tsil.ml b/tsil.ml
@@ -30,8 +30,7 @@ let of_list r = Lin <@ r
let to_list l = l @> []
-module Global = struct
- type 'a tsil = 'a t = Lin | Snoc of 'a tsil * 'a
+module Ops = struct
let (<@) = (<@)
let (@>) = (@>)
end
diff --git a/unify.ml b/unify.ml
@@ -0,0 +1,76 @@
+open Util
+open Eval
+module I = Internal
+module V = Value
+
+(* TODO: Actually make this perform unification, not conversion checking *)
+let rec conv env v0 v1 = match force env v0, force env v1 with
+(* TODO: η-equality? *)
+(* TODO: Switch *)
+| V.Rec _, _ -> impossible "Unify.conv: Rec's are removed during forcing"
+| _, V.Rec _ -> impossible "Unify.conv: Rec's are removed during forcing"
+| V.App (fn0, args0), V.App (fn1, args1) ->
+ (* Note that args0 and args1 aren't necessarily the same length, but they
+ * are if fn0 and fn1 are convertible, so the short-circuiting of (&&)
+ * ensures that for_all2 can't fail. *)
+ conv env fn0 fn1 && Array.for_all2 (conv env) args0 args1
+| V.FnClosure (clo0, fn0), V.FnClosure (clo1, fn1) ->
+ let arity = Array.length fn0.multibinder.binders in
+ let vars = Uniq.fresh arity in
+ let env = Env.bind env vars in
+ let clo0, clo1 = Env.bind clo0 vars, Env.bind clo1 vars in
+ let vbody0, vbody1 = eval clo0 fn0.body, eval clo1 fn1.body in
+ conv env vbody0 vbody1
+| V.FnTypeClosure (clo0, fntype0), V.FnTypeClosure (clo1, fntype1) ->
+ let {I.multibinder = multibinder0; I.codomain = codomain0} = fntype0 in
+ let {I.multibinder = multibinder1; I.codomain = codomain1} = fntype1 in
+ begin match bind_and_conv_multibinder env clo0 clo1
+ multibinder0 multibinder1 with
+ | Some (env, clo0, clo1) ->
+ let vcodomain0 = eval clo0 codomain0 in
+ let vcodomain1 = eval clo1 codomain1 in
+ conv env vcodomain0 vcodomain1
+ | None -> false
+ end
+| V.Data {tag = tag0; args = args0; _}, V.Data {tag = tag1; args = args1; _} ->
+ tag0 = tag1 && Array.for_all2 (conv env) args0 args1
+| V.DataTypeClosure (clo0, datatype0), V.DataTypeClosure (clo1, datatype1) ->
+ let {I.ctors = ctors0; I.names = names0} = datatype0 in
+ let {I.ctors = ctors1; I.names = names1} = datatype1 in
+ Array.for_all2 (conv_multibinder env clo0 clo1) ctors0 ctors1 &&
+ Name.Ctor.Map.equal Int.equal names0 names1
+| V.Nat n, V.Nat m -> Z.equal n m
+| V.Arb var0, V.Arb var1 -> var0 = var1
+| V.Type, V.Type -> true
+| V.Builtin b0, V.Builtin b1 -> b0 = b1
+| V.Future _, _ -> impossible "Unify.conv: Future tested for conversion"
+| _, V.Future _ -> impossible "Unify.conv: Future tested for conversion"
+| _, _ -> false
+
+and bind_and_conv_multibinder env clo0 clo1 multibinder0 multibinder1 =
+ let {I.binders = binders0; I.order = order0} = multibinder0 in
+ let {I.binders = binders1; I.order = order1} = multibinder1 in
+ let arity0 = Array.length binders0 in
+ let arity1 = Array.length binders1 in
+ if arity0 = arity1 then
+ let vars = Uniq.fresh arity0 in
+ let env = Env.bind env vars in
+ let clo0 = Env.bind clo0 vars in
+ let clo1 = Env.bind clo1 vars in
+ let domain_conv i0 i1 =
+ i0 = i1 && binders0.(i0).plicity = binders1.(i1).plicity &&
+ let vtyp0 = eval clo0 binders0.(i0).typ in
+ let vtyp1 = eval clo1 binders1.(i1).typ in
+ conv env vtyp0 vtyp1 in
+ if Array.for_all2 domain_conv order0 order1 then
+ Some (env, clo0, clo1)
+ else
+ None
+ else
+ None
+
+and conv_multibinder env clo0 clo1 multibinder0 multibinder1 =
+ match bind_and_conv_multibinder env clo0 clo1
+ multibinder0 multibinder1 with
+ | Some _ -> true
+ | None -> false
diff --git a/uniq.ml b/uniq.ml
@@ -1,19 +1,79 @@
+(* A uniq is a globally-fresh identifier. We mainly use these for variables in
+ * the semantic domain, but they can be used any time we need a unique
+ * identifier. *)
+
+open Util
+
type uniq = Uniq of int
-type t = uniq
+type range = Range of int * int
exception Exhaustion
let fresh =
- let k = ref (-1) in
+ let next = ref 0 in
fun n ->
- if !k > max_int - n then
+ if !next > max_int - n then
raise Exhaustion (* This should never happen in practice. *)
else
- let arr = Array.init n (fun i -> Uniq (!k + i)) in
- k := !k + n;
- arr
+ let r = Range (!next, n) in
+ next := !next + n;
+ r
+
+let length (Range (_, l)) = l
-module Map = Map.Make(struct
+let get (Range (b, l)) i =
+ if i < 0 || i >= l then
+ invalid_arg "Uniq.get"
+ else
+ Uniq (b + i)
+
+module UniqMap = Map.Make(struct
type t = uniq
let compare (Uniq x) (Uniq y) = Int.compare x y
end)
+
+(* A RangeMap is a map which is extended with (range, value) pairs, but which
+ * is queried at a single uniq x by "offsetting" the value stored at the range
+ * (b, _) containing x (there can be at most one such range by uniqueness!)
+ * by x - b. *)
+module RangeMap = struct
+ module type Value = sig
+ type v
+ type v'
+ val offset: v -> int -> v'
+ end
+
+ module Make (V: Value) = struct
+ module K = struct
+ type t =
+ | Key of int * int
+ | Query of int
+
+ let compare k0 k1 = match k0, k1 with
+ | Key (b0, _), Key (b1, _) -> Int.compare b0 b1
+ | Key (b, l), Query x ->
+ if x < b then +1 else
+ if x >= b + l then -1 else 0
+ | Query x, Key (b, l) ->
+ if x < b then -1 else
+ if x >= b + l then +1 else 0
+ | Query _, Query _ -> impossible "Uniq.RangeMap.Make.K.compare"
+ end
+
+ (* TODO: Using a Map here is a hack, and it leads to wasted space both
+ * in keys and in values. Ideally, we should use a completely custom
+ * data structure. *)
+ module M = Map.Make(K)
+
+ type t = (V.v * int) M.t
+
+ let empty: t = M.empty
+
+ let add (Range (b, l)) v m = M.add (K.Key (b, l)) (v, b) m
+
+ let find_opt (Uniq x) m =
+ match M.find_opt (K.Query x) m with
+ | Some (v, b) -> Some (V.offset v (x - b))
+ | None -> None
+ end
+end
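
A small usage sketch of RangeMap, using only this commit's Uniq API (the instantiation mirrors VarMap in env.ml): the whole range is added once, and querying any uniq inside it returns the stored value offset by the position within the range.

    module M = Uniq.RangeMap.Make(struct
      type v = int                         (* a de Bruijn level *)
      type v' = int * int                  (* (level, position in range) *)
      let offset level j = (level, j)
    end)

    let () =
      let vars = Uniq.fresh 3 in           (* one range of 3 fresh uniqs *)
      let m = M.add vars 0 M.empty in      (* bind the whole range at level 0 *)
      match M.find_opt (Uniq.get vars 2) m with
      | Some (level, j) -> assert (level = 0 && j = 2)
      | None -> assert false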
diff --git a/util.ml b/util.ml
@@ -0,0 +1,16 @@
+let impossible reason = failwith ("impossible: " ^ reason)
+let no_impl feature = failwith ("no impl: " ^ feature)
+
+(* For creating uninitialized arrays *)
+let garbage = Obj.magic 0
+
+(* For some reason this isn't in the standard Array module... *)
+let fold_left2 f acc xs ys =
+ begin if Array.length xs <> Array.length ys then
+ invalid_arg "Util.fold_left2"
+ end;
+ let acc = ref acc in
+ for i = 0 to Array.length xs - 1 do
+ acc := f !acc xs.(i) ys.(i)
+ done;
+ !acc
diff --git a/value.ml b/value.ml
@@ -1,44 +1,27 @@
-open Global
module I = Internal
type value =
-| Rec of {env: value Env.environment ref; rhs: I.term}
+| Rec of {
+ env: environment ref;
+ rhs: I.term;
+}
(*| Switch of ...*)
| App of value * value array
-| FnClosure of value Env.environment * I.fn
-| FnTypeClosure of value Env.environment * I.fntype
-| Data of Tag.t * value array
-| DataTypeClosure of value Env.environment * I.datatype
+| FnClosure of environment * I.fn
+| FnTypeClosure of environment * I.fntype
+| Data of {
+ tag: Name.Ctor.t;
+ tagi: int;
+ args: value array;
+ clo: environment;
+ datatype: I.datatype;
+}
+| DataTypeClosure of environment * I.datatype
| Nat of Z.t
-| Arb of Uniq.t
-(*| Meta of Uniq.t*)
+| Arb of Uniq.uniq
+(*| Meta of ...*)
| Type
| Builtin of I.builtin
-| Future of Uniq.t * int
+| Future of Uniq.uniq * int
-(*
-let rec to_string m e =
- let s = to_string in
- let (c, s) = match e with
- | App (fn, args) -> (6, (s 7 fn) String.concat " " (List.map (s 7) (f :: args)))
- | Fn (arity, body) -> (1, "fn " ^ string_of_int arity ^ " => " ^ s 1 body)
- | FnType (_, doms, cod) ->
- let sdoms = match doms with
- | dom :: [] -> s 4 dom
- | doms -> "(" ^ String.concat ", " (List.map (s 1) doms) ^ ")" in
- (3, sdoms ^ " -> " ^ s 3 cod)
- | Data (tag, ts) ->
- (6, "@" ^ String.concat " " (tag :: List.map (s 7) ts))
- | DataType ctors ->
- let string_of_ctor (tag, ts) =
- "@" ^ String.concat " " (tag :: List.map (s 7) ts) in
- let ctors = List.map string_of_ctor (IdentMap.to_list ctors) in
- (8, "#[" ^ String.concat "; " ctors ^ "]")
- | Nat n -> (8, Z.to_string n)
- | Var i -> (8, "?" ^ string_of_int i)
- | Intrin Type -> (8, "`Type")
- | Intrin NatType -> (8, "`Nat")
- | Intrin NatOpAdd -> (8, "`+")
- | Intrin NatOpMul -> (8, "`*") in
- if c < m then "(" ^ s ^ ")" else s
-*)
+and environment = value Env.environment