-
Notifications
You must be signed in to change notification settings - Fork 2
/
closure.rs
2530 lines (2429 loc) · 125 KB
/
closure.rs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
use coq::checker::environ::{EnvError, Globals};
use coq::kernel::esubst::{Expr, Idx, IdxError, IdxResult, Lift, Subs as SubsRaw};
use coq::kernel::names::{
KnUser,
};
use core::convert::{TryFrom};
use core::nonzero::{NonZero};
use core::ops::Deref;
use ocaml::de::{Array, ORef};
use ocaml::values::{
CaseInfo,
Cast,
CoFix,
Cons,
Constr,
Cst,
Name,
Finite,
Fix,
Fix2,
Ind,
PRec,
Proj,
PUniverses,
RDecl,
RecordBody,
};
use std::borrow::{Cow as RustCow};
use std::cell::{Cell as RustCell};
use std::collections::{HashMap};
use std::collections::hash_map;
use std::hash::{Hash, Hasher};
use std::iter::{self};
use std::mem;
use std::option::{NoneError};
use std::sync::{Arc};
use typed_arena::{Arena};
use util::borrow::{Cow};
use util::ghost_cell::{Cell, Set};
use vec_map::{self, VecMap};
/// A substitution whose payload is a borrowed slice of shared terms; the
/// concrete instantiation of the generic `SubsRaw` used throughout this module.
pub type Subs<'id, 'a, 'b> = SubsRaw<&'a [FConstr<'id, 'a, 'b>]>;
/// Shorthand for a 'b-lived shared reference to an `ORef`-wrapped value.
pub type MRef<'b, T> = &'b ORef<T>;
/// A dumbed-down version of Cow that has no requirements on its type parameter.
/// Used for implementing the immutable version of zip.
type SCow<'b, T> = Cow<'b, T, T>;
/*
* Five kinds of reductions:
*
* β - lambda application
* δ - constant expansion
* ι - case analysis
* ζ - let substitution
* η - function extensionality
*
* Of these, only the first four occur during normal reduction (the fifth is invoked
* opportunistically during conversion). The first four are configured on or off by flags passed
* to the reduction function. In Rust, as an optimization, these flags are set as compile time
* constants.
*/
/// Key into the cache of unfolded constant / rel bodies.
#[derive(Copy,Clone,Debug,Hash)]
pub enum TableKey<T> {
    /// A constant reference (in practice instantiated with universes; see `TableKeyC`).
    ConstKey(T),
    // should not occur
    // VarKey(Id),
    /// A de Bruijn rel index.
    RelKey(Idx),
}
/// Errors that can arise during reduction.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum RedError {
    /// An index / lift arithmetic error.
    Idx(IdxError),
    /// An error reported while consulting the global environment.
    Env(EnvError),
    /// A required reference could not be found.
    NotFound,
}
// We box the Error to shrink the Result size.
pub type RedResult<T> = Result<T, Box<RedError>>;
/// Specification of the reduction function.
/// Flags of reduction and cache of constants: 'a is a type that may be
/// mapped to constr. 'a infos implements a cache for constants and
/// abstractions, storing a representation (of type 'a) of the body of
/// this constant or abstraction.
/// * i_tab is the cache table of the results
/// * i_repr is the function to get the representation from the current
/// state of the cache and the body of the constant. The result
/// is stored in the table.
/// * i_rels = (4,[(1,c);(3,d)]) means there are 4 free rel variables
/// and only those with index 1 and 3 have bodies which are c and d resp.
///
/// ref_value_cache searches in the tab, otherwise uses i_repr to
/// compute the result and store it in the table. If the constant can't
/// be unfolded, returns None, but does not store this failure. * This
/// doesn't take the RESET into account. You mustn't keep such a table
/// after a Reset. * This type is not exported. Only its two
/// instantiations (cbv or lazy) are.
bitflags! {
    /// Sets of enabled reduction kinds (see the β/δ/ι/ζ/η discussion above).
    /// NOTE(review): BETADELTAIOTA includes ZETA — presumably mirroring the
    /// upstream OCaml kernel's flag set; confirm against closure.ml.
    pub struct Reds : u8 {
        const BETA = 0b0001;
        const DELTA = 0b0010;
        const IOTA = 0b0100;
        const ZETA = 0b1000;
        const BETADELTAIOTA = Self::BETA.bits | Self::DELTA.bits | Self::ZETA.bits |
                              Self::IOTA.bits;
        const BETADELTAIOTANOLET = Self::BETA.bits | Self::DELTA.bits | Self::IOTA.bits;
        const BETAIOTAZETA = Self::BETA.bits | Self::IOTA.bits | Self::ZETA.bits;
    }
}
/// Bundle of the arenas into which reduction allocates; copied freely and
/// threaded through essentially every function in this module.
#[derive(Copy, Clone)]
pub struct Context<'id, 'a, 'b> where 'b: 'a, 'id: 'a {
    /// The term arena; this is really a poor substitute for garbage collection, since it can't
    /// free, but my suspicion is that it will still have better performance characteristics than
    /// Arc.
    term_arena: &'a Arena<FTerm<'id, 'a, 'b>>,
    /// We need this arena for a kind of silly reason.  We need essentially all Constrs we
    /// reference to have lifetime 'b, and if we get a Constr from a global we indeed have a
    /// reference with lifetime 'b; so far, so good, right?  Unfortunately, in some cases (namely,
    /// those with universe polymorphism) we actually need to modify the Constr in a way that we
    /// don't want to share mutably with everyone each time, since it depends on the particular
    /// universe with which it was instantiated.
    ///
    /// Note that this could be shared a la Globals, if we so chose.
    ///
    /// At first, I was sad about this, because it seemed to make the distinction between 'a and 'b
    /// meaningless; but on further reflection, it actually makes it *meaningful* since before this
    /// point there was not much reason for 'b to be different from 'a.  Now, it is clear where we
    /// can't get away without an arena at all (constr_arena, which is actually a perfect use case
    /// for an arena in the sense that we'd never deallocate these Constrs until we were done with
    /// the reduction anyway), and where we might want to start looking for other means of GC
    /// (term_arena).  However, it might be useful to use a structure other than an arena (like a
    /// Vec with size "number of inds in the environment", combined with a sendable iterator over
    /// the vector).  That would make it Sendable, but at the cost of some complexity.
    constr_arena: &'b Arena<Constr>,
    /// This arena could possibly be avoided, but we use it when we cons substitutions in order to
    /// make them cheap to copy.  Since the elements in this arena don't have a destructor, the
    /// drop should also be very fast (note that if we took this one step further and gave the
    /// same treatment to the Subs vectors themselves, the FTerm arena could also almost be made to
    /// have no destructors; the only cost would be that we couldn't mutate lifts in place, but
    /// that's not too useful too often (and in the handful of places where that is useful, maybe
    /// we could have a specialized mutable version of the structure).  The only remaining place
    /// where we'd have a Vec is the array used for lambda arguments, and as comments below point
    /// out that doesn't really need to be a Vec anyway.
    fconstr_arena: &'a Arena<FConstr<'id, 'a, 'b>>,
    // constr_arena: Arena<FConstr<'id, 'a, 'b>>
    // Not clear to me if this would actually be an optimization (at least, not with the current
    // substitution structure).  The reason being that the amount of sharing we can actually get
    // out of it seems limited, since the interesting reduction steps usually require mutating
    // the substitution environment in some way, and we normally only construct closures to a
    // relatively short depth unless we can strip away their environments.  So far, we only seem
    // to really gain sharing during mk_clos_deep, which is called just once(ish) per Constr node;
    // so we probably don't get *too* much sharing out of that (if the reduction steps are actually
    // doing something).  In other places, we gain sharing automatically by sharing terms that
    // contain substitutions within them; in those cases, *also* sharing the substitutions wouldn't
    // buy a whole lot.
    //
    // If we change our minds, it's not that hard to fix, we just need to change &Subs (or Subs) to
    // &'a Subs in a bunch of places.
    //
    // subs_arena: Arena<Subs<T>>,
}
/// Table keys instantiated at universe-polymorphic constants.
pub type TableKeyC<'b> = TableKey<MRef<'b, PUniverses<Cst>>>;
// TODO: Use custom KeyHash algorithm.
/// Newtype wrapper around the cache table (see `Deref`/`DerefMut` impls below).
struct KeyTable<'b, T>(HashMap<TableKeyC<'b>, T>);
/// Reduction flags plus the caches described in the comment block above
/// (`i_rels` and `i_tab` in the OCaml implementation).
pub struct Infos<'id, 'b, T> {
    flags : Reds,
    // i_repr : 'a infos -> constr -> 'a;
    // globals: &'a mut Globals<'g>,
    // i_rels : int * (int * constr) list;
    /// Count of free rel variables, together with the bodies of those rels
    /// that have one (indexed as in the i_rels comment above).
    rels: (u32, VecMap<&'b mut Constr>),
    /// Cache table of already-converted bodies.
    tab: KeyTable<'b, T>,
    /// The owning set: if you want to do pretty much anything with an FConstr<'id, 'a, 'b>, you
    /// need this in order to do it.
    pub set: Set<'id>,
}
/// Reduction state annotation carried by every shared term (`FConstr::norm`).
#[derive(Copy,Clone,Debug,PartialEq)]
pub enum RedState {
    /// Norm means the term is fully normalized and cannot create a redex
    /// when substituted
    Norm,
    /// Cstr means the term is in head normal form and that it can
    /// create a redex when substituted (i.e. constructor, fix, lambda)
    Cstr,
    /// Whnf means we reached the head normal form and that it cannot
    /// create a redex when substituted
    Whnf,
    /// Red is used for terms that might be reduced
    Red,
}
/// type of shared terms. fconstr and frterm are mutually recursive.
/// Clone of the constr structure, but completely mutable, and
/// annotated with reduction state (reducible or not).
pub struct FConstr<'id, 'a, 'b> where 'b: 'a, 'id: 'a {
    /// Current reduction state of this term (mutable through the owning Set).
    norm: Cell<'id, RedState>,
    /// The term itself; `None` stands in for the OCaml FLOCKED marker
    /// (see the commented-out `Locked` variant below).
    term: Cell<'id, Option<&'a FTerm<'id, 'a, 'b>>>,
}
/// Shared-term analogue of the `Constr` node shapes, plus machine-only nodes
/// (delayed lifts and closures).
pub enum FTerm<'id, 'a, 'b> where 'b: 'a, 'id: 'a {
    Rel(Idx),
    /// Metas and sorts; but metas shouldn't occur in a .vo...
    Atom(&'b Constr),
    Cast(FConstr<'id, 'a, 'b>, MRef<'b, (Constr, Cast, Constr)>, FConstr<'id, 'a, 'b>),
    Flex(TableKeyC<'b>),
    Ind(MRef<'b, PUniverses<Ind>>),
    Construct(MRef<'b, PUniverses<Cons>>),
    App(FConstr<'id, 'a, 'b>, Vec<FConstr<'id, 'a, 'b>>),
    Proj(&'b Cst, bool, FConstr<'id, 'a, 'b>),
    Fix(MRef<'b, Fix>, usize, Subs<'id, 'a, 'b>),
    CoFix(MRef<'b, CoFix>, usize, Subs<'id, 'a, 'b>),
    Case(MRef<'b, (CaseInfo, Constr, Constr, Array<Constr>)>, FConstr<'id, 'a, 'b>,
         FConstr<'id, 'a, 'b>, Vec<FConstr<'id, 'a, 'b>>),
    /// predicate and branches are closures
    CaseT(MRef<'b, (CaseInfo, Constr, Constr, Array<Constr>)>, FConstr<'id, 'a, 'b>,
          Subs<'id, 'a, 'b>),
    Lambda(Vec</*(Name, Constr)*/MRef<'b, (Name, Constr, Constr)>>,
           &'b Constr, Subs<'id, 'a, 'b>),
    Prod(MRef<'b, (Name, Constr, Constr)>, FConstr<'id, 'a, 'b>, FConstr<'id, 'a, 'b>),
    LetIn(MRef<'b, (Name, Constr, Constr, Constr)>,
          FConstr<'id, 'a, 'b>, FConstr<'id, 'a, 'b>, Subs<'id, 'a, 'b>),
    // should not occur
    // | FEvar of existential_key * fconstr array (* why diff from kernel/closure? *)
    /// FLIFT is a delayed shift; allows sharing between 2 lifted copies
    /// of a given term.
    Lift(Idx, FConstr<'id, 'a, 'b>),
    /// FCLOS is a delayed substitution applied to a constr
    Clos(&'b Constr, Subs<'id, 'a, 'b>),
    // /// FLOCKED is used to erase the content of a reference that must
    // /// be updated. This is to allow the garbage collector to work
    // /// before the term is computed.
    // Locked,
}
/// The type of (machine) stacks (= lambda-bar-calculus' contexts)
/// Inst ≡ ! allows us to type-safely represent stacks which have no instructions;
/// Shift ≡ ! allows us to type-safely represent stacks which have no shifts.
/// Inst ≡ ! and Shift ≡ ! means the stack is purely applicative.
/// (NOTE: This might become harder if / when we move away from Vecs, so it's a bit risky to add
/// this at this stage).
pub enum StackMember<'id, 'a, 'b, Inst, Shft> where 'b: 'a, 'id: 'a {
    /// Pending arguments.
    App(Vec<FConstr<'id, 'a, 'b>>),
    /// Pending case analysis (predicate and branches already converted).
    Case(MRef<'b, (CaseInfo, Constr, Constr, Array<Constr>)>, FConstr<'id, 'a, 'b>,
         Vec<FConstr<'id, 'a, 'b>>, Inst),
    /// Pending case analysis whose predicate and branches are still closures
    /// (compare FTerm::CaseT).
    CaseT(MRef<'b, (CaseInfo, Constr, Constr, Array<Constr>)>, Subs<'id, 'a, 'b>, Inst),
    /// Pending projection.
    Proj(usize, usize, &'b Cst, bool, Inst),
    /// A fixpoint together with the stack it was applied to.
    Fix(FConstr<'id, 'a, 'b>, Stack<'id, 'a, 'b, Inst, Shft>, Inst),
    /// A delayed de Bruijn shift applied to the rest of the stack.
    Shift(Idx, Shft),
    /// A shared term whose cells must be updated once the head is known.
    Update(&'a FConstr<'id, 'a, 'b>, Inst),
}
/// A [stack] is a context of arguments, arguments are pushed by
/// [append_stack] one array at a time but popped with [decomp_stack]
/// one by one
pub struct Stack<'id, 'a, 'b, Inst, Shft>(Vec<StackMember<'id, 'a, 'b, Inst, Shft>>)
    where 'b: 'a, 'id: 'a;
/// Full stack (all operations are allowed).
pub type FStack<'id, 'a, 'b> = Stack<'id, 'a, 'b, (), ()>;
/// Purely applicative stack (only Apps are allowed).
pub type AStack<'id, 'a, 'b> = Stack<'id, 'a, 'b, !, !>;
/// Applicative + shifts (only Shift and App are allowed).
pub type SStack<'id, 'a, 'b> = Stack<'id, 'a, 'b, !, ()>;
/// The result of trying to perform beta reduction.
enum Application<'id, 'a, 'b> where 'b: 'a, 'id: 'a {
    /// Arguments are fully applied; this is the corresponding substitution.
    Full(Subs<'id, 'a, 'b>),
    /// Arguments are partially applied; this is the corresponding thunk.
    Partial(FConstr<'id, 'a, 'b>),
}
/// cache of constants: the body is computed only when needed.
pub type ClosInfos<'id, 'a, 'b> = Infos<'id, 'b, FConstr<'id, 'a, 'b>>;
/// Conversion from a Constr body into the cached representation (`i_repr` in
/// the OCaml implementation). On failure the input `c` is handed back with
/// the error so the caller can restore it (see ref_value_cache).
pub trait IRepr<'id, 'a, 'b> {
    fn i_repr<T>(set: &Set<'id>, c: T, ctx: Context<'id, 'a, 'b>) -> Result<Self, (IdxError, T)>
        where T: Into<&'b Constr> + 'b,
              T: Deref<Target=Constr>,
              Self: Sized;
}
impl ::std::convert::From<IdxError> for Box<RedError> {
    /// Wrap an index error in the boxed reduction-error type (the box keeps
    /// RedResult small).
    fn from(e: IdxError) -> Self {
        let err = RedError::Idx(e);
        Box::new(err)
    }
}
impl ::std::convert::From<EnvError> for Box<RedError> {
    /// Wrap an environment error in the boxed reduction-error type.
    fn from(e: EnvError) -> Self {
        let err = RedError::Env(e);
        Box::new(err)
    }
}
impl<'id, 'a, 'b> Context<'id, 'a, 'b> {
    /// Assemble a Context from the three arenas reduction allocates into.
    pub fn new(term_arena: &'a Arena<FTerm<'id, 'a, 'b>>,
               constr_arena: &'b Arena<Constr>,
               fconstr_arena: &'a Arena<FConstr<'id, 'a, 'b>>) -> Self {
        Context { term_arena, constr_arena, fconstr_arena }
    }
}
impl<'b, T> ::std::ops::Deref for KeyTable<'b, T> {
type Target = HashMap<TableKeyC<'b>, T>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<'b, T> ::std::ops::DerefMut for KeyTable<'b, T> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl<'a, B> Deref for SCow<'a, B>
{
    type Target = B;
    /// Borrow the wrapped value, whichever side of the Cow it lives on.
    fn deref(&self) -> &B {
        match *self {
            Cow::Owned(ref o) => o,
            Cow::Borrowed(b) => b,
        }
    }
}
impl<'b> Into<&'b Constr> for &'b mut Constr {
    /// Weaken a unique reference to a shared one for the full lifetime 'b.
    fn into(self) -> &'b Constr {
        let shared: &'b Constr = self;
        shared
    }
}
impl<'b> PartialEq for TableKeyC<'b> {
    /// Two keys are equal when they are the same kind and either name the
    /// same constant (user-facing kernel-name comparison, with equal universe
    /// instances) or the same rel index.
    fn eq(&self, o: &Self) -> bool {
        match (*self, *o) {
            (TableKey::ConstKey(l), TableKey::ConstKey(r)) => {
                let PUniverses(ref cl, ref ul) = **l;
                let PUniverses(ref cr, ref ur) = **r;
                KnUser(cl) == KnUser(cr) && ul.equal(ur)
            },
            (TableKey::RelKey(l), TableKey::RelKey(r)) => l == r,
            _ => false,
        }
    }
}
impl<'b> Eq for TableKeyC<'b> {}
impl<'b> Hash for TableKeyC<'b> {
    /// Hash consistently with `eq`: constants by user-facing name plus
    /// universes, rels by index. A distinct leading tag (1 vs 2) keeps the
    /// two variants from colliding.
    fn hash<H>(&self, state: &mut H) where H: Hasher {
        match *self {
            TableKey::RelKey(i) => {
                state.write_u64(2);
                i.hash(state);
            },
            TableKey::ConstKey(o) => {
                let PUniverses(ref c, ref u) = **o;
                state.write_u64(1);
                KnUser(c).hash(state);
                u.hash(state);
            },
        }
    }
}
impl<'id, 'a, 'b> IRepr<'id, 'a, 'b> for FConstr<'id, 'a, 'b> {
    /// Convert a body into a shared term by closing it over the identity
    /// substitution.
    fn i_repr<T>(set: &Set<'id>, c: T, ctx: Context<'id, 'a, 'b>) -> Result<Self, (IdxError, T)>
        where T: Into<&'b Constr> + 'b,
              T: Deref<Target=Constr>,
    {
        let id_subs = Subs::id(None);
        id_subs.mk_clos_raw(set, c, ctx)
    }
}
impl<'id, 'a, 'b, T> Infos<'id, 'b, T> where T: IRepr<'id, 'a, 'b> {
    /// Look up (and cache) the converted body of reference `rf`.
    ///
    /// Returns `Ok(None)` when `rf` has no unfoldable body (a rel without a
    /// body, or a constant that is opaque / missing); such failures are NOT
    /// stored in the table. On success the converted body is inserted into
    /// `tab` and a reference to the cached entry is returned.
    fn ref_value_cache<'r, 'g>(set: &Set<'id>,
                               rels: &'r mut (u32, VecMap<&'b mut Constr>),
                               tab: &'r mut KeyTable<'b, T>,
                               globals: &'r Globals<'g>, rf: TableKeyC<'b>,
                               ctx: Context<'id, 'a, 'b>) -> IdxResult<Option<&'r T>>
        where 'g: 'b
    {
        Ok(Some(match tab.entry(rf) {
            // Already cached: hand back the stored conversion.
            hash_map::Entry::Occupied(o) => o.into_mut(),
            hash_map::Entry::Vacant(v) => {
                match rf {
                    TableKey::RelKey(n) => {
                        let (s, ref mut l) = *rels;
                        if let Some(i) = s.checked_sub(u32::from(n)) {
                            // i is potentially valid, meaning u32 (note that n is a length, not an
                            // Idx, so 0 is a valid index; also note that since n was a positive
                            // u32, we know i < s).
                            // FIXME: Verify that u32 to usize is always a valid cast.
                            let i = i as usize;
                            let (mut body, old) = if let vec_map::Entry::Occupied(mut o) =
                                l.entry(i) {
                                // Lift the stored body by n before converting it.
                                let c = o.get().lift(n)?;
                                // One weird trick to keep your lifetimes in order!
                                // We remove body from rels; if there's a panic here and we catch
                                // it before rels goes away, it will be incorrect (proper
                                // UnwindSafe usage will make this a nonissue, but if we care, we
                                // can either use take_mut or add a catch_panic here to fix body
                                // back up).  In the absence of panics, once this block ends nobody
                                // will ever try to look up this entry directly in i_rels again,
                                // since ref_value_cache is the only thing that does so and it will
                                // always take the entry in the outer hash table if it's present;
                                // therefore, the removal here is fine.
                                let mut body: &'b mut Constr = o.remove();
                                // Stash the unlifted body so it can be restored on error.
                                let mut old = mem::replace(body, c);
                                (body, old)
                            } else {
                                // Rel is a free variable without a body.
                                return Ok(None);
                            };
                            // We do this dance to ensure that we keep rels the same if there
                            // was a (non-panic) error.  Obviously if there's a panic during
                            // VecMap insertion, this will not work out, but that shouldn't be
                            // an issue given how VecMap is implemented (in particular, it
                            // shouldn't shrink after remove is run, so it should still have
                            // space for i).
                            //
                            // Note that unlike inserting into a vacant HashMap entry or
                            // reinserting the old VecMap entry (neither of which should panic
                            // in practice), there is actually a chance that i_repr panics
                            // here.  To be on the safe side, maybe we should catch_panic
                            // here (which take_mut would also do for safety reasons).
                            match T::i_repr(&set, body, ctx) {
                                Ok(c) => v.insert(c),
                                Err((e, body)) => {
                                    // Restore the unlifted body and put it back in rels
                                    // before propagating the error.
                                    mem::replace(body, old);
                                    l.insert(i, body);
                                    return Err(e)
                                },
                            }
                            // As mentioned above, this insert shouldn't panic in practice,
                            // because space for the entry was already reserved.
                        } else {
                            // n would definitely be invalid if it were negative, (it wasn't
                            // actually a free variable at all), so we know it can't have a body.
                            return Ok(None);
                        }
                    },
                    // | VarKey(id) => raise Not_found
                    TableKey::ConstKey(cst) => {
                        if let Some(Ok(body)) = globals.constant_value(cst) {
                            let body = match body {
                                RustCow::Borrowed(b) => b,
                                RustCow::Owned(b) => {
                                    // Sometimes we get an owned value back (due to, e.g., universe
                                    // polymorphism), so we need to allocate it in our constr arena.
                                    // This is very unfortunate, but I'm not sure I see a good way
                                    // around something like this; it seems like making *all*
                                    // Constr references be Cow would be overkill, for example.
                                    // We could also share the Constr arena and cache further, since
                                    // the values in it only depend on the input parameters, but that
                                    // would probably limit parallelism.
                                    ctx.constr_arena.alloc(b)
                                },
                            };
                            v.insert(T::i_repr(&set, body, ctx).map_err( |(e, _)| e)?)
                        } else {
                            // We either encountered a lookup error, or cst was not an
                            // evaluable constant; in either case, we can't return a body.
                            return Ok(None);
                        }
                    },
                }
            },
        }))
    }
    /// Public entry point over ref_value_cache: splits self into its fields so
    /// the cache lookup can borrow rels/tab while the owning set is returned
    /// alongside the cached value.
    pub fn unfold_reference<'r, 'g>(&'r mut self,
                                    globals: &'r Globals<'g>, rf: TableKeyC<'b>,
                                    ctx: Context<'id, 'a, 'b>) ->
        IdxResult<Option<(&'r mut Set<'id>, &'r T)>>
        where 'g: 'b,
    {
        let Infos { ref mut set, ref mut rels, ref mut tab, .. } = *self;
        let res = Self::ref_value_cache(set, rels, tab, globals, rf, ctx)?;
        Ok(res.map( |res| (set, res) ))
    }
}
impl RedState {
    /// State resulting from placing this term in a neutral position:
    /// normalized states collapse to Whnf, reducible states to Red.
    fn neutr(&self) -> Self {
        match *self {
            RedState::Whnf | RedState::Norm => RedState::Whnf,
            RedState::Red | RedState::Cstr => RedState::Red,
        }
    }
}
/// Clone every shared term in the slice (cloning requires the owning set).
pub fn clone_vec<'id, 'a, 'b>(v: &[FConstr<'id, 'a, 'b>],
                              set: &Set<'id>) -> Vec<FConstr<'id, 'a, 'b>> {
    let mut out = Vec::with_capacity(v.len());
    for fc in v.iter() {
        out.push(fc.clone(set));
    }
    out
}
impl<'id, 'a, 'b> FConstr<'id, 'a, 'b> {
/// Clone both cells of this shared term (requires the owning set).
pub fn clone(&self, set: &Set<'id>) -> Self {
    FConstr {
        norm: self.norm.clone(set),
        term: self.term.clone(set),
    }
}
/// Read the underlying term; `None` means the term cell has been emptied
/// (a "locked" term, in OCaml FLOCKED terms).
pub fn fterm(&self, set: &Set<'id>) -> Option<&'a FTerm<'id, 'a, 'b>> {
    *set.get(&self.term)
}
/// Mark this term as fully normalized.
fn set_norm(&self, set: &mut Set<'id>) {
    *set.get_mut(&self.norm) = RedState::Norm;
}
/// Overwrite both the reduction state and the term in place; the update is
/// visible to every holder of this shared term.
fn update(&self, set: &mut Set<'id>, no: RedState, term: Option<&'a FTerm<'id, 'a, 'b>>) {
    // Could issue a warning if no is still Red, pointing out that we lose sharing.
    *set.get_mut(&self.norm) = no;
    *set.get_mut(&self.term) = term;
}
/// Lift this term by n binders. The lift is performed lazily where possible:
/// heads that cannot mention a free rel are returned unchanged, Rel indices
/// are re-indexed immediately, binders carrying a substitution
/// (Lambda/Fix/CoFix) absorb the shift into that substitution, and any other
/// node is wrapped in a delayed FTerm::Lift so two lifted copies can share
/// the underlying term.
///
/// Panics if the term is locked (its term cell is None).
fn lft(&self, set: &Set<'id>, mut n: Idx, ctx: Context<'id, 'a, 'b>) -> IdxResult<Self> {
    let mut ft = self;
    loop {
        match *ft.fterm(set).expect("Tried to lift a locked term") {
            // Heads with no free rels are invariant under lifting.
            FTerm::Ind(_) | FTerm::Construct(_)
            | FTerm::Flex(TableKey::ConstKey(_)/* | VarKey _*/) => return Ok(ft.clone(set)),
            // Rels are re-indexed eagerly (overflow-checked).
            FTerm::Rel(i) => return Ok(FConstr {
                norm: Cell::new(RedState::Norm),
                term: Cell::new(Some(ctx.term_arena.alloc(FTerm::Rel(i.checked_add(n)?)))),
            }),
            // For binders with a captured substitution, push the shift into it.
            FTerm::Lambda(ref tys, f, ref e) => {
                let mut e = e.clone(); // expensive
                e.shift(n)?;
                return Ok(FConstr {
                    norm: Cell::new(RedState::Cstr),
                    term: Cell::new(Some(
                        ctx.term_arena.alloc(FTerm::Lambda(tys.clone(), // expensive
                                                           f, e)))),
                })
            },
            FTerm::Fix(fx, i, ref e) => {
                let mut e = e.clone(); // expensive
                e.shift(n)?;
                return Ok(FConstr {
                    norm: Cell::new(RedState::Cstr),
                    term: Cell::new(Some(ctx.term_arena.alloc(FTerm::Fix(fx, i, e)))),
                })
            },
            FTerm::CoFix(cfx, i, ref e) => {
                let mut e = e.clone(); // expensive
                e.shift(n)?;
                return Ok(FConstr {
                    norm: Cell::new(RedState::Cstr),
                    term: Cell::new(Some(ctx.term_arena.alloc(FTerm::CoFix(cfx, i, e)))),
                })
            },
            // Collapse nested delayed lifts into a single accumulated shift.
            FTerm::Lift(k, ref m) => {
                n = n.checked_add(k)?;
                ft = m;
            },
            // Everything else gets a delayed lift node, preserving sharing.
            _ => return Ok(FConstr {
                norm: ft.norm.clone(set),
                term: Cell::new(Some(ctx.term_arena.alloc(FTerm::Lift(n, ft.clone(set))))),
            })
        }
    }
}
/// Lifting specialized to the case where n = 0.
///
/// Panics if the term is locked (its term cell is None).
fn lft0(&self, set: &Set<'id>, ctx: Context<'id, 'a, 'b>) -> IdxResult<Self> {
    match *self.fterm(set).expect("Tried to lift a locked term") {
        // Heads with no free rels: share as-is.
        FTerm::Ind(_) | FTerm::Construct(_)
        | FTerm::Flex(TableKey::ConstKey(_)/* | VarKey _*/) => Ok(self.clone(set)),
        // Lift by 0 leaves the index unchanged; the shared term is known normal.
        FTerm::Rel(_) => Ok(FConstr {
            norm: Cell::new(RedState::Norm),
            term: self.term.clone(set),
        }),
        // Binder heads: share the term, marked as constructor-like.
        FTerm::Lambda(_, _, _) | FTerm::Fix(_, _, _) | FTerm::CoFix(_, _, _) => Ok(FConstr {
            norm: Cell::new(RedState::Cstr),
            term: self.term.clone(set),
        }),
        // A delayed lift underneath: 0 + k = k, so just lift by k.
        FTerm::Lift(k, ref m) => m.lft(set, k, ctx),
        _ => Ok(self.clone(set))
    }
}
/// The inverse of mk_clos_deep: move back to constr
/// Note: self must be typechecked beforehand!
fn to_constr<F>(&self, set: &mut Set<'id>, constr_fun: F, lfts: &Lift,
ctx: Context<'id, 'a, 'b>) -> IdxResult<Constr>
where F: Fn(&FConstr<'id, 'a, 'b>, &mut Set<'id>,
&Lift, Context<'id, 'a, 'b>) -> IdxResult<Constr>,
{
// FIXME: The constant cloning of lfts can likely be replaced by a slightly different API
// where lfts is taken mutably, and added shifts or lifts can be pushed for a scope, then
// popped automatically. There are several cute ways to do this (including a wrapper
// around a lift with a custom destructor, and using a closure). The destructor route is
// somewhat appealing because it probably makes it easier to convert this into something
// that doesn't require mutual recursion.
/* let mut norm_ = self.norm.get();
let mut term_ = Cow::Borrowed(self.fterm().expect("Tried to lift a locked term!"));*/
let mut lfts = RustCow::Borrowed(lfts);
let mut v = self;
loop {
match *v.fterm(set).expect("Tried to lift a locked term!") {
FTerm::Rel(i) => return Ok(Constr::Rel(i32::from(lfts.reloc_rel(i)?) as i64)),
FTerm::Flex(TableKey::RelKey(i)) =>
return Ok(Constr::Rel(i32::from(lfts.reloc_rel(i)?) as i64)),
// | FFlex (VarKey x) -> Var x
FTerm::Atom(c) => {
// Unless something weird is happening, this should be a cheap operation,
// because atoms should only contain sorts in our implementation (so this just
// becomes a clone()). Really, this could probably just be replaced by a
// c.clone(), for more clarity.
return c.exliftn(&lfts)
},
FTerm::Cast(ref a, k, ref b) => {
let a = constr_fun(a, set, &lfts, ctx)?;
let b = constr_fun(b, set, &lfts, ctx)?;
return Ok(Constr::Cast(ORef(Arc::from((a, k.1, b)))))
},
FTerm::Flex(TableKey::ConstKey(c)) => return Ok(Constr::Const(c.clone())),
FTerm::Ind(op) => return Ok(Constr::Ind(op.clone())),
FTerm::Construct(op) => return Ok(Constr::Construct(op.clone())),
FTerm::Case(ci, ref p, ref c, ref ve) => {
let (ref ci, _, _, _) = **ci;
let p = constr_fun(p, set, &lfts, ctx)?;
let c = constr_fun(c, set, &lfts, ctx)?;
// expensive -- allocates a Vec
let ve: Result<Vec<_>, _> = ve.iter()
.map( |v| constr_fun(v, set, &lfts, ctx) )
.collect();
return Ok(Constr::Case(ORef(Arc::from((ci.clone(), p, c,
Array(Arc::from(ve?)))))))
},
FTerm::CaseT(ci, ref c, ref e) => {
/*
// FIXME: Do we really need to create a new FTerm here? Aren't we just going
// to immediately turn it back into a Term? It seems like it would be better
// to just go directly to the logic in FTerm::Case above.
let (_, ref p, _, ref ve) = **ci;
norm = RedState::Red;
term = Cow::Owned(FTerm::Case(ci.clone(), e.mk_clos2(p, ctx)?,
c.clone(),
e.mk_clos_vect(&ve, ctx)? /* expensive */));
*/
let (ref ci, ref p, _, ref ve) = **ci;
let p = e.mk_clos2(set, p, ctx)?;
let p = constr_fun(&p, set, &lfts, ctx)?;
let c = constr_fun(c, set, &lfts, ctx)?;
// expensive -- allocates a Vec
let ve: Result<Vec<_>, _> = ve.iter()
.map( |t: &'b Constr| {
let v = e.mk_clos(set, t, ctx);
constr_fun(&v?, set, &lfts, ctx)
})
.collect();
return Ok(Constr::Case(ORef(Arc::from((ci.clone(), p, c,
Array(Arc::from(ve?)))))))
},
FTerm::Fix(o, i, ref e) => {
// FIXME: The recursion here seems like it potentially wastes a lot of work
// converting Constrs to FTerms and back... same with CoFix below, and possibly
// CaseT above to some extent. Also, we probably prematurely allocate a few
// times, since this is one of the cases where we only need references to
// FTerms and FConstrs rather than full-fledged FTerms / FConstrs.
let Fix(Fix2(ref reci, _), PRec(ref lna, ref tys, ref bds)) = **o;
// expensive, makes a Vec
let ftys: Result<Vec<_>, _> = tys.iter()
.map( |t| {
let t = e.mk_clos(set, t, ctx);
constr_fun(&t?, set, &lfts, ctx)
})
.collect();
// Note: I believe the length should always be nonzero here, but I'm not 100%
// sure. For now, we separate the two cases to avoid failing outright in the
// zero case (it would also save useless work, but our implementation won't
// let you try to lift by zero so this is an academic point). This also
// applies to CoFix below.
// expensive, makes a Vec
let fbds: Result<Vec<_>, _> = if let Some(n) = NonZero::new(bds.len()) {
let n = Idx::new(n)?;
let mut e = e.clone(); // expensive, but shouldn't outlive this block.
e.liftn(n)?;
// expensive, but shouldn't outlive this block.
let mut lfts = lfts.into_owned();
lfts.liftn(n)?;
bds.iter()
.map( |t| {
let t = e.mk_clos(set, t, ctx);
constr_fun(&t?, set, &lfts, ctx)
})
.collect()
} else {
// expensive, makes a Vec
bds.iter()
.map( |t| {
let t = e.mk_clos(set, t, ctx);
constr_fun(&t?, set, &lfts, ctx)
})
.collect()
};
// FIXME: We know (assuming reasonable FTerm construction) that i fits in an
// i64 if it was created directly from a Constr. We also know that i fits in
// an isize if it was created by unfolding a Fix, since all the FTerm::Fix
// terms we derive have i < reci.len(), and reci is a vector; Rust guarantees
// that vector lengths (in bytes, actually!) fit in an isize. What remains to
// be determined is whether isize is always guaranteed to fit in an i64. If
// that's true, this cast necessarily succeeds.
return Ok(Constr::Fix(ORef(Arc::from(Fix(Fix2(reci.clone(), i as i64),
PRec(lna.clone(),
Array(Arc::new(ftys?)),
Array(Arc::new(fbds?))))))))
},
FTerm::CoFix(o, i, ref e) => {
let CoFix(_, PRec(ref lna, ref tys, ref bds)) = **o;
// expensive, makes a Vec
let ftys: Result<Vec<_>, _> = tys.iter()
.map( |t| {
let t = e.mk_clos(set, t, ctx);
constr_fun(&t?, set, &lfts, ctx)
})
.collect();
// expensive, makes a Vec
let fbds: Result<Vec<_>, _> = if let Some(n) = NonZero::new(bds.len()) {
let n = Idx::new(n)?;
let mut e = e.clone(); // expensive, but shouldn't outlive this block.
e.liftn(n)?;
// expensive, but shouldn't outlive this block.
let mut lfts = lfts.into_owned();
lfts.liftn(n)?;
bds.iter()
.map( |t| {
let t = e.mk_clos(set, t, ctx);
constr_fun(&t?, set, &lfts, ctx)
})
.collect()
} else {
// expensive, makes a Vec
bds.iter()
.map( |t| {
let t = e.mk_clos(set, t, ctx);
constr_fun(&t?, set, &lfts, ctx)
})
.collect()
};
// FIXME: We know (assuming reasonable FTerm construction) that i fits in an
// i64 if it was created directly from a Constr. We also know that i fits in
// an isize if it was created by unfolding a CoFix, since all the FTerm::CoFix
// terms we derive have i < reci.len(), and reci is a vector; Rust guarantees
// that vector lengths (in bytes, actually!) fit in an isize. What remains to
// be determined is whether isize is always guaranteed to fit in an i64. If
// that's true, this cast necessarily succeeds.
return Ok(Constr::CoFix(ORef(Arc::from(CoFix(i as i64,
PRec(lna.clone(),
Array(Arc::new(ftys?)),
Array(Arc::new(fbds?))))))))
},
FTerm::App(ref f, ref ve) => {
let f = constr_fun(f, set, &lfts, ctx)?;
// expensive -- allocates a Vec
let ve: Result<Vec<_>, _> = ve.iter()
.map( |v| constr_fun(v, set, &lfts, ctx) )
.collect();
return Ok(Constr::App(ORef(Arc::from((f, Array(Arc::from(ve?)))))))
},
FTerm::Proj(p, b, ref c) => {
let c = constr_fun(c, set, &lfts, ctx)?;
return Ok(Constr::Proj(ORef(Arc::from((Proj(p.clone(), b), c)))))
},
FTerm::Lambda(ref tys, b, ref e) => {
// We should know v is nonzero, if Lambda was created properly.
// TODO: Enforce this by construction with a Lambda newtype in this file.
// FIXME: Is converting to dest_flambda only to convert back to a Lambda really
// worthwhile? It seems like we just keep recurively allocating smaller
// FTerms, then turning them back into smaller Lambdas, when we could just skip
// allocating the FTerm in the first place.
// FIXME: Based on the above, maybe consider having dest_flambda not actually
// allocate new FTerms, and instead just return references? Dunno if this
// would work, since we'd need to make new ones at some point anyway.
// FIXME: Consider switching FTerm::Lambda to take slices so we don't have to
// clone here. This optimization may also apply for other FTerms but Lambda is
// the most obvious, since dest_flambda only needs to slice into the array (of
// course, because of this maybe dest_flambda isn't even needed here).
let (na, ty, bd) =
FTerm::dest_flambda(set, Subs::mk_clos2, tys, b, e, ctx)?;
let ty = constr_fun(&ty, set, &lfts, ctx)?;
// expensive, but shouldn't outlive this block.
let mut lfts = lfts.into_owned();
lfts.lift()?;
let bd = constr_fun(&bd, set, &lfts, ctx)?;
return Ok(Constr::Lambda(ORef(Arc::from((na, ty, bd)))))
},
FTerm::Prod(o, ref t, ref c) => {
let (ref n, _, _) = **o;
let t = constr_fun(t, set, &lfts, ctx)?;
// expensive, but shouldn't outlive this block.
let mut lfts = lfts.into_owned();
lfts.lift()?;
let c = constr_fun(c, set, &lfts, ctx)?;
return Ok(Constr::Prod(ORef(Arc::from((n.clone(), t, c)))))
},
FTerm::LetIn(o, ref b, ref t, ref e) => {
let (ref n, _, _, ref f) = **o;
let b = constr_fun(b, set, &lfts, ctx)?;
let t = constr_fun(t, set, &lfts, ctx)?;
let mut e = e.clone(); // expensive, but shouldn't outlive this block.
e.lift()?;
let fc = e.mk_clos2(set, f, ctx)?;
// expensive, but shouldn't outlive this block.
let mut lfts = lfts.into_owned();
lfts.lift()?;
let fc = constr_fun(&fc, set, &lfts, ctx)?;
return Ok(Constr::LetIn(ORef(Arc::from((n.clone(), b, t, fc)))))
},
// | FEvar (ev,args) -> Evar(ev,Array.map (constr_fun lfts) args)
FTerm::Lift(k, ref a) => {
// expensive
let mut lfts_ = lfts.into_owned();
lfts_.shift(k)?;
lfts = RustCow::Owned(lfts_);
// norm_ = a.norm.get();
// term_ = Cow::Borrowed(a.fterm().expect("Tried to lift a locked term!"));
v = a;
},
FTerm::Clos(t, ref env) => {
let fr = env.mk_clos2(set, t, ctx)?;
// TODO: check whether the update here is useful. If so, we should
// slightly change the function definition.
// norm_ = ...
// let a = constr_fun(a, set, &lfts, ctx)?;
let norm = *set.get(&fr.norm);
let term = *set.get(&fr.term);
v.update(set, norm, term);
}
}
}
}
/// Zip a single item; shared between zip and zip_mut.
///
/// f is the function used to perform the append in the App case.
fn zip_item_mut<'r, I, S, F>(m: SCow<'r, Self>, set: &mut Set<'id>,
s: StackMember<'id, 'a, 'b, I, S>,
ctx: Context<'id, 'a, 'b>,
f: F) -> IdxResult<SCow<'r, Self>>
where F: FnOnce(&Set<'id>, Vec<Self>, Stack<'id, 'a, 'b, I, S>),
{
match s {
StackMember::App(args) => {
let norm = set.get(&m.norm).neutr();
let t = FTerm::App(m.clone(set), args);
Ok(Cow::Owned(FConstr {
norm: Cell::new(norm),
term: Cell::new(Some(ctx.term_arena.alloc(t))),
}))
},
StackMember::Case(o, p, br, _) => {
let norm = set.get(&m.norm).neutr();
let t = FTerm::Case(o, p, m.clone(set), br);
Ok(Cow::Owned(FConstr {
norm: Cell::new(norm),
term: Cell::new(Some(ctx.term_arena.alloc(t))),
}))
},
StackMember::CaseT(o, e, _) => {
let norm = set.get(&m.norm).neutr();
let t = FTerm::CaseT(o, m.clone(set), e);
Ok(Cow::Owned(FConstr {
norm: Cell::new(norm),
term: Cell::new(Some(ctx.term_arena.alloc(t))),
}))
},
StackMember::Proj(_, _, p, b, _) => {
let norm = set.get(&m.norm).neutr();
let t = FTerm::Proj(p, b, m.clone(set));
Ok(Cow::Owned(FConstr {
norm: Cell::new(norm),
term: Cell::new(Some(ctx.term_arena.alloc(t))),
}))
},
StackMember::Fix(fx, par, _) => {
// FIXME: This seems like a very weird and convoluted way to do this.
let mut v = vec![m.clone(set)];
f(set, v, par);
Ok(Cow::Owned(fx))
},
StackMember::Shift(n, _) => {
Ok(Cow::Owned(m.lft(set, n, ctx)?))
},
StackMember::Update(rf, _) => {
let norm = *set.get(&m.norm);
let term = *set.get(&m.term);
rf.update(set, norm, term);
// TODO: The below is closer to the OCaml implementation, but it doesn't seem
// like there's any point in doing it, since we never update m anyway (we do
// return it at the end, but we're currently returning an owned FTerm rather
// than a shared reference).
// *m = Cow::Borrowed(rf);
Ok(m)
},
}
}
    /// This differs from the OCaml because it actually mutates its stk argument. Fortunately,
    /// this only matters in one place (whd_stack)--see below.
    ///
    /// Pops every member off stk, wrapping self (the head term) one layer at a
    /// time via zip_item_mut, and returns the final wrapped term. The loop runs
    /// until the stack is drained; arguments spliced back on by the Fix case are
    /// themselves popped on subsequent iterations.
    fn zip_mut<I, S>(&self, set: &mut Set<'id>, stk: &mut Stack<'id, 'a, 'b, I, S>,
                     ctx: Context<'id, 'a, 'b>) -> IdxResult<FConstr<'id, 'a, 'b>> {
        // Start from a borrow of self; zip_item_mut hands back an owned Cow in
        // every case that actually wraps the term.
        let mut m = Cow::Borrowed(self);
        while let Some(s) = stk.pop() {
            // The closure is the Fix-case append: push v (containing m) onto our
            // own stack, then splice the fix's parameter stack par after it.
            m = Self::zip_item_mut(m, set, s, ctx, |_, v, par| {
                stk.append(v);
                // mem::swap(stk, &mut par);
                // NOTE: Since we use a Vec rather than a list, the "head" of our stack is
                // actually at the end of the Vec.  Therefore, where in the OCaml we perform
                // par @ stk, here we have reversed par and reversed stk, and perform
                // stk ++ par (or kst ++ rap).
                stk.extend(par.0.into_iter());
            })?;
        }
        // Materialize an owned FConstr from the final (possibly borrowed) Cow.
        Ok(m.clone(set))
    }
/// This differs from the OCaml because it actually mutates its stk argument. Fortunately, in
/// all but one place, this doesn't matter, because we end up not using the stack afterwards
/// anyway. The one place, sadly, is whd_stack, which is called all the time, so optimizing
/// that case separately might be a good idea.
fn fapp_stack_mut<I, S>(&self, set: &mut Set<'id>, stk: &mut Stack<'id, 'a, 'b, I, S>,
ctx: Context<'id, 'a, 'b>) -> IdxResult<FConstr<'id, 'a, 'b>> {
self.zip_mut(set, stk, ctx)
}
/// The immutable version of zip. Doesn't return a value, since it's only used by whd_stack to
/// apply update marks.
fn zip<I, S>(&self, set: &mut Set<'id>, stk: &Stack<'id, 'a, 'b, I, S>,
ctx: Context<'id, 'a, 'b>) -> IdxResult<()> {
let stk = stk.iter();
let mut bstk: Vec<SCow<_>> = Vec::new(); // the stack, m, and s outlive bstk
let mut m = Cow::Borrowed(self); // the stack and s outlive m
// We use a Vec of Cows of StackMembers rather than a normal stack. The reason is that
// normal stacks own their items, and some operations (like append) require mutability
// in order to function. Rather than write a specialized version of Stack that works
// within these restrictions, we just use the Vec directly and inline specialized versions
// of the methods we need (currently, just append).
//
// So what's the upside to this over cloning the stack up front? Well, we avoid tracing
// the stack again, and not being allowed to clone stacks normally is a nice lint, but
// that honestly might be less work; maybe we should just do that instead.
//
// Also note that we use Cow over regular Cow because regular Cow requires there to be an
// easy way to turn an immutable version into an owned one, which we don't actually have in
// general (we only sort of provide an implementation for Apps).
for s_ in stk {
let mut s_ = Cow::Borrowed(s_);
loop {
// Manual copy of append designed for our weird Cow stacks.
let append = |set: &Set<'id>, bstk: &mut Vec<_>, mut v: Vec<_>| {
if let Some(o) = bstk.last_mut() {
match *o {
Cow::Borrowed(&StackMember::App(ref l)) => {
v.extend(l.iter().map( |v| v.clone(set) ));
*o = Cow::Owned(StackMember::App(v));
return;
},
Cow::Owned(StackMember::App(ref mut l)) => {
mem::swap(&mut v, l);
l.extend(v.into_iter());
return;
},
_ => {},
}
}
bstk.push(Cow::Owned(StackMember::App(v)))
};
// First, check whether we can get away (almost) with the mutable case.
let s = match s_ {
Cow::Borrowed(s) => s,
Cow::Owned(s) => {