(**************************************************************************)
(* *)
(* OCaml *)
(* *)
(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *)
(* *)
(* Copyright 1996 Institut National de Recherche en Informatique et *)
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
(* the GNU Lesser General Public License version 2.1, with the *)
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(* Translation from typed abstract syntax to lambda terms,
for the core language *)
open Misc
open Asttypes
open Primitive
open Types
open Typedtree
open Typeopt
open Lambda
open Translmode
open Debuginfo.Scoped_location
type error =
Free_super_var
| Unreachable_reached
| Bad_probe_layout of Ident.t
| Illegal_void_record_field
| Illegal_product_record_field of Jkind.Sort.Const.t
| Void_sort of type_expr
| Unboxed_vector_in_array_comprehension
| Unboxed_product_in_array_comprehension
exception Error of Location.t * error
let use_dup_for_constant_mutable_arrays_bigger_than = 4
(* CR layouts v7: In the places where this is used, we will want to allow
float#, but not void yet (e.g., the left of a semicolon and loop bodies). We
still default to value before checking for void, to allow for sort variables
arising in situations like
let foo () = raise Foo; ()
When this sanity check is removed, consider whether we are still defaulting
appropriately.
*)
let sort_must_not_be_void loc ty sort =
if Jkind.Sort.Const.(equal void sort) then raise (Error (loc, Void_sort ty))
let layout_exp sort e = layout e.exp_env e.exp_loc sort e.exp_type
let layout_pat sort p = layout p.pat_env p.pat_loc sort p.pat_type
let check_record_field_sort loc sort =
match (sort : Jkind.Sort.Const.t) with
| Base (Value | Float64 | Float32 | Bits8 | Bits16 | Bits32 | Bits64 | Vec128
| Word) -> ()
| Base Void -> raise (Error (loc, Illegal_void_record_field))
| Product _ as c -> raise (Error (loc, Illegal_product_record_field c))
(* Forward declaration -- to be filled in by Translmod.transl_module *)
let transl_module =
ref((fun ~scopes:_ _cc _rootpath _modl -> assert false) :
scopes:scopes -> module_coercion -> Longident.t option ->
module_expr -> lambda)
let transl_object =
ref (fun ~scopes:_ _id _s _cl -> assert false :
scopes:scopes -> Ident.t -> string list -> class_expr -> lambda)
(* Probe handlers are generated from %probe as closed functions
during transl_exp and immediately lifted to top level. *)
let probe_handlers = ref []
let clear_probe_handlers () = probe_handlers := []
let declare_probe_handlers lam =
List.fold_left (fun acc (funcid, func) ->
Llet(Strict, Lambda.layout_function, funcid, func, acc))
lam
!probe_handlers
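(* For illustration (hypothetical identifiers): if the recorded handler list
   is [(id1, f1); (id2, f2)], then [declare_probe_handlers lam] produces
   [Llet (Strict, _, id2, f2, Llet (Strict, _, id1, f1, lam))]. *)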
(* Compile an exception/extension definition *)
let prim_fresh_oo_id =
Pccall
(Lambda.simple_prim_on_values ~name:"caml_fresh_oo_id" ~arity:1 ~alloc:false)
let transl_extension_constructor ~scopes env path ext =
let path =
Printtyp.wrap_printing_env env ~error:true (fun () ->
Option.map (Printtyp.rewrite_double_underscore_longidents env) path)
in
let name =
match path with
| None -> Ident.name ext.ext_id
| Some path -> Format.asprintf "%a" Pprintast.longident path
in
let loc = of_location ~scopes ext.ext_loc in
match ext.ext_kind with
Text_decl _ ->
(* Extension constructors are currently always Alloc_heap.
They could be Alloc_local, but that would require changes
to pattern typing, as patterns can close over them. *)
Lprim (Pmakeblock (Obj.object_tag, Immutable_unique, None, alloc_heap),
[Lconst (Const_base (Const_string (name, ext.ext_loc, None)));
Lprim (prim_fresh_oo_id, [Lconst (const_int 0)], loc)],
loc)
| Text_rebind(path, _lid) ->
transl_extension_path loc env path
(* To propagate structured constants *)
exception Not_constant
let extract_constant = function
Lconst sc -> sc
| _ -> raise Not_constant
let extract_float = function
Const_base(Const_float f) -> f
| _ -> fatal_error "Translcore.extract_float"
let transl_apply_position position =
match position with
| Default -> Rc_normal
| Nontail -> Rc_nontail
| Tail ->
if Config.stack_allocation then Rc_close_at_apply
else Rc_normal
let maybe_region get_layout lam =
let rec remove_tail_markers_and_exclave = function
| Lapply ({ap_region_close = Rc_close_at_apply} as ap) ->
Lapply ({ap with ap_region_close = Rc_normal})
| Lsend (k, lmet, lobj, largs, Rc_close_at_apply, mode, loc, layout) ->
Lsend (k, lmet, lobj, largs, Rc_normal, mode, loc, layout)
| Lregion _ as lam -> lam
| Lexclave lam -> lam
| lam ->
Lambda.shallow_map ~tail:remove_tail_markers_and_exclave ~non_tail:Fun.id lam
in
if not Config.stack_allocation then lam
else if may_allocate_in_region lam then Lregion (lam, get_layout ())
else remove_tail_markers_and_exclave lam
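(* In short: when stack allocation is disabled, [lam] is returned unchanged;
   when [lam] may allocate in the current region it is wrapped in [Lregion];
   otherwise the region is omitted and the now-redundant [Rc_close_at_apply]
   markers (and any outer [Lexclave]) are removed along the tail path. *)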
let maybe_region_layout layout lam =
maybe_region (fun () -> layout) lam
let maybe_region_exp sort exp lam =
maybe_region (fun () -> layout_exp sort exp) lam
let is_alloc_heap = function Alloc_heap -> true | Alloc_local -> false
(* In cases where we're careful to preserve syntactic arity, we disable
the arity fusion attempted by simplif.ml *)
let function_attribute_disallowing_arity_fusion =
{ default_function_attribute with may_fuse_arity = false }
(** A well-formed function parameter list is of the form
[G @ L @ [ Final_arg ]],
where the values of G and L are of the form [More_args { partial_mode }],
where [partial_mode] has locality Global in G and locality Local in L.
[curried_function_kind p] checks the well-formedness of the list and returns
the corresponding [curried_function_kind]. [nlocal] is populated as follows:
- if {v |L| > 0 v}, then {v nlocal = |L| + 1 v}.
- if {v |L| = 0 v},
* if the function returns at mode local, the final arg has mode local,
or the function itself is allocated locally, then {v nlocal = 1 v}.
* otherwise, {v nlocal = 0 v}.
*)
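(* For illustration: for the parameter list
     [ (More_args { partial_mode = global }, _);
       (More_args { partial_mode = local  }, _);
       (Final_arg, _) ]
   we have |L| = 1, so the result is { nlocal = 2 }; if instead every partial
   mode is global, the function is heap-allocated, it returns at mode global
   and the final argument's mode is global, the result is { nlocal = 0 }. *)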
(* CR-someday: Now that some functions' arity won't be changed downstream of
lambda (see [may_fuse_arity = false]), we could change [nlocal] to be
more expressive. I suggest the variant:
{[
type partial_application_is_local_when =
| Applied_up_to_nth_argument_from_end of int
| Never
]}
I believe this will allow us to get rid of the complicated logic for
|L| = 0, and help clarify how clients use this type. I plan on doing
this in a follow-on PR.
*)
let curried_function_kind
: (function_curry * Mode.Alloc.l) list
-> return_mode:locality_mode
-> mode:locality_mode
-> curried_function_kind
=
let rec loop params ~return_mode ~mode ~running_count
~found_local_already
=
match params with
| [] -> Misc.fatal_error "Expected to find [Final_arg] at end of list"
| [ Final_arg, final_arg_mode ] ->
let nlocal =
if running_count = 0
&& is_alloc_heap return_mode
&& is_alloc_heap mode
&& is_alloc_heap (transl_alloc_mode_l final_arg_mode)
then 0
else running_count + 1
in
{ nlocal }
| (Final_arg, _) :: _ -> Misc.fatal_error "Found [Final_arg] too early"
| (More_args { partial_mode }, _) :: params ->
match transl_alloc_mode_l partial_mode with
| Alloc_heap when not found_local_already ->
loop params ~return_mode ~mode
~running_count:0 ~found_local_already
| Alloc_local ->
loop params ~return_mode ~mode
~running_count:(running_count + 1) ~found_local_already:true
| Alloc_heap ->
Misc.fatal_error
"A function argument with a Global partial_mode unexpectedly \
found following a function argument with a Local partial_mode"
in
fun params ~return_mode ~mode ->
loop params ~return_mode ~mode ~running_count:0
~found_local_already:false
(* Insertion of debugging events *)
let event_before ~scopes exp lam =
Translprim.event_before (of_location ~scopes exp.exp_loc) exp lam
let event_after ~scopes exp lam =
Translprim.event_after (of_location ~scopes exp.exp_loc) exp lam
let event_function ~scopes exp lam =
if !Clflags.debug && not !Clflags.native_code then
let repr = Some (ref 0) in
let (info, body) = lam repr in
(info,
Levent(body, {lev_loc = of_location ~scopes exp.exp_loc;
lev_kind = Lev_function;
lev_repr = repr;
lev_env = exp.exp_env}))
else
lam None
(* Assertions *)
let assert_failed loc ~scopes exp =
let slot =
transl_extension_path Loc_unknown
(Lazy.force Env.initial) Predef.path_assert_failure
in
let (fname, line, char) =
Location.get_pos_info loc.Location.loc_start
in
let loc = of_location ~scopes exp.exp_loc in
Lprim(Praise Raise_regular, [event_after ~scopes exp
(Lprim(Pmakeblock(0, Immutable, None, alloc_heap),
[slot;
Lconst(Const_block(0,
[Const_base(Const_string (fname, exp.exp_loc, None));
Const_base(Const_int line);
Const_base(Const_int char)]))], loc))], loc)
type fusable_function =
{ params : function_param list
; body : function_body
; return_sort : Jkind.Sort.Const.t
; return_mode : locality_mode
; region : bool
}
(* [fuse_method_arity] is what ensures that an n-ary method is compiled as an
(n+1)-ary function, where the first parameter is self. It fuses together the
self and method parameters.
Input: fun self -> fun method_param_1 ... method_param_n -> body
Output: fun self method_param_1 ... method_param_n -> body
It detects whether the AST is a method by the presence of [Texp_poly] on the
inner function. This is only ever added to methods.
*)
let fuse_method_arity (parent : fusable_function) : fusable_function =
match parent with
| { params = [ self_param ];
return_mode = Alloc_heap;
body =
Tfunction_body { exp_desc = Texp_function method_; exp_extra; }
}
when
List.exists
(function (Texp_poly _, _, _) -> true | _ -> false)
exp_extra
->
begin match transl_alloc_mode method_.alloc_mode with
| Alloc_heap -> ()
| Alloc_local ->
(* If we support locally-allocated objects, we'll also have to
pass the new mode back to the caller.
*)
Misc.fatal_error "Locally-allocated method body!"
end;
let self_param =
{ self_param
with fp_curry = More_args
{ partial_mode =
Mode.Alloc.disallow_right Mode.Alloc.legacy }
}
in
let return_sort = Jkind.Sort.default_for_transl_and_get method_.ret_sort in
{ params = self_param :: method_.params;
body = method_.body;
return_mode = transl_alloc_mode_l method_.ret_mode;
return_sort;
region = true;
}
| _ -> parent
(* Translation of expressions *)
let rec iter_exn_names f pat =
match pat.pat_desc with
| Tpat_var (id, _, _, _) -> f id
| Tpat_alias (p, id, _, _, _) ->
f id;
iter_exn_names f p
| _ -> ()
let transl_ident loc env ty path desc kind =
match desc.val_kind, kind with
| Val_prim p, Id_prim (poly_mode, poly_sort) ->
Translprim.transl_primitive loc p env ty ~poly_mode ~poly_sort (Some path)
| Val_anc _, Id_value ->
raise(Error(to_location loc, Free_super_var))
| (Val_reg | Val_self _), Id_value ->
transl_value_path loc env path
| _ -> fatal_error "Translcore.transl_exp: bad Texp_ident"
let can_apply_primitive p pmode pos args =
let is_omitted = function
| Arg _ -> false
| Omitted _ -> true
in
if List.exists (fun (_, arg) -> is_omitted arg) args then false
else begin
let nargs = List.length args in
if nargs = p.prim_arity then true
else if nargs < p.prim_arity then false
else if pos <> Typedtree.Tail then true
else begin
let return_mode = Ctype.prim_mode pmode p.prim_native_repr_res in
is_heap_mode (transl_locality_mode_l return_mode)
end
end
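(* For illustration: a primitive of arity 2 applied to exactly two [Arg]s is
   applied directly; applied to three arguments it is still applied directly
   (with the extra argument handled by an ordinary application) unless the
   application is in [Tail] position and the primitive's result mode is local,
   in which case [can_apply_primitive] answers false and the generic
   [Texp_apply] path is used instead. *)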
let zero_alloc_of_application
~num_args (annotation : Zero_alloc.assume option) funct =
match annotation, funct.exp_desc with
| Some assume, _ ->
(* The user wrote a zero_alloc attribute on the application - keep it. *)
Builtin_attributes.assume_zero_alloc ~inferred:false assume
| None, Texp_ident (_, _, { val_zero_alloc; _ }, _, _) ->
(* We assume the call is zero_alloc if the function is known to be
zero_alloc. If the function is zero_alloc opt, then we need to be sure
that the opt checks were run to license this assumption. We judge
whether the opt checks were run based on the argument to the
[-zero-alloc-check] command line flag. *)
let use_opt =
match !Clflags.zero_alloc_check with
| Check_default | No_check -> false
| Check_all | Check_opt_only -> true
in
begin match Zero_alloc.get val_zero_alloc with
| Check c when c.arity = num_args && (use_opt || not c.opt) ->
let assume : Zero_alloc.assume =
{ strict = c.strict;
never_returns_normally = false;
never_raises = false;
arity = c.arity;
loc = c.loc }
in
Builtin_attributes.assume_zero_alloc ~inferred:true assume
| Check _ | Default_zero_alloc | Ignore_assert_all | Assume _ ->
Zero_alloc_utils.Assume_info.none
end
| None, _ -> Zero_alloc_utils.Assume_info.none
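(* For example (hypothetical call): applying a function [f] to two arguments,
   where the application carries no explicit [zero_alloc] assume attribute and
   [f]'s value description carries a [Check] zero_alloc annotation of arity 2,
   yields an inferred zero_alloc assumption, unless the annotation is an opt
   check and [!Clflags.zero_alloc_check] is [Check_default] or [No_check]. *)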
let rec transl_exp ~scopes sort e =
transl_exp1 ~scopes ~in_new_scope:false sort e
(* ~in_new_scope tracks whether we just opened a new scope.
When we just opened a new scope, we avoid introducing an extraneous anonymous
function scope and instead inherit the new scope. E.g., [let f x = ...] is
parsed as a let-bound Pexp_function node [let f = fun x -> ...].
We give it f's scope.
*)
and transl_exp1 ~scopes ~in_new_scope sort e =
let eval_once =
(* Whether classes for immediate objects must be cached *)
match e.exp_desc with
Texp_function _ | Texp_for _ | Texp_while _ -> false
| _ -> true
in
if eval_once then transl_exp0 ~scopes ~in_new_scope sort e else
Translobj.oo_wrap e.exp_env true (transl_exp0 ~scopes ~in_new_scope sort) e
and transl_exp0 ~in_new_scope ~scopes sort e =
match e.exp_desc with
| Texp_ident(path, _, desc, kind, _) ->
transl_ident (of_location ~scopes e.exp_loc)
e.exp_env e.exp_type path desc kind
| Texp_constant cst -> Lconst (Const_base cst)
| Texp_let(rec_flag, pat_expr_list, body) ->
let return_layout = layout_exp sort body in
transl_let ~scopes ~return_layout rec_flag pat_expr_list
(event_before ~scopes body (transl_exp ~scopes sort body))
| Texp_function { params; body; ret_sort; ret_mode; alloc_mode;
zero_alloc } ->
let ret_sort = Jkind.Sort.default_for_transl_and_get ret_sort in
transl_function ~in_new_scope ~scopes e params body
~alloc_mode ~ret_mode ~ret_sort ~region:true ~zero_alloc
| Texp_apply({ exp_desc = Texp_ident(path, _, {val_kind = Val_prim p},
Id_prim (pmode, psort), _);
exp_type = prim_type; } as funct,
oargs, pos, ap_mode, zero_alloc)
when can_apply_primitive p pmode pos oargs ->
let rec cut_args prim_repr oargs =
match prim_repr, oargs with
| [], _ -> [], oargs
| _, [] -> failwith "Translcore cut_args"
| ((_, arg_repr) :: prim_repr), ((_, Arg (x, _)) :: oargs) ->
let arg_exps, extra_args = cut_args prim_repr oargs in
let arg_sort =
Translprim.sort_of_native_repr arg_repr ~poly_sort:psort
in
(x, arg_sort) :: arg_exps, extra_args
| _, ((_, Omitted _) :: _) -> assert false
in
let arg_exps, extra_args = cut_args p.prim_native_repr_args oargs in
let args = transl_list ~scopes arg_exps in
let prim_exp = if extra_args = [] then Some e else None in
let position =
if extra_args = [] then transl_apply_position pos
else Rc_normal
in
let assume_zero_alloc =
match zero_alloc with
| None -> Zero_alloc_utils.Assume_info.none
| Some assume -> Builtin_attributes.assume_zero_alloc ~inferred:false assume
in
let lam =
let loc =
map_scopes (update_assume_zero_alloc ~assume_zero_alloc)
(of_location ~scopes e.exp_loc)
in
Translprim.transl_primitive_application
loc p e.exp_env prim_type
~poly_mode:pmode ~poly_sort:psort
path prim_exp args (List.map fst arg_exps) position
in
if extra_args = [] then lam
else begin
let tailcall = Translattribute.get_tailcall_attribute funct in
let inlined = Translattribute.get_inlined_attribute funct in
let specialised = Translattribute.get_specialised_attribute funct in
let position = transl_apply_position pos in
let mode = transl_locality_mode_l ap_mode in
let result_layout = layout_exp sort e in
event_after ~scopes e
(transl_apply ~scopes ~tailcall ~inlined ~specialised
~assume_zero_alloc
~position ~mode
~result_layout lam extra_args (of_location ~scopes e.exp_loc))
end
| Texp_apply(funct, oargs, position, ap_mode, zero_alloc)
->
let tailcall = Translattribute.get_tailcall_attribute funct in
let inlined = Translattribute.get_inlined_attribute funct in
let specialised = Translattribute.get_specialised_attribute funct in
let result_layout = layout_exp sort e in
let position = transl_apply_position position in
let mode = transl_locality_mode_l ap_mode in
let assume_zero_alloc =
zero_alloc_of_application ~num_args:(List.length oargs) zero_alloc funct
in
event_after ~scopes e
(transl_apply ~scopes ~tailcall ~inlined ~specialised
~assume_zero_alloc
~result_layout
~position ~mode (transl_exp ~scopes Jkind.Sort.Const.for_function funct)
oargs (of_location ~scopes e.exp_loc))
| Texp_match(arg, arg_sort, pat_expr_list, partial) ->
let arg_sort = Jkind.Sort.default_for_transl_and_get arg_sort in
transl_match ~scopes ~arg_sort ~return_sort:sort e arg pat_expr_list
partial
| Texp_try(body, pat_expr_list) ->
let id = Typecore.name_cases "exn" pat_expr_list in
let return_layout = layout_exp sort e in
Ltrywith(transl_exp ~scopes sort body, id,
Matching.for_trywith ~scopes ~return_layout e.exp_loc (Lvar id)
(transl_cases_try ~scopes sort pat_expr_list),
return_layout)
| Texp_tuple (el, alloc_mode) ->
let ll, shape =
transl_value_list_with_shape ~scopes
(List.map (fun (_, a) -> (a, Jkind.Sort.Const.for_tuple_element)) el)
in
begin try
Lconst(Const_block(0, List.map extract_constant ll))
with Not_constant ->
Lprim(Pmakeblock(0, Immutable, Some shape,
transl_alloc_mode alloc_mode),
ll,
(of_location ~scopes e.exp_loc))
end
| Texp_unboxed_tuple el ->
let el =
List.map (fun (l, e, s) ->
(l, e, Jkind.Sort.default_for_transl_and_get s)) el
in
let shape = List.map (fun (_, e, s) -> layout_exp s e) el in
let ll = List.map (fun (_, e, s) -> transl_exp ~scopes s e) el in
Lprim(Pmake_unboxed_product shape,
ll,
of_location ~scopes e.exp_loc)
| Texp_construct(_, cstr, args, alloc_mode) ->
let args_with_sorts =
List.map2 (fun { ca_sort } e -> e, ca_sort) cstr.cstr_args args
in
let ll =
List.map (fun (e, sort) -> transl_exp ~scopes sort e) args_with_sorts
in
if cstr.cstr_inlined <> None then begin match ll with
| [x] -> x
| _ -> assert false
end else begin match cstr.cstr_tag, cstr.cstr_repr with
| Null, Variant_with_null -> Lconst Const_null
| Null, (Variant_boxed _ | Variant_unboxed | Variant_extensible) ->
assert false
| Ordinary {runtime_tag}, _ when cstr.cstr_constant ->
assert (args_with_sorts = []);
(* CR layouts v5: This could have void args, but for now we've ruled
that out by checking that the sort list is empty *)
Lconst(const_int runtime_tag)
| Ordinary _, (Variant_unboxed | Variant_with_null) ->
(match ll with [v] -> v | _ -> assert false)
| Ordinary {runtime_tag}, Variant_boxed _ ->
let constant =
match List.map extract_constant ll with
| exception Not_constant -> None
| constants -> (
match cstr.cstr_shape with
| Constructor_mixed shape ->
if !Clflags.native_code then
let shape = transl_mixed_product_shape shape in
Some (Const_mixed_block(runtime_tag, shape, constants))
else
(* CR layouts v5.9: Structured constants for mixed blocks should
be supported in bytecode. See symtable.ml for the difficulty.
*)
None
| Constructor_uniform_value ->
Some (Const_block(runtime_tag, constants)))
in
begin match constant with
| Some constant -> Lconst constant
| None ->
let alloc_mode = transl_alloc_mode (Option.get alloc_mode) in
let makeblock =
match cstr.cstr_shape with
| Constructor_uniform_value ->
let shape =
List.map (fun (e, sort) ->
Lambda.must_be_value (layout_exp sort e))
args_with_sorts
in
Pmakeblock(runtime_tag, Immutable, Some shape, alloc_mode)
| Constructor_mixed shape ->
let shape = Lambda.transl_mixed_product_shape shape in
Pmakemixedblock(runtime_tag, Immutable, shape, alloc_mode)
in
Lprim (makeblock, ll, of_location ~scopes e.exp_loc)
end
| Extension path, Variant_extensible ->
let lam = transl_extension_path
(of_location ~scopes e.exp_loc) e.exp_env path in
if cstr.cstr_constant
then (
assert (args_with_sorts = []);
(* CR layouts v5: This could have void args, but for now we've ruled
that out by checking that the sort list is empty *)
lam)
else
let alloc_mode = transl_alloc_mode (Option.get alloc_mode) in
let makeblock =
match cstr.cstr_shape with
| Constructor_uniform_value ->
let shape =
List.map (fun (e, sort) ->
Lambda.must_be_value (layout_exp sort e))
args_with_sorts
in
Pmakeblock(0, Immutable, Some (Lambda.generic_value :: shape),
alloc_mode)
| Constructor_mixed shape ->
let shape = Lambda.transl_mixed_product_shape shape in
let shape =
{ shape with value_prefix_len = shape.value_prefix_len + 1 }
in
Pmakemixedblock(0, Immutable, shape, alloc_mode)
in
Lprim (makeblock, lam :: ll, of_location ~scopes e.exp_loc)
| Extension _, (Variant_boxed _ | Variant_unboxed | Variant_with_null)
| Ordinary _, Variant_extensible -> assert false
end
| Texp_extension_constructor (_, path) ->
transl_extension_path (of_location ~scopes e.exp_loc) e.exp_env path
| Texp_variant(l, arg) ->
let tag = Btype.hash_variant l in
begin match arg with
None -> Lconst(const_int tag)
| Some (arg, alloc_mode) ->
let lam = transl_exp ~scopes Jkind.Sort.Const.for_poly_variant arg in
try
Lconst(Const_block(0, [const_int tag;
extract_constant lam]))
with Not_constant ->
Lprim(Pmakeblock(0, Immutable, None,
transl_alloc_mode alloc_mode),
[Lconst(const_int tag); lam],
of_location ~scopes e.exp_loc)
end
| Texp_record {fields; representation; extended_expression; alloc_mode} ->
transl_record ~scopes e.exp_loc e.exp_env
(Option.map transl_alloc_mode alloc_mode)
fields representation extended_expression
| Texp_record_unboxed_product
{fields; representation; extended_expression } ->
transl_record_unboxed_product ~scopes e.exp_loc e.exp_env
fields representation extended_expression
| Texp_field(arg, id, lbl, float, ubr) ->
let targ = transl_exp ~scopes Jkind.Sort.Const.for_record arg in
let sem =
if Types.is_mutable lbl.lbl_mut then Reads_vary else Reads_agree
in
let sem = add_barrier_to_read (transl_unique_barrier ubr) sem in
check_record_field_sort id.loc lbl.lbl_sort;
begin match lbl.lbl_repres with
Record_boxed _
| Record_inlined (_, Constructor_uniform_value, Variant_boxed _) ->
Lprim (Pfield (lbl.lbl_pos, maybe_pointer e, sem), [targ],
of_location ~scopes e.exp_loc)
| Record_unboxed | Record_inlined (_, _, Variant_unboxed) -> targ
| Record_float ->
let alloc_mode =
match float with
| Boxing (alloc_mode, _) -> alloc_mode
| Non_boxing _ -> assert false
in
let mode = transl_alloc_mode alloc_mode in
Lprim (Pfloatfield (lbl.lbl_pos, sem, mode), [targ],
of_location ~scopes e.exp_loc)
| Record_ufloat ->
Lprim (Pufloatfield (lbl.lbl_pos, sem), [targ],
of_location ~scopes e.exp_loc)
| Record_inlined (_, Constructor_uniform_value, Variant_extensible) ->
Lprim (Pfield (lbl.lbl_pos + 1, maybe_pointer e, sem), [targ],
of_location ~scopes e.exp_loc)
| Record_inlined (_, Constructor_mixed _, Variant_extensible) ->
(* CR layouts v5.9: support this *)
fatal_error
"Mixed inlined records not supported for extensible variants"
| Record_inlined (_, Constructor_mixed shape, Variant_boxed _)
| Record_mixed shape ->
let ({ value_prefix_len; flat_suffix } : mixed_product_shape) =
shape
in
let read =
if lbl.lbl_num < value_prefix_len then
Mread_value_prefix (maybe_pointer e)
else
let flat_read =
match flat_suffix.(lbl.lbl_num - value_prefix_len) with
| Float_boxed ->
(match float with
| Boxing (mode, _) ->
flat_read_float_boxed (transl_alloc_mode mode)
| Non_boxing _ ->
Misc.fatal_error
"expected typechecking to make [float] boxing mode\
\ present for float field read")
| non_float -> flat_read_non_float non_float
in
Mread_flat_suffix flat_read
in
let shape : Lambda.mixed_block_shape =
{ value_prefix_len; flat_suffix }
in
Lprim (Pmixedfield (lbl.lbl_pos, read, shape, sem), [targ],
of_location ~scopes e.exp_loc)
| Record_inlined (_, _, Variant_with_null) -> assert false
end
| Texp_unboxed_field(arg, arg_sort, _id, lbl, _) ->
begin match lbl.lbl_repres with
| Record_unboxed_product ->
let lbl_layout l = layout e.exp_env l.lbl_loc l.lbl_sort l.lbl_arg in
let layouts = Array.to_list (Array.map lbl_layout lbl.lbl_all) in
let arg_sort = Jkind.Sort.default_for_transl_and_get arg_sort in
let targ = transl_exp ~scopes arg_sort arg in
if Array.length lbl.lbl_all == 1 then
(* erase singleton unboxed records before lambda *)
targ
else
Lprim (Punboxed_product_field (lbl.lbl_num, layouts), [targ],
of_location ~scopes e.exp_loc)
end
| Texp_setfield(arg, arg_mode, id, lbl, newval) ->
(* CR layouts v2.5: When we allow `any` in record fields and check
representability on construction, [sort_of_jkind] will be unsafe here.
Probably we should add a sort to `Texp_setfield` in the typed tree,
then. *)
check_record_field_sort id.loc lbl.lbl_sort;
let mode =
Assignment (transl_modify_mode arg_mode)
in
let access =
match lbl.lbl_repres with
Record_boxed _
| Record_inlined (_, Constructor_uniform_value, Variant_boxed _) ->
Psetfield(lbl.lbl_pos, maybe_pointer newval, mode)
| Record_unboxed | Record_inlined (_, _, Variant_unboxed) ->
assert false
| Record_float -> Psetfloatfield (lbl.lbl_pos, mode)
| Record_ufloat -> Psetufloatfield (lbl.lbl_pos, mode)
| Record_inlined (_, Constructor_uniform_value, Variant_extensible) ->
Psetfield (lbl.lbl_pos + 1, maybe_pointer newval, mode)
| Record_inlined (_, Constructor_mixed _, Variant_extensible) ->
(* CR layouts v5.9: support this *)
fatal_error
"Mixed inlined records not supported for extensible variants"
| Record_inlined (_, Constructor_mixed shape, Variant_boxed _)
| Record_mixed shape -> begin
let ({ value_prefix_len; flat_suffix } : mixed_product_shape) =
shape
in
let write =
if lbl.lbl_num < value_prefix_len then
Mwrite_value_prefix (maybe_pointer newval)
else
let flat_element = flat_suffix.(lbl.lbl_num - value_prefix_len) in
Mwrite_flat_suffix flat_element
in
let shape : Lambda.mixed_block_shape =
{ value_prefix_len; flat_suffix }
in
Psetmixedfield(lbl.lbl_pos, write, shape, mode)
end
| Record_inlined (_, _, Variant_with_null) -> assert false
in
Lprim(access, [transl_exp ~scopes Jkind.Sort.Const.for_record arg;
transl_exp ~scopes lbl.lbl_sort newval],
of_location ~scopes e.exp_loc)
| Texp_array (amut, element_sort, expr_list, alloc_mode) ->
let mode = transl_alloc_mode alloc_mode in
let element_sort = Jkind.Sort.default_for_transl_and_get element_sort in
let kind = array_kind e element_sort in
let ll =
transl_list ~scopes
(List.map (fun e -> (e, element_sort)) expr_list)
in
let loc = of_location ~scopes e.exp_loc in
let makearray mutability =
Lprim (Pmakearray (kind, mutability, mode), ll, loc)
in
let duparray_to_mutable array =
Lprim (Pduparray (kind, Mutable), [array], loc)
in
let imm_array = makearray Immutable in
let lambda_arr_mut : Lambda.mutable_flag =
if Types.is_mutable amut then Mutable else Immutable
in
begin try
(* For native code the decision as to which compilation strategy to
use is made later. This enables the Flambda passes to lift certain
kinds of array definitions to symbols. *)
(* Deactivate constant optimization if array is small enough *)
if Types.is_mutable amut &&
List.length ll <= use_dup_for_constant_mutable_arrays_bigger_than
then begin
raise Not_constant
end;
(* Pduparray only works in Alloc_heap mode *)
if is_local_mode mode then raise Not_constant;
begin match List.map extract_constant ll with
| exception Not_constant
when kind = Pfloatarray && Types.is_mutable amut ->
(* We cannot currently lift mutable [Pintarray] arrays safely in
Flambda because [caml_modify] might be called upon them
(e.g. from code operating on polymorphic arrays, or functions
such as [caml_array_blit]).
To avoid having different Lambda code for bytecode/Closure
vs. Flambda, we always generate [Pduparray] for mutable arrays
here, and deal with it in [Bytegen] (or in the case of Closure,
in [Cmmgen], which already has to handle [Pduparray Pmakearray
Pfloatarray] in the case where the array turned out to be
inconstant).
When not [Pfloatarray], the exception propagates to the handler
below. *)
duparray_to_mutable imm_array
| cl ->
let const =
if Config.flambda2 then
imm_array
else
match kind with
| Paddrarray | Pintarray ->
Lconst(Const_block(0, cl))
| Pfloatarray ->
Lconst(Const_float_array(List.map extract_float cl))
| Pgenarray ->
raise Not_constant (* can this really happen? *)
| Punboxedfloatarray _ | Punboxedintarray _
| Punboxedvectorarray _
| Pgcscannableproductarray _ | Pgcignorableproductarray _ ->
Misc.fatal_error "Use flambda2 for unboxed arrays"
in
if Types.is_mutable amut then duparray_to_mutable const else const
end
with Not_constant ->
makearray lambda_arr_mut
end
| Texp_list_comprehension comp ->
let loc = of_location ~scopes e.exp_loc in
Transl_list_comprehension.comprehension
~transl_exp ~scopes ~loc comp
| Texp_array_comprehension (_amut, elt_sort, comp) ->
(* We can ignore mutability here since we've already checked it in the
type checker; both mutable and immutable arrays are created the same
way *)
let loc = of_location ~scopes e.exp_loc in
let elt_sort = Jkind.Sort.default_for_transl_and_get elt_sort in
let array_kind = Typeopt.array_kind e elt_sort in
begin match array_kind with
| Pgenarray | Paddrarray | Pintarray | Pfloatarray
| Punboxedfloatarray _ | Punboxedintarray _ -> ()
| Punboxedvectorarray _ ->
raise (Error(e.exp_loc, Unboxed_vector_in_array_comprehension))
| Pgcscannableproductarray _ | Pgcignorableproductarray _ ->
raise (Error(e.exp_loc, Unboxed_product_in_array_comprehension))
end;
Transl_array_comprehension.comprehension
~transl_exp ~scopes ~loc ~array_kind comp
| Texp_ifthenelse(cond, ifso, Some ifnot) ->
Lifthenelse(transl_exp ~scopes Jkind.Sort.Const.for_predef_value cond,
event_before ~scopes ifso (transl_exp ~scopes sort ifso),
event_before ~scopes ifnot (transl_exp ~scopes sort ifnot),
layout_exp sort e)
| Texp_ifthenelse(cond, ifso, None) ->
Lifthenelse(transl_exp ~scopes Jkind.Sort.Const.for_predef_value cond,
event_before ~scopes ifso (transl_exp ~scopes sort ifso),
lambda_unit,
Lambda.layout_unit)
| Texp_sequence(expr1, sort', expr2) ->
let sort' = Jkind.Sort.default_for_transl_and_get sort' in
sort_must_not_be_void expr1.exp_loc expr1.exp_type sort';
Lsequence(transl_exp ~scopes sort' expr1,
event_before ~scopes expr2 (transl_exp ~scopes sort expr2))
| Texp_while {wh_body; wh_body_sort; wh_cond} ->
let wh_body_sort = Jkind.Sort.default_for_transl_and_get wh_body_sort in
sort_must_not_be_void wh_body.exp_loc wh_body.exp_type wh_body_sort;
let cond = transl_exp ~scopes Jkind.Sort.Const.for_predef_value wh_cond in
let body = transl_exp ~scopes wh_body_sort wh_body in
Lwhile {
wh_cond = maybe_region_layout layout_int cond;
wh_body = event_before ~scopes wh_body
(maybe_region_layout layout_unit body);
}
| Texp_for {for_id; for_from; for_to; for_dir; for_body; for_body_sort} ->
let for_body_sort = Jkind.Sort.default_for_transl_and_get for_body_sort in
sort_must_not_be_void for_body.exp_loc for_body.exp_type for_body_sort;
let body = transl_exp ~scopes for_body_sort for_body in
Lfor {
for_id;
for_loc = of_location ~scopes e.exp_loc;
for_from = transl_exp ~scopes Jkind.Sort.Const.for_predef_value for_from;
for_to = transl_exp ~scopes Jkind.Sort.Const.for_predef_value for_to;
for_dir;
for_body = event_before ~scopes for_body
(maybe_region_layout layout_unit body);
}
| Texp_send(expr, met, pos) ->
let lam =
let pos = transl_apply_position pos in
let mode = Lambda.alloc_heap in
let loc = of_location ~scopes e.exp_loc in
let layout = layout_exp sort e in
match met with
| Tmeth_val id ->
let obj = transl_exp ~scopes Jkind.Sort.Const.for_object expr in
Lsend (Self, Lvar id, obj, [], pos, mode, loc, layout)
| Tmeth_name nm ->
let obj = transl_exp ~scopes Jkind.Sort.Const.for_object expr in
let (tag, cache) = Translobj.meth obj nm in
let kind = if cache = [] then Public else Cached in
Lsend (kind, tag, obj, cache, pos, mode, loc, layout)
| Tmeth_ancestor(meth, path_self) ->
let self = transl_value_path loc e.exp_env path_self in
Lapply {ap_loc = loc;
ap_func = Lvar meth;
ap_args = [self];
ap_result_layout = layout;
ap_mode = mode;
ap_region_close = pos;
ap_probe = None;
ap_tailcall = Default_tailcall;
ap_inlined = Default_inlined;
ap_specialised = Default_specialise}
in
event_after ~scopes e lam
| Texp_new (cl, {Location.loc=loc}, _, pos) ->
let loc = of_location ~scopes loc in
let pos = transl_apply_position pos in
Lapply{
ap_loc=loc;
ap_func=
Lprim(Pfield (0, Pointer, Reads_vary),
[transl_class_path loc e.exp_env cl], loc);
ap_args=[lambda_unit];
ap_result_layout=layout_exp sort e;
ap_region_close=pos;
ap_mode=alloc_heap;
ap_tailcall=Default_tailcall;
ap_inlined=Default_inlined;
ap_specialised=Default_specialise;
ap_probe=None;
}
| Texp_instvar(path_self, path, _) ->
let loc = of_location ~scopes e.exp_loc in
let self = transl_value_path loc e.exp_env path_self in
let var = transl_value_path loc e.exp_env path in
Lprim(Pfield_computed Reads_vary, [self; var], loc)
| Texp_setinstvar(path_self, path, _, expr) ->
let loc = of_location ~scopes e.exp_loc in
let self = transl_value_path loc e.exp_env path_self in
let var = transl_value_path loc e.exp_env path in
transl_setinstvar ~scopes loc self var expr
| Texp_override(path_self, modifs) ->
let loc = of_location ~scopes e.exp_loc in
let self = transl_value_path loc e.exp_env path_self in
let cpy = Ident.create_local "copy" in
Llet(Strict, Lambda.layout_object, cpy,
Lapply{
ap_loc=Loc_unknown;
ap_func=Translobj.oo_prim "copy";
ap_args=[self];
ap_result_layout=Lambda.layout_object;
ap_region_close=Rc_normal;
ap_mode=alloc_heap;
ap_tailcall=Default_tailcall;
ap_inlined=Default_inlined;
ap_specialised=Default_specialise;
ap_probe=None;
},
List.fold_right
(fun (id, _, expr) rem ->
Lsequence(transl_setinstvar ~scopes Loc_unknown
(Lvar cpy) (Lvar id) expr, rem))
modifs
(Lvar cpy))
| Texp_letmodule(None, loc, Mp_present, modl, body) ->
let lam = !transl_module ~scopes Tcoerce_none None modl in
Lsequence(Lprim(Pignore, [lam], of_location ~scopes loc.loc),
transl_exp ~scopes sort body)
| Texp_letmodule(Some id, _loc, Mp_present, modl, body) ->
let defining_expr =
let mod_scopes = enter_module_definition ~scopes id in
!transl_module ~scopes:mod_scopes Tcoerce_none None modl
in
Llet(Strict, Lambda.layout_module, id, defining_expr,
transl_exp ~scopes sort body)
| Texp_letmodule(_, _, Mp_absent, _, body) ->
transl_exp ~scopes sort body
| Texp_letexception(cd, body) ->
Llet(Strict, Lambda.layout_block,
cd.ext_id, transl_extension_constructor ~scopes e.exp_env None cd,
transl_exp ~scopes sort body)
| Texp_pack modl ->
!transl_module ~scopes Tcoerce_none None modl
| Texp_assert ({exp_desc=Texp_construct(_, {cstr_name="false"}, _, _)}, loc) ->
assert_failed loc ~scopes e
| Texp_assert (cond, loc) ->
if !Clflags.noassert
then lambda_unit
else begin
Lifthenelse
(transl_exp ~scopes Jkind.Sort.Const.for_predef_value cond,