(* drive.ml - forked from astrada/google-drive-ocamlfuse *)
open GapiUtils.Infix
open GapiLens.Infix
open GapiMonad
open GapiMonad.SessionM.Infix
open GapiDriveV3Model
open GapiDriveV3Service
exception Directory_not_empty
exception Existing_attribute
exception File_not_found
exception IO_error
exception Invalid_operation
exception No_attribute
exception Permission_denied
let folder_mime_type = "application/vnd.google-apps.folder"
let shortcut_mime_type = "application/vnd.google-apps.shortcut"
let file_fields =
"appProperties,capabilities(canEdit),createdTime,explicitlyTrashed,fileExtension,fullFileExtension,id,md5Checksum,mimeType,modifiedTime,name,parents,size,trashed,version,viewedByMeTime,webViewLink,exportLinks,shortcutDetails(targetId,targetResourceKey),shared,resourceKey"
let file_std_params =
{
GapiService.StandardParameters.default with
GapiService.StandardParameters.fields = file_fields;
}
let file_list_std_params =
{
GapiService.StandardParameters.default with
GapiService.StandardParameters.fields =
"files(" ^ file_fields ^ "),nextPageToken";
}
let file_download_std_params =
{
GapiService.StandardParameters.default with
GapiService.StandardParameters.alt = "media";
}
let changes_std_params =
{
GapiService.StandardParameters.default with
GapiService.StandardParameters.fields =
"changes(removed,file(" ^ file_fields
^ "),fileId),nextPageToken,newStartPageToken";
}
let device_scope = "https://www.googleapis.com/auth/drive.file"
let device_root_folder = "gdfuse"
let do_request = Oauth2.do_request
let async_do_request f =
let thread = Thread.create (fun go -> do_request go) f in
let thread_id = Thread.id thread in
Utils.log_with_header "Spawning new thread id=%d\n%!" thread_id;
thread
let root_directory = "/"
let default_root_folder_id = "root"
let trash_directory = "/.Trash"
let trash_directory_name_length = String.length trash_directory
let trash_directory_base_path = "/.Trash/"
let lost_and_found_directory = "/lost+found"
let shared_with_me_directory = "/.shared"
let f_bsize = 4096L
let change_limit = 50
let max_link_target_length = 127
let max_attribute_length = 126
(* Utilities *)
let chars_blacklist_regexp = Str.regexp "[/\000]"
let clean_filename name = Str.global_replace chars_blacklist_regexp "_" name
let apostrophe_regexp = Str.regexp (Str.quote "'")
let escape_apostrophe name = Str.global_replace apostrophe_regexp "\\'" name
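(* Length of s once serialized as a JSON string, excluding the surrounding
 * quotes. *)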
let json_length s =
let length_with_quotes =
`String s |> Yojson.Safe.to_string |> String.length
in
length_with_quotes - 2
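(* Short fingerprint of a remote id: the trailing word_length * 8 hex digits
 * of its MD5 digest. Raises Invalid_argument when word_length > 4. *)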
let get_remote_id_fingerprint word_length remote_id =
if word_length > 4 then invalid_arg "Too many filename conflicts";
let md5 = Cryptokit.Hash.md5 () in
md5#add_string remote_id;
let md5_result = md5#result in
let hexa = Cryptokit.Hexa.encode () in
hexa#put_string md5_result;
hexa#finish;
let h = hexa#get_string in
let length = word_length * 8 in
let offset = 32 - length in
String.sub h offset length
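(* Resolve filename collisions by appending a fingerprint of the remote id
 * (before the extension, if any), retrying with a longer fingerprint until
 * the candidate is not already in filename_table. *)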
let disambiguate_filename filename full_file_extension remote_id filename_table
=
let rec find_first_unique_filename filename counter =
let new_candidate =
let fingerprint = get_remote_id_fingerprint counter remote_id in
let filename_length = String.length filename in
let extension_opt =
if
String.length full_file_extension > 0
&& ExtString.String.ends_with filename full_file_extension
then Some full_file_extension
else
try
let dot_pos = String.rindex filename '.' in
let ext =
String.sub filename (dot_pos + 1) (filename_length - dot_pos - 1)
in
Some ext
with Not_found -> None
in
match extension_opt with
| None -> Printf.sprintf "%s (%s)" filename fingerprint
| Some extension ->
let extension_length = String.length extension in
let base_name =
String.sub filename 0 (filename_length - extension_length - 1)
in
if String.length base_name > 0 then
Printf.sprintf "%s (%s).%s" base_name fingerprint extension
else Printf.sprintf "(%s).%s" fingerprint extension
in
if not (Hashtbl.mem filename_table new_candidate) then (
Utils.log_with_header "Checking: %s: OK\n%!" new_candidate;
new_candidate)
else (
Utils.log_with_header "Checking: %s: KO\n%!" new_candidate;
find_first_unique_filename filename (counter + 1))
in
if Hashtbl.mem filename_table filename then (
Utils.log_with_header "Filename collision detected: %s\n%!" filename;
let unique_filename = find_first_unique_filename filename 1 in
let name_counter = Hashtbl.find filename_table filename in
Hashtbl.replace filename_table filename (name_counter + 1);
unique_filename)
else (
Utils.log_with_header "Filename (unused): %s\n%!" filename;
Hashtbl.add filename_table filename 0;
filename)
let is_in_trash_directory path config =
if path = trash_directory || config.Config.disable_trash then false
else ExtString.String.starts_with path trash_directory_base_path
let is_lost_and_found_root path trashed config =
if trashed || not config.Config.lost_and_found then false
else path = lost_and_found_directory
let is_lost_and_found path trashed config =
if trashed || not config.Config.lost_and_found then false
else ExtString.String.starts_with path lost_and_found_directory
let is_shared_with_me_root path trashed config =
if trashed then false else path = shared_with_me_directory
let is_shared_with_me path trashed config =
if trashed then false
else ExtString.String.starts_with path shared_with_me_directory
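(* Map a filesystem path to the corresponding path in the cache, stripping
 * the trash directory prefix and returning whether the path is trashed. *)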
let get_path_in_cache path config =
if path = root_directory then (root_directory, false)
else if path = trash_directory && not config.Config.disable_trash then
(root_directory, true)
else if is_in_trash_directory path config then
let path_in_cache = Str.string_after path trash_directory_name_length in
(path_in_cache, true)
else (path, false)
let match_service_error reason = function
| GapiService.ServiceError (_, e) -> (
match e.GapiError.RequestError.errors with
| [] -> false
| e :: _ -> e.GapiError.SingleError.reason = reason)
| _ -> false
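(* Map server/API exceptions to the filesystem exceptions defined above;
 * Temporary_error is the case retried by with_retry_default. *)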
let handle_default_exceptions = function
| GapiService.ServiceError (_, e) -> (
let message =
e |> GapiError.RequestError.to_data_model |> GapiJson.data_model_to_json
|> Yojson.Safe.to_string
in
Utils.log_with_header "Service error: %s.\n%!" message;
match e.GapiError.RequestError.errors with
| [] -> Utils.raise_m IO_error
| e :: _ -> (
match e.GapiError.SingleError.reason with
| "userRateLimitExceeded" | "rateLimitExceeded" | "backendError"
| "downloadQuotaExceeded" ->
Utils.raise_m Utils.Temporary_error
| "insufficientFilePermissions" | "insufficientPermissions" ->
Utils.raise_m Permission_denied
| _ -> Utils.raise_m IO_error))
| GapiRequest.PermissionDenied _ ->
Utils.log_with_header "Server error: Permission denied.\n%!";
Utils.raise_m Permission_denied
| GapiRequest.RequestTimeout _ ->
Utils.log_with_header "Server error: Request Timeout.\n%!";
Utils.raise_m Utils.Temporary_error
| GapiRequest.PreconditionFailed _ | GapiRequest.Conflict _ ->
Utils.log_with_header "Server error: Conflict.\n%!";
Utils.raise_m Utils.Temporary_error
| GapiRequest.Forbidden _ ->
Utils.log_with_header "Server error: Forbidden.\n%!";
Utils.raise_m IO_error
| GapiRequest.Gone _ ->
Utils.log_with_header "Server error: Gone.\n%!";
Utils.raise_m IO_error
| GapiRequest.BadRequest _ ->
Utils.log_with_header "Server error: bad request.\n%!";
Utils.raise_m Utils.Temporary_error
| Buffering.Invalid_block -> Utils.raise_m Invalid_operation
| GapiRequest.NotFound _ ->
Utils.log_with_header "Server error: not found.\n%!";
Utils.raise_m File_not_found
| e -> Utils.raise_m e
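(* Build the X-Goog-Drive-Resource-Keys header from (remote id, resource key)
 * pairs, skipping entries without a resource key. *)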
let build_resource_keys_header ids_and_resource_keys =
let ids_with_valid_resource_keys =
List.filter
(fun (_, resource_key) ->
match resource_key with None -> false | Some "" -> false | _ -> true)
ids_and_resource_keys
in
let id_resource_key_pairs =
List.map
(fun id_resource_key ->
match id_resource_key with
| Some id, Some resource_key -> id ^ "/" ^ resource_key
| _ -> "")
ids_with_valid_resource_keys
in
let valid_id_resource_key_pairs =
List.filter
(fun p -> match p with "" -> false | _ -> true)
id_resource_key_pairs
in
match valid_id_resource_key_pairs with
| [] -> []
| _ ->
let header_value = String.concat "," valid_id_resource_key_pairs in
let custom_header =
GapiCore.Header.KeyValueHeader
("X-Goog-Drive-Resource-Keys", header_value)
in
[ custom_header ]
(* with_try with a default exception handler *)
let try_with_default f s = Utils.try_with_m f handle_default_exceptions s
(* retry with a default exception handler *)
let with_retry_default f =
let rec loop n =
Utils.try_with_m f (fun e ->
try handle_default_exceptions e with
| Utils.Temporary_error ->
if n >= !Utils.max_retries then Utils.raise_m IO_error
else (
GapiUtils.wait_exponential_backoff n;
let n' = n + 1 in
Utils.log_with_header "Retrying (%d/%d).\n%!" n'
!Utils.max_retries;
loop n')
| _ -> Utils.raise_m e)
in
loop 0
let get_file_extension file_name =
try
let dot_index = String.rindex file_name '.' in
String.sub file_name (dot_index + 1)
(String.length file_name - dot_index - 1)
with Not_found -> ""
(* Resource cache *)
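(* Clean a remote file name and append the export format extension for
 * Google documents when docs_file_extension is enabled. *)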
let get_filename name is_document get_document_format =
let context = Context.get_ctx () in
let config = context |. Context.config_lens in
let clean_name = clean_filename name in
let document_format =
if is_document then get_document_format config else ""
in
if is_document && config.Config.docs_file_extension && document_format <> ""
then
let current_extension = get_file_extension clean_name in
if current_extension <> document_format then
clean_name ^ "." ^ document_format
else clean_name
else clean_name
let get_file_extension_from_format resource config =
let fmt = CacheData.Resource.get_format resource config in
match fmt with
| "desktop" when config.Config.desktop_entry_as_html -> "html"
| _ -> fmt
let get_file_extension_from_mime_type mime_type config =
let fmt = CacheData.Resource.get_format_from_mime_type mime_type config in
match fmt with
| "desktop" when config.Config.desktop_entry_as_html -> "html"
| _ -> fmt
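(* Build lookup tables (filename -> collision counter, remote id -> resource)
 * for the cached resources under parent_path. *)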
let build_resource_tables parent_path trashed =
let context = Context.get_ctx () in
let cache = context.Context.cache in
let resources =
Cache.Resource.select_resources_with_parent_path cache parent_path trashed
in
let filename_table = Hashtbl.create Utils.hashtable_initial_size in
let remote_id_table = Hashtbl.create (List.length resources) in
List.iter
(fun resource ->
let name = Option.get resource.CacheData.Resource.name in
let clean_name =
get_filename name (CacheData.Resource.is_document resource)
(fun config -> get_file_extension_from_format resource config)
in
let filename = Filename.basename resource.CacheData.Resource.path in
(if clean_name <> filename then
let name_counter =
try Hashtbl.find filename_table clean_name with Not_found -> 0
in
Hashtbl.replace filename_table clean_name name_counter);
Hashtbl.add filename_table filename 0;
Hashtbl.add remote_id_table
(Option.get resource.CacheData.Resource.remote_id)
resource)
resources;
(filename_table, remote_id_table)
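(* Strip the exported-document extension from file_name when the resource is
 * a document, docs_file_extension is enabled, and the extension matches. *)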
let clean_document_extension file_name resource config =
if CacheData.Resource.is_document resource then
let document_extension = get_file_extension_from_format resource config in
if config.Config.docs_file_extension && document_extension <> "" then
let current_extension = get_file_extension file_name in
if current_extension = document_extension then
let regexp = Str.quote document_extension |> Str.regexp in
try
let pos =
Str.search_backward regexp file_name (String.length file_name)
in
if pos > 0 then Str.string_before file_name (pos - 1) else file_name
with Not_found -> file_name
else file_name
else file_name
else file_name
let create_resource path =
let parent_path = Filename.dirname path in
{
CacheData.Resource.id = 0L;
remote_id = None;
name = None;
mime_type = None;
created_time = None;
modified_time = None;
viewed_by_me_time = None;
file_extension = None;
full_file_extension = None;
md5_checksum = None;
size = None;
can_edit = None;
trashed = None;
web_view_link = None;
export_links = None;
version = None;
resource_key = None;
target_id = None;
target_resource_key = None;
file_mode_bits = None;
uid = None;
gid = None;
link_target = None;
xattrs = "";
parent_path;
path;
state = CacheData.Resource.State.ToDownload;
last_update = Unix.gettimeofday ();
}
let create_root_resource root_folder_id trashed =
let resource = create_resource root_directory in
{
resource with
CacheData.Resource.remote_id = Some root_folder_id;
mime_type = Some folder_mime_type;
size = Some 0L;
parent_path = "";
trashed = Some trashed;
}
let create_well_known_resource path =
let resource = create_resource path in
{
resource with
CacheData.Resource.remote_id = Some "";
mime_type = Some folder_mime_type;
size = Some 0L;
parent_path = "";
trashed = Some false;
}
let get_unique_filename name full_file_extension remote_id is_document
get_document_format filename_table =
let complete_name = get_filename name is_document get_document_format in
disambiguate_filename complete_name full_file_extension remote_id
filename_table
let get_unique_filename_from_resource resource name filename_table =
get_unique_filename name
(Option.default "" resource.CacheData.Resource.full_file_extension)
(Option.default "" resource.CacheData.Resource.remote_id)
(CacheData.Resource.is_document resource)
(fun config -> get_file_extension_from_format resource config)
filename_table
let get_unique_filename_from_file file filename_table =
get_unique_filename file.File.name file.File.fullFileExtension file.File.id
(CacheData.Resource.is_document_mime_type file.File.mimeType)
(fun config -> get_file_extension_from_mime_type file.File.mimeType config)
filename_table
let recompute_path resource name =
(* TODO: make an optimized version of build_resource_tables that
* doesn't create resource table (useful for large directories). *)
let filename_table, _ =
build_resource_tables resource.CacheData.Resource.parent_path
(Option.default false resource.CacheData.Resource.trashed)
in
let filename =
get_unique_filename_from_resource resource name filename_table
in
Filename.concat resource.CacheData.Resource.parent_path filename
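(* Refresh a cached resource from a server-side file: recompute the path if
 * the name changed, keep the cached size while an upload is in progress, and
 * pick up shortcut details and app-property metadata. *)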
let update_resource_from_file ?state ?link_target resource file =
let path =
match resource.CacheData.Resource.name with
| Some cached_name ->
if cached_name <> file.File.name then
recompute_path resource file.File.name
else resource.CacheData.Resource.path
| None -> resource.CacheData.Resource.path
in
let parent_path = Filename.dirname path in
let new_state = Option.default resource.CacheData.Resource.state state in
let new_size =
match new_state with
| CacheData.Resource.State.Uploading | CacheData.Resource.State.ToUpload ->
resource.CacheData.Resource.size
| _ -> Some file.File.size
in
let resource_key =
match file.File.resourceKey with "" -> None | _ as s -> Some s
in
let target_id =
if file.File.mimeType = shortcut_mime_type then
match file.File.shortcutDetails with
| { File.ShortcutDetails.targetId; _ } when targetId <> "" ->
Some targetId
| _ -> None
else None
in
let target_resource_key =
if file.File.mimeType = shortcut_mime_type then
match file.File.shortcutDetails with
| { File.ShortcutDetails.targetResourceKey; _ }
when targetResourceKey <> "" ->
Some targetResourceKey
| _ -> None
else None
in
let link_target =
if file.File.mimeType = shortcut_mime_type then link_target
else CacheData.Resource.get_link_target file.File.appProperties
in
{
resource with
CacheData.Resource.remote_id = Some file.File.id;
name = Some file.File.name;
mime_type = Some file.File.mimeType;
created_time = Some (Netdate.since_epoch file.File.createdTime);
modified_time = Some (Netdate.since_epoch file.File.modifiedTime);
viewed_by_me_time = Some (Netdate.since_epoch file.File.viewedByMeTime);
file_extension = Some file.File.fileExtension;
full_file_extension = Some file.File.fullFileExtension;
md5_checksum = Some file.File.md5Checksum;
size = new_size;
can_edit = Some file.File.capabilities.File.Capabilities.canEdit;
trashed = Some file.File.trashed;
web_view_link = Some file.File.webViewLink;
export_links =
Some (CacheData.Resource.serialize_export_links file.File.exportLinks);
version = Some file.File.version;
resource_key;
target_id;
target_resource_key;
file_mode_bits =
CacheData.Resource.get_file_mode_bits file.File.appProperties;
uid = CacheData.Resource.get_uid file.File.appProperties;
gid = CacheData.Resource.get_gid file.File.appProperties;
link_target;
xattrs = CacheData.Resource.get_xattrs file.File.appProperties;
last_update = Unix.gettimeofday ();
path;
parent_path;
state = new_state;
}
let insert_resource_into_cache ?state ?link_target cache resource file =
let resource = update_resource_from_file ?state ?link_target resource file in
Utils.log_with_header "BEGIN: Saving resource to db (remote id=%s)\n%!"
file.File.id;
let inserted = Cache.Resource.insert_resource cache resource in
Utils.log_with_header
"END: Saving resource to db (remote id=%s, id=%Ld, state=%s)\n%!"
file.File.id inserted.CacheData.Resource.id
(CacheData.Resource.State.to_string inserted.CacheData.Resource.state);
inserted
let update_cached_resource cache resource =
Utils.log_with_header "BEGIN: Updating resource in db (id=%Ld, state=%s)\n%!"
resource.CacheData.Resource.id
(CacheData.Resource.State.to_string resource.CacheData.Resource.state);
Cache.Resource.update_resource cache resource;
Utils.log_with_header "END: Updating resource in db (id=%Ld)\n%!"
resource.CacheData.Resource.id
let update_cached_resource_state cache state id =
Utils.log_with_header
"BEGIN: Updating resource state in db (id=%Ld, state=%s)\n%!" id
(CacheData.Resource.State.to_string state);
Cache.Resource.update_resource_state cache state id;
Utils.log_with_header "END: Updating resource state in db (id=%Ld)\n%!" id
let update_cached_resource_state_and_size cache state size id =
Utils.log_with_header
"BEGIN: Updating resource state and size in db (id=%Ld, state=%s, size=%Ld)\n\
%!"
id
(CacheData.Resource.State.to_string state)
size;
Cache.Resource.update_resource_state_and_size cache state size id;
Utils.log_with_header
"END: Updating resource state and size in db (id=%Ld)\n%!" id
let lookup_resource path trashed =
Utils.log_with_header "BEGIN: Loading resource %s (trashed=%b) from db\n%!"
path trashed;
let cache = Context.get_cache () in
let resource = Cache.Resource.select_resource_with_path cache path trashed in
(if Option.is_none resource then
Utils.log_with_header
"END: Loading resource %s (trashed=%b) from db: Not found\n%!" path trashed
else
let id = resource |. GapiLens.option_get |. CacheData.Resource.id in
let state =
resource |. GapiLens.option_get |. CacheData.Resource.state
|> CacheData.Resource.State.to_string
in
Utils.log_with_header
"END: Loading resource %s (trashed=%b) from db: Found (id=%Ld, state=%s)\n\
%!"
path trashed id state);
resource
let update_cache_size delta metadata cache =
Utils.log_with_header "BEGIN: Updating cache size (delta=%Ld) in db\n%!" delta;
if delta = 0L then
Utils.log_with_header "END: No need to update cache size\n%!"
else (
Cache.Metadata.update_cache_size cache delta;
let update_metadata context =
let metadata =
context.Context.metadata |. GapiLens.option_get
|> CacheData.Metadata.cache_size
^= Int64.add metadata.CacheData.Metadata.cache_size delta
in
Utils.log_with_header "END: Updating cache size (new size=%Ld) in db\n%!"
metadata.CacheData.Metadata.cache_size;
context |> Context.metadata ^= Some metadata
in
Context.update_ctx update_metadata)
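(* Evict the least recently updated cached content until the cache (plus the
 * incoming file_size) fits within max_cache_size_mb. *)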
let shrink_cache ?(file_size = 0L) () =
let context = Context.get_ctx () in
let metadata = context |. Context.metadata_lens in
let config = context |. Context.config_lens in
let max_cache_size_mb = config.Config.max_cache_size_mb in
let cache = context.Context.cache in
Utils.with_lock context.Context.metadata_lock (fun () ->
let max_cache_size =
Int64.mul (Int64.of_int max_cache_size_mb) Utils.mb
in
let target_size =
Int64.add metadata.CacheData.Metadata.cache_size file_size
in
if target_size > max_cache_size then (
let resources =
Cache.Resource.select_resources_order_by_last_update cache
in
let new_cache_size, total_delta, resources_to_free =
List.fold_left
(fun (new_cache_size, delta, rs) resource ->
if new_cache_size <= max_cache_size then
(new_cache_size, delta, rs)
else
let size_to_free =
Option.default 0L resource.CacheData.Resource.size
in
let new_size = Int64.sub new_cache_size size_to_free in
let new_delta = Int64.add delta (Int64.neg size_to_free) in
(new_size, new_delta, resource :: rs))
(target_size, file_size, [])
resources
in
update_cache_size total_delta metadata cache;
List.iter
(fun resource ->
update_cached_resource_state cache
CacheData.Resource.State.ToDownload resource.CacheData.Resource.id)
resources_to_free;
Cache.delete_files_from_cache cache resources_to_free |> ignore)
else update_cache_size file_size metadata cache)
let delete_memory_buffers memory_buffers resource =
Option.may
(fun remote_id ->
Buffering.MemoryBuffers.remove_buffers remote_id memory_buffers)
resource.CacheData.Resource.remote_id
let delete_from_context context resource =
let memory_buffers = context.Context.memory_buffers in
delete_memory_buffers memory_buffers resource;
Option.may
(fun remote_id ->
Context.with_ctx_lock (fun () ->
Hashtbl.remove context.Context.file_locks remote_id))
resource.CacheData.Resource.remote_id
let delete_cached_resource resource =
let context = Context.get_ctx () in
let cache = context.Context.cache in
Cache.Resource.delete_resource cache resource;
let total_size = Cache.delete_files_from_cache cache [ resource ] in
Option.may
(fun metadata -> update_cache_size (Int64.neg total_size) metadata cache)
context.Context.metadata;
delete_from_context context resource
let delete_cached_resources metadata cache resources =
Cache.Resource.delete_resources cache resources;
let total_size = Cache.delete_files_from_cache cache resources in
update_cache_size (Int64.neg total_size) metadata cache;
let context = Context.get_ctx () in
List.iter (delete_from_context context) resources
let update_cache_size_for_documents cache resource content_path op =
let context = Context.get_ctx () in
Utils.with_lock context.Context.metadata_lock (fun () ->
if
resource.CacheData.Resource.size = Some 0L
&& Sys.file_exists content_path
then
try
let stats = Unix.LargeFile.stat content_path in
let size = stats.Unix.LargeFile.st_size in
let metadata = context |. Context.metadata_lens in
let delta = op size in
update_cache_size delta metadata cache
with e -> Utils.log_exception e)
let build_resource_keys_header_from_resource resource =
let ids_and_resource_keys =
[
( resource.CacheData.Resource.remote_id,
resource.CacheData.Resource.resource_key );
]
in
build_resource_keys_header ids_and_resource_keys
let build_resource_keys_header_from_resources resources =
let ids_and_resource_keys =
List.map
(fun resource ->
( resource.CacheData.Resource.remote_id,
resource.CacheData.Resource.resource_key ))
resources
in
build_resource_keys_header ids_and_resource_keys
(* END Resource cache *)
(* Metadata *)
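(* Look up a file by name on the server, either inside parent_folder_id or
 * among the files shared with me when parent_folder_id is empty. *)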
let get_file_from_server parent_folder_id name trashed =
let config = Context.get_ctx () |. Context.config_lens in
Utils.log_with_header "BEGIN: Getting resource %s (%s) from server\n%!" name
    (if parent_folder_id = "" then "shared with me"
     else "in folder " ^ parent_folder_id);
let q =
if parent_folder_id <> "" then
Printf.sprintf "name='%s' and '%s' in parents and trashed=%b"
(escape_apostrophe name) parent_folder_id trashed
else
Printf.sprintf "name='%s' and sharedWithMe = true"
(escape_apostrophe name)
in
with_retry_default
(FilesResource.list ~supportsAllDrives:true
~driveId:config.Config.team_drive_id
~includeItemsFromAllDrives:(config.Config.team_drive_id <> "")
~corpora:(if config.Config.team_drive_id <> "" then "drive" else "user")
~std_params:file_list_std_params ~q ~pageSize:1)
>>= fun file_list ->
Utils.log_with_header
"END: Getting resource %s (in folder %s) from server\n%!" name
parent_folder_id;
let files = file_list.FileList.files in
if List.length files = 0 then SessionM.return None
else
let file = files |. GapiLens.head in
SessionM.return (Some file)
let get_root_folder_id_from_server config =
Utils.log_with_header "BEGIN: Getting root resource from server\n%!";
(if config.Config.scope = device_scope then
get_file_from_server default_root_folder_id device_root_folder false
>>= fun root_option ->
match root_option with
| None ->
let file =
{
File.empty with
File.name = device_root_folder;
mimeType = "application/vnd.google-apps.folder";
}
in
Utils.log_with_header "BEGIN: Creating root (%s) on server\n%!"
device_root_folder;
with_retry_default
(FilesResource.create ~enforceSingleParent:true ~supportsAllDrives:true
~std_params:file_std_params file)
>>= fun created_file ->
Utils.log_with_header "END: Creating root (id=%s) on server\n%!"
created_file.File.id;
SessionM.return created_file
| Some root -> SessionM.return root
else
with_retry_default
(FilesResource.get ~supportsAllDrives:true ~std_params:file_std_params
~fileId:default_root_folder_id)
>>= fun file -> SessionM.return file)
>>= fun file ->
Utils.log_with_header "END: Getting root resource (id=%s) from server\n%!"
file.File.id;
SessionM.return file.File.id
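(* Resolve the configured root folder (an absolute path or a folder id) to a
 * folder id, walking the path components starting from the default root. *)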
let get_root_folder_id config =
let rec loop path parent_folder_id =
let name, rest =
try ExtString.String.split path Filename.dir_sep
with ExtString.Invalid_string -> (path, "")
in
match name with
| "" -> SessionM.return parent_folder_id
| n -> (
get_file_from_server parent_folder_id n false >>= fun file ->
match file with
| None -> Utils.raise_m (Failure "Invalid root folder in configuration")
| Some f -> loop rest f.File.id)
in
Utils.log_with_header
"BEGIN: Getting root folder id (team drive id=%s, root folder=%s) from \
server\n\
%!"
config.Config.team_drive_id config.Config.root_folder;
let default_root_id =
match config.Config.team_drive_id with
| "" -> default_root_folder_id
| id -> id
in
(match config.Config.root_folder with
| "" -> SessionM.return default_root_id
| s when not (Filename.is_relative s) ->
loop (String.sub s 1 (String.length s - 1)) default_root_id
| s -> SessionM.return s)
>>= fun root_folder_id ->
(if root_folder_id = default_root_folder_id then
get_root_folder_id_from_server config
else SessionM.return root_folder_id)
>>= fun root_folder_id ->
Utils.log_with_header "END: Getting root folder id (id=%s) from server\n%!"
root_folder_id;
SessionM.return root_folder_id
let get_root_folder_id_from_context () =
let context = Context.get_ctx () in
let config = context |. Context.config_lens in
let root_folder_id_option = context.Context.root_folder_id in
match root_folder_id_option with
| None ->
let root_folder_id = do_request (get_root_folder_id config) |> fst in
Context.update_ctx (Context.root_folder_id ^= Some root_folder_id);
root_folder_id
| Some r -> r
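(* Return the cached resource for a well known path (root, lost+found,
 * shared with me), creating and caching it on first access. *)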
let get_well_known_resource path trashed =
let root_folder_id = get_root_folder_id_from_context () in
let context = Context.get_ctx () in
let cache = context.Context.cache in
let config = context |. Context.config_lens in
match lookup_resource path trashed with
| None ->
let well_known_resource, label =
if path = root_directory then
(create_root_resource root_folder_id trashed, "root")
else if is_lost_and_found_root path trashed config then
(create_well_known_resource lost_and_found_directory, "lost+found")
else if is_shared_with_me_root path trashed config then
(create_well_known_resource shared_with_me_directory, "shared with me")
else
invalid_arg
("Invalid well known path: " ^ path ^ " trashed="
^ string_of_bool trashed)
in
Utils.log_with_header "BEGIN: Saving %s resource to db\n%!" label;
let inserted = Cache.Resource.insert_resource cache well_known_resource in
Utils.log_with_header "END: Saving %s resource to db (id=%Ld)\n%!" label
inserted.CacheData.Resource.id;
inserted
| Some resource -> resource
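(* Retrieve account metadata (user, quota, start page token) and bring the
 * resource cache up to date with the pending change list. *)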
let get_metadata () =
let config = Context.get_ctx () |. Context.config_lens in
let request_new_start_page_token =
let std_params =
{
GapiService.StandardParameters.default with
GapiService.StandardParameters.fields = "startPageToken";
}
in
with_retry_default
(ChangesResource.getStartPageToken ~supportsAllDrives:true
~driveId:config.Config.team_drive_id ~std_params)
>>= fun startPageToken ->
SessionM.return startPageToken.StartPageToken.startPageToken
in
let get_start_page_token start_page_token_db =
if start_page_token_db = "" then request_new_start_page_token
else SessionM.return start_page_token_db
in
let request_metadata start_page_token_db cache_size =
let std_params =
{
GapiService.StandardParameters.default with
GapiService.StandardParameters.fields =
"user(displayName),storageQuota(limit,usage)";
}
in
with_retry_default (AboutResource.get ~std_params) >>= fun about ->
get_start_page_token start_page_token_db >>= fun start_page_token ->
let metadata =
{
CacheData.Metadata.display_name = about.About.user.User.displayName;
storage_quota_limit = about.About.storageQuota.About.StorageQuota.limit;
storage_quota_usage = about.About.storageQuota.About.StorageQuota.usage;
start_page_token;
cache_size;
last_update = Unix.gettimeofday ();
clean_shutdown = false;
}
in
SessionM.return metadata
in
let context = Context.get_ctx () in
let cache = context.Context.cache in
let config = context |. Context.config_lens in
let update_resource_cache new_metadata old_metadata =
let get_all_changes =
let rec loop pageToken accu =
with_retry_default
(ChangesResource.list ~supportsAllDrives:true
~driveId:config.Config.team_drive_id
~includeItemsFromAllDrives:(config.Config.team_drive_id <> "")
~std_params:changes_std_params ~includeRemoved:true ~pageToken)
>>= fun change_list ->
let changes = change_list.ChangeList.changes @ accu in
if change_list.ChangeList.nextPageToken = "" then
SessionM.return (changes, change_list.ChangeList.newStartPageToken)
else loop change_list.ChangeList.nextPageToken changes
in
loop new_metadata.CacheData.Metadata.start_page_token []
in
let request_changes =
Utils.log_with_header "BEGIN: Getting changes from server\n%!";
get_all_changes >>= fun (changes, new_start_page_token) ->
Utils.log_with_header "END: Getting changes from server\n%!";
SessionM.return (changes, new_start_page_token)
in
let get_resources_and_files_to_update change =
let selected_resources =
Cache.Resource.select_resources_with_remote_id cache
change.Change.fileId
in
List.filter
(fun r ->
change.Change.file.File.version > 0L
&& change.Change.file.File.version
> Option.default 0L r.CacheData.Resource.version)
selected_resources
|> List.map (fun r -> Some (r, change.Change.file))
in
let get_resource_from_change change =
Cache.Resource.select_resources_with_remote_id cache change.Change.fileId
|> List.map (fun r -> Some r)
in
let get_new_resource_from_change change =
match
Cache.Resource.select_resources_with_remote_id cache
change.Change.fileId
with
| [] -> (
let parent_resources =
let parent_remote_ids =
match change.Change.file.File.parents with [] -> [] | ids -> ids
in
List.map
(Cache.Resource.select_resources_with_remote_id cache)
parent_remote_ids
|> List.concat
|> List.filter (fun r ->
r.CacheData.Resource.state
= CacheData.Resource.State.Synchronized)
in
match parent_resources with
| [] -> []
| prs ->
let parent_path = List.hd prs |. CacheData.Resource.path in
let filename_table, _ = build_resource_tables parent_path false in
let filename =
get_unique_filename_from_file change.Change.file filename_table
in
let resource_path = Filename.concat parent_path filename in
let resource = create_resource resource_path in
[ Some (resource, change.Change.file) ])
| _ -> []
in
let request_remaining_changes start_page_token_db =
if start_page_token_db = "" then SessionM.return (false, true)
else
let std_params =
{
GapiService.StandardParameters.default with
GapiService.StandardParameters.fields = "newStartPageToken";
}
in
with_retry_default
(ChangesResource.list ~supportsAllDrives:true
~driveId:config.Config.team_drive_id
~includeItemsFromAllDrives:(config.Config.team_drive_id <> "")
~std_params ~includeRemoved:true ~pageSize:change_limit
~pageToken:start_page_token_db)
>>= fun change_list ->
let no_changes, over_limit =
( change_list.ChangeList.newStartPageToken = start_page_token_db,
change_list.ChangeList.newStartPageToken = "" )
in
SessionM.return (no_changes, over_limit)
in
request_remaining_changes new_metadata.CacheData.Metadata.start_page_token
>>= fun (no_changes, over_limit) ->
if no_changes then (
Utils.log_with_header
"END: Getting metadata: No need to update resource cache\n%!";
Utils.log_with_header "BEGIN: Updating timestamps\n%!";
Cache.Resource.update_all_timestamps cache
new_metadata.CacheData.Metadata.last_update;
Utils.log_with_header "END: Updating timestamps\n%!";
SessionM.return new_metadata)
else if over_limit then (
Utils.log_with_header "END: Getting metadata: Too many changes\n";
Utils.log_with_header "BEGIN: Getting new start page token\n%!";
get_start_page_token "" >>= fun new_start_page_token ->
Utils.log_with_header "END: Getting new start page token (%s)\n%!"
new_start_page_token;
Utils.log_with_header "BEGIN: Invalidating resources\n%!";
Cache.Resource.invalidate_all cache;
Utils.log_with_header "END: Invalidating resources\n%!";