-
Notifications
You must be signed in to change notification settings - Fork 42
/
documents.rs
2595 lines (2304 loc) · 88.8 KB
/
documents.rs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
use crate::utils::schemas;
use events::publish;
use eyre::{bail, Result};
use formats::FormatSpec;
use graph::{Graph, Plan, PlanOptions, PlanOrdering, PlanScope};
use graph_triples::{resources, Relations};
use kernels::{KernelInfos, KernelSpace, KernelSymbols};
use maplit::hashset;
use node_address::AddressMap;
use node_execute::{
compile, execute, CancelRequest, CancelResponse, CompileRequest, CompileResponse,
ExecuteRequest, ExecuteResponse, PatchRequest, PatchResponse, RequestId,
};
use node_patch::{apply, diff, merge, Patch};
use node_reshape::reshape;
use notify::DebouncedEvent;
use once_cell::sync::Lazy;
use schemars::{gen::SchemaGenerator, schema::Schema, JsonSchema};
use serde::Serialize;
use serde_with::skip_serializing_none;
use std::{
collections::{hash_map::Entry, HashMap, HashSet},
env, fs,
ops::Deref,
path::{Path, PathBuf},
sync::Arc,
time::{Duration, Instant},
};
use stencila_schema::{Article, Node};
use strum::Display;
use tokio::{
sync::{mpsc, watch, Mutex, RwLock},
task::JoinHandle,
};
/// The type of a [`DocumentEvent`]
///
/// Serialized (and stringified via `strum`) in lowercase so the variant name
/// matches the event topic suffix (e.g. `documents:<id>:patched`).
#[derive(Debug, JsonSchema, Serialize, Display)]
#[serde(rename_all = "lowercase")]
#[strum(serialize_all = "lowercase")]
enum DocumentEventType {
    /// The document's file was deleted
    Deleted,
    /// The document's file was renamed
    Renamed,
    /// The document's file was modified on disk
    Modified,
    /// A [`Patch`] was applied to the document's root node
    Patched,
    /// The document's content was encoded to a particular format
    Encoded,
}
/// An event published on a document's topic (e.g. `documents:<id>:patched`)
///
/// `None` fields are skipped during serialization to keep event payloads small.
#[skip_serializing_none]
#[derive(Debug, JsonSchema, Serialize)]
#[schemars(deny_unknown_fields)]
struct DocumentEvent {
    /// The type of event
    #[serde(rename = "type")]
    type_: DocumentEventType,
    /// The document associated with the event
    #[schemars(schema_with = "DocumentEvent::schema_document")]
    document: Document,
    /// The content associated with the event, only provided for, `modified`
    /// and `encoded` events.
    content: Option<String>,
    /// The format of the document, only provided for `modified` (the format
    /// of the document) and `encoded` events (the format of the encoding).
    #[schemars(schema_with = "DocumentEvent::schema_format")]
    format: Option<FormatSpec>,
    /// The `Patch` associated with a `Patched` event
    #[schemars(schema_with = "DocumentEvent::schema_patch")]
    patch: Option<Patch>,
}
impl DocumentEvent {
    /// Generate the JSON Schema for the `document` property to avoid nesting
    fn schema_document(_generator: &mut SchemaGenerator) -> Schema {
        schemas::typescript("Document", true)
    }

    /// Generate the JSON Schema for the `format` property to avoid nesting
    fn schema_format(_generator: &mut SchemaGenerator) -> Schema {
        schemas::typescript("Format", false)
    }

    /// Generate the JSON Schema for the `patch` property to avoid nesting
    fn schema_patch(_generator: &mut SchemaGenerator) -> Schema {
        schemas::typescript("Patch", false)
    }
}
/// The status of a document with respect to on-disk synchronization
///
/// Serialized and stringified in lowercase (e.g. `"synced"`, `"unwritten"`).
#[derive(Debug, Clone, JsonSchema, Serialize, Display)]
#[serde(rename_all = "lowercase")]
#[strum(serialize_all = "lowercase")]
enum DocumentStatus {
    /// The document `content` is the same as on disk at its `path`.
    Synced,
    /// The document `content` has modifications that have not yet
    /// been written to its `path`.
    Unwritten,
    /// The document `path` has modifications that have not yet
    /// been read into its `content`.
    Unread,
    /// The document `path` no longer exists and is now set to `None`.
    /// The user will need to choose a new path for the document if they
    /// want to save it.
    Deleted,
}
/// An in-memory representation of a document
#[derive(Debug, JsonSchema, Serialize)]
#[schemars(deny_unknown_fields)]
pub struct Document {
    /// The document identifier
    pub id: String,
    /// The absolute path of the document's file.
    pub path: PathBuf,
    /// The project directory for this document.
    ///
    /// Used to restrict file links (e.g. image paths) to within
    /// the project for both security and reproducibility reasons.
    /// For documents opened from within a project, this will be project directory.
    /// For "orphan" documents (opened by themselves) this will be the
    /// parent directory of the document. When the document is compiled,
    /// an error will be returned if a file link is outside of the root.
    project: PathBuf,
    /// Whether or not the document's file is in the temporary
    /// directory.
    temporary: bool,
    /// The synchronization status of the document.
    /// This is orthogonal to `temporary` because a document's
    /// `content` can be synced or un-synced with the file system
    /// regardless of whether or not its `path` is temporary..
    status: DocumentStatus,
    /// The last time that the document was written to disk.
    /// Used to ignore subsequent file modification events.
    #[serde(skip)]
    last_write: Option<Instant>,
    /// The name of the document
    ///
    /// Usually the filename from the `path` but "Untitled"
    /// for temporary documents.
    name: String,
    /// The format of the document.
    ///
    /// On initialization, this is inferred, if possible, from the file name extension
    /// of the document's `path`. However, it may change whilst the document is
    /// open in memory (e.g. if the `load` function sets a different format).
    #[schemars(schema_with = "Document::schema_format")]
    format: FormatSpec,
    /// Whether a HTML preview of the document is supported
    ///
    /// This is determined by the type of the `root` node of the document.
    /// Will be `true` if the `root` is a type for which HTML previews are
    /// implemented e.g. `Article`, `ImageObject` and `false` if the `root`
    /// is `None`, or of some other type e.g. `Entity`.
    ///
    /// This flag is intended for dynamically determining whether to open
    /// a preview panel for a document by default. Regardless of its value,
    /// a user should be able to open a preview panel, in HTML or some other
    /// format, for any document.
    previewable: bool,
    /// The current UTF8 string content of the document.
    ///
    /// When a document is `read()` from a file the `content` is the content
    /// of the file. The `content` may subsequently be changed using
    /// the `load()` function. A call to `write()` will write the content
    /// back to `path`.
    ///
    /// Skipped during serialization because will often be large.
    #[serde(skip)]
    content: String,
    /// The root Stencila Schema node of the document
    ///
    /// Can be any type of `Node` but defaults to an empty `Article`.
    ///
    /// A [`RwLock`] to enable separate, concurrent tasks to read (e.g. for dumping to some
    /// format) and write (e.g. to apply patches from clients) the node.
    ///
    /// Skipped during serialization because will often be large.
    #[serde(skip)]
    root: Arc<RwLock<Node>>,
    /// Addresses of nodes in `root` that have an `id`
    ///
    /// Used to fetch a particular node (and do something with it like `patch`
    /// or `execute` it) rather than walking the node tree looking for it.
    /// It is necessary to use [`Address`] here (rather than say raw pointers) because
    /// pointers or references will change as the document is patched.
    /// These addresses are shifted when the document is patched to account for this.
    #[serde(skip)]
    addresses: Arc<RwLock<AddressMap>>,
    /// The kernel space for this document.
    ///
    /// This is where document variables are stored and executable nodes such as
    /// `CodeChunk`s and `Parameters`s are executed.
    #[serde(skip)]
    kernels: Arc<RwLock<KernelSpace>>,
    /// The set of dependency relations between this document, or nodes in this document,
    /// and other resources.
    ///
    /// Relations may be external (e.g. the document links to another `Resource::File`),
    /// or internal (e.g. the second code chunk uses a `Resource::Symbol` defined in the
    /// first code chunk).
    ///
    /// Stored for use in building the project's graph, but that may be removed
    /// in the future. Not serialized since this information is in `self.graph`.
    #[serde(skip)]
    pub relations: Relations,
    /// The document's dependency graph
    ///
    /// This is derived from `relations`.
    #[serde(skip)]
    pub graph: Arc<RwLock<Graph>>,
    /// The clients that are subscribed to each topic for this document
    ///
    /// Keeping track of client ids per topics allows for a some
    /// optimizations. For example, events will only be published on topics that have at least one
    /// subscriber.
    ///
    /// Valid subscription topics are the names of the `DocumentEvent` types:
    ///
    /// - `removed`: published when document file is deleted
    /// - `renamed`: published when document file is renamed
    /// - `modified`: published when document file is modified
    /// - `encoded:<format>` published when a document's content
    ///   is changed internally or externally and conversions have been
    ///   completed e.g. `encoded:html`
    subscriptions: HashMap<String, HashSet<String>>,
    /// Sender used to submit [`PatchRequest`]s to the document's patch task
    ///
    /// Unbounded so that senders never block while holding locks (see `patch_task`).
    #[serde(skip)]
    patch_request_sender: mpsc::UnboundedSender<PatchRequest>,
    /// Receiver for [`PatchResponse`]s from the document's patch task
    #[serde(skip)]
    patch_response_receiver: watch::Receiver<PatchResponse>,
    /// Sender used to submit [`CompileRequest`]s to the document's compile task
    #[serde(skip)]
    compile_request_sender: mpsc::Sender<CompileRequest>,
    /// Receiver for [`CompileResponse`]s from the document's compile task
    #[serde(skip)]
    compile_response_receiver: watch::Receiver<CompileResponse>,
    /// Sender used to submit [`ExecuteRequest`]s to the document's execute task
    #[serde(skip)]
    execute_request_sender: mpsc::Sender<ExecuteRequest>,
    /// Receiver for [`ExecuteResponse`]s from the document's execute task
    #[serde(skip)]
    execute_response_receiver: watch::Receiver<ExecuteResponse>,
    /// Sender used to submit [`CancelRequest`]s to the document's execute task
    #[serde(skip)]
    cancel_request_sender: mpsc::Sender<CancelRequest>,
    /// Receiver for [`CancelResponse`]s from the document's execute task
    #[serde(skip)]
    cancel_response_receiver: watch::Receiver<CancelResponse>,
}
#[allow(unused)]
impl Document {
/// Generate the JSON Schema for the `format` property to avoid duplicated
/// inline type.
fn schema_format(_generator: &mut schemars::gen::SchemaGenerator) -> Schema {
schemas::typescript("Format", true)
}
/// Generate the JSON Schema for the `addresses` property to avoid duplicated types.
fn schema_addresses(_generator: &mut schemars::gen::SchemaGenerator) -> Schema {
schemas::typescript("Record<string, Address>", true)
}
/// Create a new empty document.
///
/// # Arguments
///
/// - `path`: The path of the document; defaults to a temporary path.
/// - `format`: The format of the document; defaults to plain text.
///
/// This function is intended to be used by editors when creating
/// a new document. If the `path` is not specified, the created document
/// will be `temporary: true` and have a temporary file path.
#[tracing::instrument]
fn new(path: Option<PathBuf>, format: Option<String>) -> Document {
    let id = uuids::generate("do").to_string();

    // Resolve the format spec: an explicit `format` takes precedence, then the
    // path's extension, then plain text
    let format = if let Some(format) = format {
        formats::match_path(&format)
    } else if let Some(path) = path.as_ref() {
        formats::match_path(path)
    } else {
        formats::match_name("txt")
    }
    .spec();
    let previewable = format.preview;

    // Determine the path, display name, and whether the document is temporary
    let (path, name, temporary) = match path {
        Some(path) => {
            let name = path
                .file_name()
                .map(|os_str| os_str.to_string_lossy())
                .unwrap_or_else(|| "Untitled".into())
                .into();
            (path, name, false)
        }
        None => {
            let path = env::temp_dir().join(
                [
                    uuids::generate("fi").to_string(),
                    ".".to_string(),
                    format.extension.clone(),
                ]
                .concat(),
            );
            // Ensure that the file exists
            if !path.exists() {
                fs::write(&path, "").expect("Unable to write temporary file");
            }
            let name = "Untitled".into();
            (path, name, true)
        }
    };

    // For an "orphan" document the project root is its parent directory
    let project = path
        .parent()
        .expect("Unable to get path parent")
        .to_path_buf();

    // Shared state handed to the background tasks spawned below
    let root = Arc::new(RwLock::new(Node::Article(Article::default())));
    let addresses = Arc::new(RwLock::new(AddressMap::default()));
    let graph = Arc::new(RwLock::new(Graph::default()));
    let kernels = Arc::new(RwLock::new(KernelSpace::new()));

    // Request/response channels for the patch, compile, execute and cancel tasks.
    // Patch requests use an unbounded channel so senders never block while
    // holding locks (see `patch_task`).
    let (patch_request_sender, mut patch_request_receiver) =
        mpsc::unbounded_channel::<PatchRequest>();
    let (patch_response_sender, patch_response_receiver) =
        watch::channel::<PatchResponse>(PatchResponse::null());
    let (compile_request_sender, mut compile_request_receiver) =
        mpsc::channel::<CompileRequest>(100);
    let (compile_response_sender, compile_response_receiver) =
        watch::channel::<CompileResponse>(CompileResponse::null());
    let (execute_request_sender, mut execute_request_receiver) =
        mpsc::channel::<ExecuteRequest>(100);
    let (execute_response_sender, execute_response_receiver) =
        watch::channel::<ExecuteResponse>(ExecuteResponse::null());
    let (cancel_request_sender, mut cancel_request_receiver) =
        mpsc::channel::<CancelRequest>(100);
    let (cancel_response_sender, cancel_response_receiver) =
        watch::channel::<CancelResponse>(CancelResponse::null());

    // Spawn the patch task
    let id_clone = id.clone();
    let root_clone = root.clone();
    let addresses_clone = addresses.clone();
    let compile_sender_clone = compile_request_sender.clone();
    tokio::spawn(async move {
        Self::patch_task(
            &id_clone,
            &root_clone,
            &addresses_clone,
            &compile_sender_clone,
            &mut patch_request_receiver,
            &patch_response_sender,
        )
        .await
    });

    // Spawn the compile task
    let id_clone = id.clone();
    let path_clone = path.clone();
    let project_clone = project.clone();
    let root_clone = root.clone();
    let addresses_clone = addresses.clone();
    let graph_clone = graph.clone();
    let patch_sender_clone = patch_request_sender.clone();
    let execute_sender_clone = execute_request_sender.clone();
    tokio::spawn(async move {
        Self::compile_task(
            &id_clone,
            &path_clone,
            &project_clone,
            &root_clone,
            &addresses_clone,
            &graph_clone,
            &patch_sender_clone,
            &execute_sender_clone,
            &mut compile_request_receiver,
            &compile_response_sender,
        )
        .await
    });

    // Spawn the execute task (also handles cancel requests)
    let id_clone = id.clone();
    let path_clone = path.clone();
    let project_clone = project.clone();
    let root_clone = root.clone();
    let addresses_clone = addresses.clone();
    let graph_clone = graph.clone();
    let kernels_clone = kernels.clone();
    let patch_sender_clone = patch_request_sender.clone();
    tokio::spawn(async move {
        Self::execute_task(
            &id_clone,
            &path_clone,
            &project_clone,
            &root_clone,
            &addresses_clone,
            &graph_clone,
            &kernels_clone,
            &patch_sender_clone,
            &mut cancel_request_receiver,
            &mut execute_request_receiver,
            &execute_response_sender,
        )
        .await
    });

    Document {
        id,
        path,
        project,
        temporary,
        name,
        format,
        previewable,
        status: DocumentStatus::Synced,
        last_write: Default::default(),
        content: Default::default(),
        root,
        addresses,
        graph,
        kernels,
        relations: Default::default(),
        subscriptions: Default::default(),
        patch_request_sender,
        patch_response_receiver,
        compile_request_sender,
        compile_response_receiver,
        execute_request_sender,
        execute_response_receiver,
        cancel_request_sender,
        cancel_response_receiver,
    }
}
/// Create a representation of the document
///
/// Used to represent the document in events and as the return value of functions,
/// so as to provide properties such as `path` and `status` without cloning
/// expensive state such as its `kernels` or `root` node.
///
/// TODO: This function needs to be factored out of existence or create a lighter weight
/// repr / summary of a document for serialization.
pub fn repr(&self) -> Self {
    Self {
        // Cheap metadata: cloned as-is
        id: self.id.clone(),
        path: self.path.clone(),
        project: self.project.clone(),
        temporary: self.temporary,
        status: self.status.clone(),
        name: self.name.clone(),
        format: self.format.clone(),
        previewable: self.previewable,
        // `Arc` clones: share the same underlying state, not a deep copy
        addresses: self.addresses.clone(),
        graph: self.graph.clone(),
        subscriptions: self.subscriptions.clone(),
        last_write: self.last_write,
        // Potentially large state is replaced with empty defaults
        content: Default::default(),
        kernels: Default::default(),
        relations: Default::default(),
        // A fresh empty root rather than a copy of the (possibly large) node tree
        root: Arc::new(RwLock::new(Node::Article(Article::default()))),
        // Channel handles: clones stay connected to the same background tasks
        patch_request_sender: self.patch_request_sender.clone(),
        patch_response_receiver: self.patch_response_receiver.clone(),
        compile_request_sender: self.compile_request_sender.clone(),
        compile_response_receiver: self.compile_response_receiver.clone(),
        execute_request_sender: self.execute_request_sender.clone(),
        execute_response_receiver: self.execute_response_receiver.clone(),
        cancel_request_sender: self.cancel_request_sender.clone(),
        cancel_response_receiver: self.cancel_response_receiver.clone(),
    }
}
/// Create a new document, optionally with content.
///
/// # Arguments
///
/// - `path`: The path of the document; defaults to a temporary path.
/// - `content`: Initial content to load into the document, if any.
/// - `format`: The format of the document; inferred if not supplied.
pub async fn create<P: AsRef<Path>>(
    path: Option<P>,
    content: Option<String>,
    format: Option<String>,
) -> Result<Document> {
    let path = path.map(|p| p.as_ref().to_path_buf());
    let mut document = Document::new(path, format);
    match content {
        Some(content) => {
            document.load(content, None).await?;
            Ok(document)
        }
        None => Ok(document),
    }
}
/// Open a document from an existing file.
///
/// # Arguments
///
/// - `path`: The path of the file to create the document from
///
/// - `format`: The format of the document. If `None` will be inferred from
/// the path's file extension.
/// TODO: add project: Option<PathBuf> so that project can be explictly set
#[tracing::instrument(skip(path))]
pub async fn open<P: AsRef<Path>>(path: P, format: Option<String>) -> Result<Document> {
let path = PathBuf::from(path.as_ref());
let mut document = Document::new(Some(path.clone()), format);
if let Err(error) = document.read(true).await {
tracing::warn!("While reading document `{}`: {}", path.display(), error)
};
Ok(document)
}
/// Alter properties of the document
///
/// # Arguments
///
/// - `path`: The path of document's file
///
/// - `format`: The format of the document. If `None` will be inferred from
///   the path's file extension.
///
/// Setting a new `path` marks the document as non-temporary and `Unwritten`;
/// any change in `format` triggers an update of the document's `root`.
#[tracing::instrument(skip(self, path))]
pub async fn alter<P: AsRef<Path>>(
    &mut self,
    path: Option<P>,
    format: Option<String>,
) -> Result<()> {
    if let Some(path) = &path {
        // Canonicalize so that `parent()` and `file_name()` below are reliable
        let path = path.as_ref().canonicalize()?;
        if path.is_dir() {
            bail!("Can not open a folder as a document; maybe try opening it as a project instead.")
        }
        // The project root becomes the new path's parent directory
        self.project = path
            .parent()
            .expect("Unable to get path parent")
            .to_path_buf();
        self.name = path
            .file_name()
            .map(|os_str| os_str.to_string_lossy())
            .unwrap_or_else(|| "Untitled".into())
            .into();
        self.path = path;
        // The document now has a user-chosen path but has not been written there yet
        self.temporary = false;
        self.status = DocumentStatus::Unwritten;
    }
    // An explicit `format` wins; otherwise infer from the new path (if any)
    if let Some(format) = format {
        self.format = formats::match_path(&format).spec();
    } else if let Some(path) = path {
        self.format = formats::match_path(&path).spec();
    };
    self.previewable = self.format.preview;
    // Given that the `format` may have changed, it is necessary
    // to update the `root` of the document
    self.update(true).await?;
    Ok(())
}
/// Read the document from the file system, update it and return its content.
///
/// # Arguments
///
/// - `force_load`: if `false` then if the file is empty, or is the same as the existing
///   content then do not load the content into the document
///
/// Using `force_load: false` is recommended when calling this function in response to
/// file modification events as writes in quick succession can cause the file to be momentarily
/// empty when read.
///
/// Sets `status` to `Synced`. For binary files, does not actually read the content
/// but will update the document nonetheless (possibly delegating the actual read
/// to a binary or plugin)
#[tracing::instrument(skip(self))]
pub async fn read(&mut self, force_load: bool) -> Result<String> {
    // Binary formats: no string content to read; just update the document
    if self.format.binary {
        self.update(true).await?;
        self.status = DocumentStatus::Synced;
        return Ok(String::new());
    }
    let content = fs::read_to_string(&self.path)?;
    // Skip the load when not forced and the file is empty or unchanged
    let should_load = force_load || (!content.is_empty() && content != self.content);
    if should_load {
        self.load(content.clone(), None).await?;
    }
    self.status = DocumentStatus::Synced;
    Ok(content)
}
/// Write the document to the file system, optionally load new `content`
/// and set `format` before doing so.
///
/// # Arguments
///
/// - `content`: the content to load into the document
/// - `format`: the format of the content; if not supplied assumed to be
///   the document's existing format.
///
/// Sets `status` to `Synced`.
#[tracing::instrument(skip(self, content))]
pub async fn write(&mut self, content: Option<String>, format: Option<String>) -> Result<()> {
    // Load first: for a differing format, `load` converts and stores the
    // content in the document's own format
    if let Some(content) = content {
        self.load(content, format.clone()).await?;
    }
    let content_to_write = if let Some(input_format) = format.as_ref() {
        let input_format = formats::match_path(&input_format).spec();
        if input_format != self.format {
            // NOTE(review): `dump(None)` short-circuits to `self.content.clone()`,
            // so this branch currently yields the same value as the branches below.
            // Possibly `dump(Some(self.format.extension.clone()))` was intended
            // (encode the root node) — confirm before changing.
            self.dump(None).await?
        } else {
            self.content.clone()
        }
    } else {
        self.content.clone()
    };
    fs::write(&self.path, content_to_write.as_bytes())?;
    self.status = DocumentStatus::Synced;
    // Record the write time so the file watcher can ignore the resulting
    // modification event (see `last_write` field docs)
    self.last_write = Some(Instant::now());
    Ok(())
}
/// Write the document to the file system, as an another file, possibly in
/// another format.
///
/// # Arguments
///
/// - `path`: the path for the new file.
/// - `format`: the format to dump the content as; if not supplied assumed to be
///   the document's existing format.
/// - `theme`: theme to apply to the new document (HTML and PDF only).
///
/// Note: this does not change the `path`, `format` or `status` of the current
/// document.
#[tracing::instrument(skip(self, path))]
pub async fn write_as<P: AsRef<Path>>(
    &self,
    path: P,
    format: Option<String>,
    theme: Option<String>,
) -> Result<()> {
    let path = path.as_ref();
    // Fall back to the target path's extension, and then to the document's
    // own format, when no format is given
    let format = match format {
        Some(format) => format,
        None => match path.extension() {
            Some(ext) => ext.to_string_lossy().to_string(),
            None => self.format.extension.clone(),
        },
    };
    // Always encode as a standalone document; apply the theme if one was given
    let mut options = codecs::EncodeOptions {
        standalone: true,
        ..Default::default()
    };
    if let Some(theme) = theme {
        options.theme = theme
    }
    let root = self.root.read().await;
    codecs::to_path(&*root, path, &format, Some(options)).await?;
    Ok(())
}
/// Dump the document's content to a string in its current, or
/// alternative, format.
///
/// # Arguments
///
/// - `format`: the format to dump the content as; if not supplied assumed to be
///   the document's existing format.
#[tracing::instrument(skip(self))]
pub async fn dump(&self, format: Option<String>) -> Result<String> {
    match format {
        // No format requested: return the stored content as-is
        None => Ok(self.content.clone()),
        // Encode the root node to the requested format
        Some(format) => {
            let root = self.root.read().await;
            codecs::to_string(&*root, &format, None).await
        }
    }
}
/// Load content into the document
///
/// If the format of the new content is different to the document's format
/// then the content will be converted to the document's format.
///
/// # Arguments
///
/// - `content`: the content to load into the document
/// - `format`: the format of the content; if not supplied assumed to be
///   the document's existing format.
///
/// Sets `status` to `Unwritten` and then updates the document.
#[tracing::instrument(skip(self, content))]
pub async fn load(&mut self, content: String, format: Option<String>) -> Result<()> {
    let mut decode_content = true;
    if let Some(format) = format {
        let other_format = formats::match_path(&format).spec();
        if other_format != self.format {
            // Content is in another format: decode it to a node and, for
            // non-binary documents, re-encode it to the document's own format
            let node = codecs::from_str(&content, &other_format.extension, None).await?;
            if !self.format.binary {
                self.content = codecs::to_string(&node, &self.format.extension, None).await?;
            }
            // Assign through the write guard directly (no intermediate
            // `&mut` binding needed)
            *self.root.write().await = node;
            // The root was decoded here, so `update` need not decode again
            decode_content = false;
        } else {
            self.content = content;
        }
    } else {
        self.content = content;
    };
    self.status = DocumentStatus::Unwritten;
    self.update(decode_content).await
}
/// Generate a [`Patch`] describing the operations needed to modify this
/// document so that it is equal to another.
#[tracing::instrument(skip(self, other))]
pub async fn diff(&self, other: &Document) -> Result<Patch> {
    // Hold read locks on both root nodes only for the duration of the diff
    let this_root = self.root.read().await;
    let other_root = other.root.read().await;
    Ok(diff(&*this_root, &*other_root))
}
/// Merge changes from two or more derived version into this document.
///
/// See documentation on the [`merge`] function for how any conflicts
/// are resolved.
///
/// Takes a write lock on this document's root and read locks on each
/// derived document's root for the duration of the merge.
#[tracing::instrument(skip(self, deriveds))]
pub async fn merge(&mut self, deriveds: &[Document]) -> Result<()> {
    let mut guard = self.root.write().await;
    // Need to store `let` bindings to read guards before dereferencing them
    let mut guards = Vec::new();
    for derived in deriveds {
        let guard = derived.root.read().await;
        guards.push(guard)
    }
    let others: Vec<&Node> = guards.iter().map(|guard| guard.deref()).collect();
    // Do the merge into root
    merge(&mut *guard, &others);
    // TODO updating of *content from root* and publishing of events etc needs to be sorted out
    if !self.format.binary {
        // Keep the string `content` in sync with the merged root node
        self.content = codecs::to_string(&*guard, &self.format.extension, None).await?;
    }
    // Drop root guard to allow update
    drop(guard);
    self.update(false).await?;
    Ok(())
}
/// A background task to patch the root node of the document on request
///
/// Use an unbounded channel for sending patches, so that sending threads never
/// block (if there are lots of patches) and thereby hold on to locks causing a
/// deadlock (because `patch_impl` needs them)
///
/// # Arguments
///
/// - `id`: The id of the document (used in the published event topic)
///
/// - `root`: The root [`Node`] to apply the patch to (will be write locked)
///
/// - `addresses`: The [`AddressMap`] to use to locate nodes within the root
///   node (will be read locked)
///
/// - `compile_sender`: Channel on which a [`CompileRequest`] is forwarded when
///   a patch request asks for compilation
///
/// - `request_receiver`: Channel on which [`PatchRequest`]s are received
///
/// - `response_sender`: Channel on which [`PatchResponse`]s are sent
async fn patch_task(
    id: &str,
    root: &Arc<RwLock<Node>>,
    addresses: &Arc<RwLock<AddressMap>>,
    compile_sender: &mpsc::Sender<CompileRequest>,
    request_receiver: &mut mpsc::UnboundedReceiver<PatchRequest>,
    response_sender: &watch::Sender<PatchResponse>,
) {
    // Runs until all request senders are dropped
    while let Some(request) = request_receiver.recv().await {
        tracing::trace!("Patching document `{}` for request `{}`", &id, request.id);
        let mut patch = request.patch;
        // Remember the patch's target so a follow-up compile can start there
        let start = patch.target.clone();
        // If the patch is empty then continue early rather than take locks etc
        if patch.is_empty() {
            continue;
        }
        // Block for minimal longevity locks
        {
            let root = &mut *root.write().await;
            let addresses = &*addresses.read().await;
            // If the patch has a `target` but no `address` then use `address_map` to populate the address
            // for faster patch application.
            if let (None, Some(node_id)) = (&patch.address, &patch.target) {
                if let Some(address) = addresses.get(node_id) {
                    patch.address = Some(address.clone());
                }
            }
            // Apply the patch to the root node
            apply(root, &patch);
            // Prepare the patch for publishing (while still holding the root lock)
            patch.prepublish(root);
        }
        // Publish the patch
        publish(
            &["documents:", id, ":patched"].concat(),
            &DocumentEvent {
                type_: DocumentEventType::Patched,
                patch: Some(patch),
                // TODO: The following are made `None` to keep the size of the event smaller but really
                // should be removed from the event (`Document:new()` is particularly wasteful of compute)
                document: Document::new(None, None),
                content: None,
                format: None,
            },
        );
        // Send response
        if let Err(..) = response_sender.send(PatchResponse::new(request.id.clone())) {
            tracing::error!(
                "While sending patch response for document `{}`: channel closed",
                id
            );
        }
        // Possibly compile, starting at the patch's target (if any)
        if request.compile {
            tracing::trace!(
                "Sending compile request for document `{}` for request `{}`",
                &id,
                request.id
            );
            if let Err(error) = compile_sender
                .send(CompileRequest {
                    id: request.id,
                    execute: request.execute,
                    start,
                })
                .await
            {
                tracing::error!(
                    "While sending compile request for document `{}`: {}",
                    id,
                    error
                );
            }
        }
    }
}
/// Apply a [`Patch`] to the root node of the document
///
/// # Arguments
///
/// - `patch`: The patch to apply
///
/// - `compile`: Should the document be compiled after the patch is applied?
///
/// - `execute`: Should the document be executed after the patch is applied and it is compiled?
///   If the patch has a `target` then the document will be executed from that
///   node, otherwise the entire document will be executed.
///
/// # Errors
///
/// Returns an error if the patch task has stopped (its request receiver
/// has been dropped).
///
/// This function will trigger a recompile of the document
#[tracing::instrument(skip(self, patch))]
pub async fn patch(&self, patch: Patch, compile: bool, execute: bool) -> Result<RequestId> {
    tracing::debug!("Patching document `{}`", self.id);
    let request = PatchRequest::new(patch, compile, execute);
    let request_id = request.id.clone();
    // Propagate a send failure rather than silently discarding it: previously
    // the `Result` of an `or_else(.. bail! ..)` was dropped, so a closed
    // channel still returned `Ok`
    if self.patch_request_sender.send(request).is_err() {
        bail!(
            "When sending patch request for document `{}`: the receiver has dropped",
            self.id
        )
    }
    Ok(request_id)
}
/// A background task to compile the root node of the document on request
///
/// # Arguments
///
/// - `id`: The id of the document
///
/// - `path`: The path of the document to be compiled
///
/// - `project`: The project of the document to be compiled
///
/// - `root`: The root [`Node`] to apply the compilation patch to
///
/// - `addresses`: The [`AddressMap`] to be updated
///
/// - `graph`: The [`Graph`] to be updated
///
/// - `patch_sender`: A [`Patch`] channel sender to send patches describing the changes to
/// compiled nodes
///
/// - `execute_sender`: An [`ExecuteMessage`] sender
///
/// - `compile_receiver`: An [`CompileMessage`] receiver
#[allow(clippy::too_many_arguments)]
pub async fn compile_task(
id: &str,
path: &Path,
project: &Path,
root: &Arc<RwLock<Node>>,
addresses: &Arc<RwLock<AddressMap>>,
graph: &Arc<RwLock<Graph>>,
patch_sender: &mpsc::UnboundedSender<PatchRequest>,
execute_sender: &mpsc::Sender<ExecuteRequest>,
request_receiver: &mut mpsc::Receiver<CompileRequest>,
response_sender: &watch::Sender<CompileResponse>,
) {
let duration = Duration::from_millis(300);
let mut last_request = None;
loop {
match tokio::time::timeout(duration, request_receiver.recv()).await {
// Compile request received, so record it and continue to wait for timeout
Ok(Some(request)) => {
last_request = Some(request);
continue;
}
// Sender dropped, end of task
Ok(None) => break,
// Timeout so do the following with the last unhandled request, if any
Err(..) => {}
};
if let Some(request) = last_request {
tracing::trace!("Compiling document `{}` for request `{}`", &id, request.id);
// Compile the root node
match compile(path, project, root, patch_sender).await {
Ok((new_addresses, new_graph)) => {
*addresses.write().await = new_addresses;
*graph.write().await = new_graph;
}
Err(error) => tracing::error!("While compiling document `{}`: {}", id, error),
}
// Send response
if let Err(..) = response_sender.send(CompileResponse::new(request.id.clone())) {
tracing::error!(
"While sending patch response for document `{}`: channel closed",
id
);
}
// Possibly execute
if request.execute {
tracing::trace!(
"Sending execute request for document `{}` for request `{}`",