// stories-writer.ts
import { Readable, Transform, Writable } from 'stream';
import { asyncToStream, GenerateToFileOptions, writeToFiles } from './async-writer';
import { ExternalId, MetadataStreamOptions, Story } from './editor-types';
/**
* Takes a generator of stories, and writes the stories into .txt.gz files. Each file will have 1000 stories.
*
 * You can provide streams for various metadata to be written to.
* See {@link createAuthorStream} and {@link createSectionStream} for examples
*
* ### Example
* ```ts
* import { batchStream, Story, writeStories } from '@quintype/migration-helpers';
*
* async function* readStoriesFromDatabase(): AsyncIterableIterator<Story> {
* const txn = createDbTxn();
* const results = txn.runQuery("select * from stories");
* while(results.hasNext()) {
* yield rowToStory(results.next());
* }
* txn.close();
* }
*
* writeStories(readStoriesFromDatabase(), 'interviews')
* ```
*
* You may also use a stream to produce stories. Use {@link batchStream} to efficiently load stories.
*
* ```ts
* import { Story, writeStories } from '@quintype/migration-helpers';
* import { Transform } from 'stream'
*
* const stream = conn.query("select * from stories").stream();
*
* async function convertRowsToStories(rows: ReadonlyArray<any>): Promise<ReadonlyArray<Story>> {
* const relatedData = await loadSomeData(rows.map(row => row.storyId));
 *   return rows.map(row => rowToStory(row, relatedData));
* }
*
* writeStories(stream.pipe(batchStream(100, convertRowsToStories)));
* ```
*
* @param stream An Async Generator or Readable which yields stories
* @param source A string describing where the stories come from. ex: interviews
* @param opts Control some fine grained tuning
*/
export function writeStories(
generator: AsyncIterableIterator<Story> | Readable,
source: string = 'export',
opts: GenerateToFileOptions & MetadataStreamOptions = {}
): Promise<void> {
const filePrefix = opts.filePrefix ? `story-${source}-${opts.filePrefix}` : `story-${source}`;
const stream = asyncToStream(generator)
.pipe(teeStoryToMetadataStream(story => story.authors, opts.authorStream))
.pipe(teeStoryToMetadataStream(story => story.sections, opts.sectionStream))
.pipe(
teeStoryToMetadataStream(
story =>
(story.metadata && story.metadata['story-attributes']
? Object.keys(story.metadata['story-attributes'])
: []
).map(i => ({ 'external-id': i, name: i })),
opts.storyAttributeStream
)
);
return writeToFiles(stream, { filePrefix, ...opts });
}
/**
 * Builds a pass-through Transform: every story flows through unchanged, and
 * for each story the entities produced by `f(story)` are written to the
 * optional metadata stream (when one is given).
 * @private
 */
function teeStoryToMetadataStream(f: (story: Story) => ReadonlyArray<ExternalId>, stream?: Writable): Transform {
  return new Transform({
    objectMode: true,
    transform(story: Story, _, callback): void {
      // tslint:disable:no-if-statement no-expression-statement
      if (stream !== undefined) {
        f(story).forEach(entity => stream.write(entity));
      }
      // Forward the story itself downstream untouched.
      callback(null, story);
      // tslint:enable:no-if-statement no-expression-statement
    }
  });
}