s3-files.js
const Stream = require('stream')
const { S3Client, GetObjectCommand } = require('@aws-sdk/client-s3')
const streamify = require('stream-array')
const concat = require('concat-stream')
const path = require('path')

const s3Files = {}
module.exports = s3Files

// Configure the S3 client and target bucket. Pass an existing S3Client via
// opts.s3, otherwise a new client is created for the given opts.region.
s3Files.connect = function (opts) {
  const self = this

  if ('s3' in opts) {
    self.s3 = opts.s3
  } else {
    self.s3 = new S3Client({
      region: opts.region
    })
  }

  self.bucket = opts.bucket
  return self
}

// Turn an array of object keys (optionally prefixed with a folder) into a
// readable stream of full S3 keys.
s3Files.createKeyStream = function (folder, keys) {
  if (!keys) return null
  const paths = []
  keys.forEach(function (key) {
    if (folder) {
      paths.push(path.posix.join(folder, key))
    } else {
      paths.push(key)
    }
  })
  return streamify(paths)
}

// Consume a stream of keys and return a readable stream that emits
// { data, path } objects, fetching at most five objects at a time.
s3Files.createFileStream = function (keyStream, preserveFolderPath) {
  const self = this
  if (!self.bucket) return null

  const rs = new Stream()
  rs.readable = true

  let fileCounter = 0
  keyStream.on('data', async function (file) {
    fileCounter += 1
    if (fileCounter > 5) {
      keyStream.pause() // we add some 'throttling' there
    }
    // console.log('->file', file);
    const params = { Bucket: self.bucket, Key: file }

    let s3File
    try {
      const response = await self.s3.send(new GetObjectCommand(params))
      s3File = response.Body
    } catch (err) {
      // Surface request failures (missing key, access denied, ...) on the
      // output stream instead of leaving an unhandled promise rejection.
      err.file = file
      fileCounter -= 1
      if (keyStream.isPaused()) keyStream.resume()
      rs.emit('error', err)
      return
    }

    s3File.pipe(
      concat(function buffersEmit (buffer) {
        // console.log('buffers concatenated, emit data for ', file);
        const path = preserveFolderPath ? file : file.replace(/^.*[\\/]/, '')
        rs.emit('data', { data: buffer, path })
      })
    )
    s3File.on('end', function () {
      fileCounter -= 1
      if (keyStream.isPaused()) {
        keyStream.resume()
      }
      if (fileCounter < 1) {
        // console.log('all files processed, emit end');
        rs.emit('end')
      }
    })
    s3File.on('error', function (err) {
      err.file = file
      rs.emit('error', err)
    })
  })
  return rs
}
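
Below is a minimal usage sketch, not part of s3-files.js itself, showing the flow the module exposes: connect to a bucket, build a key stream, then consume the resulting file stream. The bucket, region, folder, and key names are placeholders.

// Usage sketch only; bucket, region, folder and key names are placeholders.
const s3Files = require('s3-files')
const fs = require('fs')

const keyStream = s3Files
  .connect({ bucket: 'my-bucket', region: 'eu-west-1' })
  .createKeyStream('some/folder', ['a.txt', 'b.txt'])

s3Files.createFileStream(keyStream)
  .on('data', function (chunk) {
    // chunk.path is the key's basename (the full key when preserveFolderPath
    // is true); chunk.data is the object's contents as a single Buffer
    fs.writeFileSync(chunk.path, chunk.data)
  })
  .on('error', function (err) {
    console.error('failed to fetch', err.file, err)
  })
  .on('end', function () {
    console.log('all files downloaded')
  })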