Upload files or folders recursively

  1. 利用Async-await读取并上传本地文件夹

利用Async-await读取并上传本地文件夹

利用Async-await写了读取本地文件并上传至服务器的程序。

Async-await写起来有种一气呵成的感觉,然而实际业务中要考虑到更为复杂的状态管理却用不了,故记录在这里。

const fs = require('fs')
const path = require('path')
const { createFoldAsync, uploadFileWithStreamAsync } = require('./server')
const crypto = require('crypto')
Promise.promisifyAll(fs) // babel would transform Promise to bluebird
/* return a new file name */
const getName = async (currPath, dirUUID, driveUUID) => {
return currPath.replace(/^.*\//, '') // TODO
}
/* splice file by given size */
const spliceFile = (size, perSize) => {
const parts = []
let position = 0
while (position < size) {
if (position + perSize >= size) {
parts.push({ start: position, end: size - 1 })
break
} else {
parts.push({ start: position, end: position + perSize - 1 })
position += perSize
}
}
return parts
}
/* calculate file's hash by part */
const hashFile = (filePath, part) => {
const hash = crypto.createHash('sha256')
hash.setEncoding('hex')
const fileStream = fs.createReadStream(filePath, { start: part.start, end: part.end })
const promise = new Promise((resolve, reject) => {
fileStream.on('end', () => {
hash.end()
resolve(hash.read())
})
fileStream.on('error', reject)
})
fileStream.pipe(hash)
return promise
}
/* calculate file's fingerprint */
const calcFingerprint = (hashs) => {
const hashBuffer = hashs.map(hash => typeof hash === 'string' ? Buffer.from(hash, 'hex') : hash)
return hashBuffer.reduce((accumulator, currentValue, currentIndex, array) => {
if (!currentIndex) {
accumulator.push(currentValue.toString('hex'))
} else {
const hash = crypto.createHash('sha256')
hash.update(Buffer.from(accumulator[currentIndex - 1], 'hex'))
hash.update(currentValue)
const digest = hash.digest('hex')
accumulator.push(digest)
}
return accumulator
}, [])
}
/* spliceFile -> hashFile -> calcFingerprint -> upload */
const uploadFileAsync = async (filePath, dirUUID, driveUUID, stat) => {
// console.log('uploadFileAsync', filePath, dirUUID, driveUUID, stat.size)
const parts = spliceFile(stat.size, 1024 * 1024 * 1024)
const promises = parts.map(part => hashFile(filePath, part))
const hashs = await Promise.all(promises)
const fp = calcFingerprint(hashs)
const newParts = parts.map((part, index) => Object.assign({}, part, { sha: hashs[index], fingerprint: fp[index] }))
const name = await getName(filePath, dirUUID, driveUUID)
const readStreams = newParts.map(part => fs.createReadStream(filePath, { start: part.start, end: part.end, autoClose: true }))
console.log('start upload file: ', name, 'size', stat.size)
for (let i = 0; i < newParts.length; i++) {
await uploadFileWithStreamAsync(driveUUID, dirUUID, name, newParts[i], readStreams[i])
}
}
/* create fold and return the uuid */
const creatFoldAsync = async (foldPath, dirUUID, driveUUID) => {
const dirname = await getName(foldPath, dirUUID, driveUUID)
const entries = await createFoldAsync(driveUUID, dirUUID, dirname)
if (!entries) return null
const uuid = entries.find(entry => entry.name === dirname).uuid
console.log('creatFoldAsync success uuid: ', uuid)
return uuid
}
/* readUploadInfo would visit list of directories or files recursively */
const readUploadInfo = async (entries, dirUUID, driveUUID) => {
for (let i = 0; i < entries.length; i++) {
const entry = entries[i]
const stat = await fs.lstatAsync(path.resolve(entry))
if (stat.isDirectory()) {
const children = await fs.readdirAsync(path.resolve(entry))
const uuid = await creatFoldAsync(entry, dirUUID, driveUUID)
const newEntries = []
children.forEach(c => newEntries.push(path.join(entry, c)))
await readUploadInfo(newEntries, uuid, driveUUID)
} else {
await uploadFileAsync(entry, dirUUID, driveUUID, stat)
}
}
}
export { readUploadInfo }