@@ -1,7 +1,8 @@
 import path from "path";
 import fs from "fs";
 import {
-  addDirectory,
+  listFilesRecursively,
+  asyncPool,
   calculateFolderSize,
   waitForPromise,
   DataStore,
@@ -29,34 +30,51 @@ export const commit = async (): Promise<void> => {
 
   const digConfig = await loadDigConfig(process.cwd());
 
-  await addDirectory(
-    dataStore.Tree,
-    path.join(process.cwd(), digConfig.deploy_dir)
+  // Group into 1000 files per batch to make management and upload easier
+  const fileGroups = listFilesRecursively(
+    path.join(process.cwd(), digConfig.deploy_dir),
+    1000
   );
 
-  const newRootHash = dataStore.Tree.commit();
+  const concurrencyLimit = 10;
 
-  if (!newRootHash) {
-    return;
-  }
+  for (const files of fileGroups) {
+    await asyncPool(concurrencyLimit, files as string[], async (file) => {
+      const filePath = path.join(process.cwd(), digConfig.deploy_dir, file);
+      const relativePath = file.replace(/\\/g, "/");
+      const stream = fs.createReadStream(filePath);
+      await dataStore.Tree.upsertKey(
+        stream,
+        Buffer.from(relativePath).toString("hex")
+      );
+    });
 
-  const totalBytes = calculateFolderSize(
-    path.resolve(STORE_PATH, dataStore.StoreId)
-  );
+    const newRootHash = dataStore.Tree.commit();
 
-  console.log(
-    `Updating store metadata with new root hash: ${newRootHash}, bytes: ${totalBytes}`
-  );
+    if (!newRootHash) {
+      return;
+    }
 
-  const updatedStoreInfo = await dataStore.updateMetadata({
-    ...latestStore.metadata,
-    rootHash: Buffer.from(newRootHash, "hex"),
-    bytes: totalBytes,
-  });
+    const totalBytes = calculateFolderSize(
+      path.resolve(STORE_PATH, dataStore.StoreId)
+    );
 
-  await FullNodePeer.waitForConfirmation(
-    updatedStoreInfo.coin.parentCoinInfo
-  );
+    console.log(
+      `Updating store metadata with new root hash: ${newRootHash}, bytes: ${totalBytes}`
+    );
+
+    const updatedStoreInfo = await dataStore.updateMetadata({
+      ...latestStore.metadata,
+      rootHash: Buffer.from(newRootHash, "hex"),
+      bytes: totalBytes,
+    });
+
+    await FullNodePeer.waitForConfirmation(
+      updatedStoreInfo.coin.parentCoinInfo
+    );
+
+    await dataStore.fetchCoinInfo();
+  }
 
   await waitForPromise(
     () => dataStore.fetchCoinInfo(),
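For context: two helpers used by the new code, listFilesRecursively and asyncPool, are imported here but defined elsewhere in the repo. The sketches below are hypothetical reconstructions inferred only from the call sites above (a directory walker that returns relative paths grouped into batches of batchSize, and a concurrency-limited async map with a (limit, items, iterator) signature); the actual implementations may differ.

import fs from "fs";
import path from "path";

// Hypothetical sketch: walk `dir` recursively and return file paths
// relative to `dir`, grouped into batches of at most `batchSize`
// (1000 in the diff above).
function listFilesRecursively(dir: string, batchSize: number): string[][] {
  const files: string[] = [];
  const walk = (current: string) => {
    for (const entry of fs.readdirSync(current, { withFileTypes: true })) {
      const full = path.join(current, entry.name);
      if (entry.isDirectory()) {
        walk(full);
      } else {
        files.push(path.relative(dir, full));
      }
    }
  };
  walk(dir);
  const groups: string[][] = [];
  for (let i = 0; i < files.length; i += batchSize) {
    groups.push(files.slice(i, i + batchSize));
  }
  return groups;
}

// Hypothetical sketch: run `iterator` over `items`, keeping at most
// `limit` promises in flight at any time (a concurrency-limited map).
async function asyncPool<T, R>(
  limit: number,
  items: T[],
  iterator: (item: T) => Promise<R>
): Promise<R[]> {
  const results: Promise<R>[] = [];
  const executing: Promise<void>[] = [];
  for (const item of items) {
    const p = Promise.resolve().then(() => iterator(item));
    results.push(p);
    // Track the in-flight promise and drop it from the pool on settle.
    const e: Promise<void> = p.then(() => {
      executing.splice(executing.indexOf(e), 1);
    });
    executing.push(e);
    if (executing.length >= limit) {
      await Promise.race(executing); // wait until a slot frees up
    }
  }
  return Promise.all(results);
}

With these shapes in mind, the new commit flow reads: for each batch of up to 1000 files, stream every file into the tree under its hex-encoded, forward-slash-normalized relative path with at most 10 concurrent upserts, then commit the tree, update the store metadata with the new root hash, and wait for on-chain confirmation before moving on to the next batch.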