Skip to content

Commit eaaa87c

Browse files
Merge pull request #15 from DIG-Network/release/v0.0.1-alpha.15
Release/v0.0.1 alpha.15
2 parents 119cede + 36f1f13 commit eaaa87c

File tree

5 files changed

+90
-38
lines changed

5 files changed

+90
-38
lines changed

CHANGELOG.md

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,13 @@
22

33
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
44

5+
### [0.0.1-alpha.15](https://github.com/DIG-Network/dig-chia-sdk/compare/v0.0.1-alpha.14...v0.0.1-alpha.15) (2024-09-10)
6+
7+
8+
### Bug Fixes
9+
10+
* write stream in tree ([b10d6a2](https://github.com/DIG-Network/dig-chia-sdk/commit/b10d6a2489fc66ee8c8c51546b0521f39aee3c24))
11+
512
### [0.0.1-alpha.14](https://github.com/DIG-Network/dig-chia-sdk/compare/v0.0.1-alpha.10...v0.0.1-alpha.14) (2024-09-10)
613

714

package-lock.json

Lines changed: 2 additions & 2 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "@dignetwork/dig-sdk",
3-
"version": "0.0.1-alpha.14",
3+
"version": "0.0.1-alpha.15",
44
"description": "",
55
"type": "commonjs",
66
"main": "./dist/index.js",

src/DataIntegrityTree/DataIntegrityTree.ts

Lines changed: 61 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -188,46 +188,52 @@ class DataIntegrityTree {
188188
if (!isHexString(key)) {
189189
throw new Error(`key must be a valid hex string: ${key}`);
190190
}
191+
191192
const uncompressedHash = crypto.createHash("sha256");
192193
const gzip = zlib.createGzip();
193-
194+
194195
let sha256: string;
195196
const tempDir = path.join(this.storeDir, "tmp");
196197
if (!fs.existsSync(tempDir)) {
197198
fs.mkdirSync(tempDir, { recursive: true });
198199
}
199200
const tempFilePath = path.join(tempDir, `${crypto.randomUUID()}.gz`);
200-
201+
201202
return new Promise((resolve, reject) => {
202203
const tempWriteStream = fs.createWriteStream(tempFilePath);
203-
204+
205+
// Update the hash with the original data
204206
readStream.on("data", (chunk) => {
205207
uncompressedHash.update(chunk);
206208
});
207-
209+
210+
// Pipe the read stream through gzip into the temporary write stream
208211
readStream.pipe(gzip).pipe(tempWriteStream);
209-
212+
210213
tempWriteStream.on("finish", async () => {
211-
sha256 = uncompressedHash.digest("hex");
212-
213-
const finalWriteStream = this._createWriteStream(sha256);
214-
const finalPath = finalWriteStream.path as string;
215-
216-
// Ensure the directory exists before copying the file
217-
const finalDir = path.dirname(finalPath);
218-
if (!fs.existsSync(finalDir)) {
219-
fs.mkdirSync(finalDir, { recursive: true });
220-
}
221-
214+
let finalWriteStream: fs.WriteStream | undefined;
222215
try {
216+
sha256 = uncompressedHash.digest("hex");
217+
218+
finalWriteStream = this._createWriteStream(sha256);
219+
const finalPath = finalWriteStream.path as string;
220+
221+
// Ensure the directory exists
222+
const finalDir = path.dirname(finalPath);
223+
if (!fs.existsSync(finalDir)) {
224+
fs.mkdirSync(finalDir, { recursive: true });
225+
}
226+
227+
// Copy the temporary gzipped file to the final destination
223228
await this._streamFile(tempFilePath, finalPath);
224-
await unlink(tempFilePath);
225-
229+
await unlink(tempFilePath); // Clean up the temporary file
230+
226231
const combinedHash = crypto
227232
.createHash("sha256")
228233
.update(`${key}/${sha256}`)
229234
.digest("hex");
230-
235+
236+
// Check if the key already exists with the same hash
231237
if (
232238
Array.from(this.files.values()).some(
233239
(file) => file.hash === combinedHash
@@ -236,28 +242,58 @@ class DataIntegrityTree {
236242
console.log(`No changes detected for key: ${key}`);
237243
return resolve();
238244
}
239-
245+
246+
// Delete existing key if present
240247
if (this.files.has(key)) {
241248
this.deleteKey(key);
242249
}
243-
250+
251+
// Insert the new key with the hash
244252
console.log(`Inserted key: ${key}`);
245253
this.files.set(key, {
246254
hash: combinedHash,
247255
sha256: sha256,
248256
});
257+
249258
this._rebuildTree();
250259
resolve();
251260
} catch (err) {
261+
// On error, cleanup the temporary file and reject
262+
await unlink(tempFilePath).catch(() => {});
252263
reject(err);
264+
} finally {
265+
// Always close the final write stream if it exists
266+
if (finalWriteStream) {
267+
finalWriteStream.end();
268+
}
253269
}
254270
});
255-
256-
tempWriteStream.on("error", (err) => {
271+
272+
tempWriteStream.on("error", async (err) => {
273+
// Close streams and clean up in case of error
274+
tempWriteStream.destroy();
275+
gzip.destroy();
276+
readStream.destroy();
277+
278+
await unlink(tempFilePath).catch(() => {}); // Clean up the temp file
257279
reject(err);
258280
});
259-
260-
readStream.on("error", (err) => {
281+
282+
readStream.on("error", async (err) => {
283+
// Close streams and clean up in case of error
284+
tempWriteStream.destroy();
285+
gzip.destroy();
286+
readStream.destroy();
287+
288+
await unlink(tempFilePath).catch(() => {}); // Clean up the temp file
289+
reject(err);
290+
});
291+
292+
gzip.on("error", (err) => {
293+
// Handle errors in the gzip stream
294+
tempWriteStream.destroy();
295+
gzip.destroy();
296+
readStream.destroy();
261297
reject(err);
262298
});
263299
});

src/utils/directoryUtils.ts

Lines changed: 19 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,10 @@ import ignore from "ignore";
44
import { DataIntegrityTree } from "../DataIntegrityTree";
55

66
// Custom concurrency handler
7-
const limitConcurrency = async (concurrencyLimit: number, tasks: (() => Promise<void>)[]) => {
7+
const limitConcurrency = async (
8+
concurrencyLimit: number,
9+
tasks: (() => Promise<void>)[]
10+
) => {
811
const results = [];
912
const executing: Promise<void>[] = [];
1013

@@ -61,16 +64,22 @@ export const addDirectory = async (
6164
tasks.push(() => addDirectory(datalayer, filePath, baseDir));
6265
} else {
6366
// Add a task for each file to be processed
64-
tasks.push(() =>
65-
new Promise<void>((resolve, reject) => {
66-
const stream = fs.createReadStream(filePath);
67-
datalayer
68-
.upsertKey(stream, Buffer.from(relativePath).toString("hex"))
69-
.then(resolve)
70-
.catch(reject);
71-
})
67+
tasks.push(
68+
() =>
69+
new Promise<void>((resolve, reject) => {
70+
const stream = fs.createReadStream(filePath);
71+
datalayer
72+
.upsertKey(stream, Buffer.from(relativePath).toString("hex"))
73+
.then(async () => {
74+
await new Promise<void>((resolve) => setTimeout(resolve, 100));
75+
resolve();
76+
})
77+
.catch(reject);
78+
})
7279
);
7380
}
81+
82+
await new Promise<void>((resolve) => setTimeout(resolve, 100));
7483
}
7584

7685
// Run tasks with limited concurrency (set the concurrency limit as needed)
@@ -99,4 +108,4 @@ export const calculateFolderSize = (folderPath: string): bigint => {
99108
}
100109

101110
return totalSize;
102-
};
111+
};

0 commit comments

Comments
 (0)