fix: file append transaction id after deserialization (#2583)
* feat: add chunk interval property in FileAppendTransaction

Signed-off-by: Svetoslav Borislavov <[email protected]>

* feat: add unit test for setChunkInterval with freeze

Signed-off-by: Svetoslav Borislavov <[email protected]>

* fix: lost transaction id after deserializing incomplete transaction

Signed-off-by: Svetoslav Borislavov <[email protected]>

* feat: add integration test for validation of transaction id after deserialization

Signed-off-by: Svetoslav Borislavov <[email protected]>

* feat: add check in file append integration test

Signed-off-by: Svetoslav Borislavov <[email protected]>

* refactor: remove code duplication

Signed-off-by: Svetoslav Borislavov <[email protected]>

* fix: add missing properties from file append

Signed-off-by: Svetoslav Borislavov <[email protected]>

* refactor: FileAppend unit test

Signed-off-by: Svetoslav Borislavov <[email protected]>

* feat: add chunk interval recover on deserialize if possible

Signed-off-by: Svetoslav Borislavov <[email protected]>

---------

Signed-off-by: Svetoslav Borislavov <[email protected]>
SvetBorislavov authored Oct 16, 2024
1 parent 34ffa6b commit e1e119e
Showing 3 changed files with 203 additions and 10 deletions.
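
Before this change, an incomplete (non-frozen) FileAppendTransaction lost its transaction ID when round-tripped through toBytes()/fromBytes(), because deserialization regenerated dummy IDs. The sketch below is adapted from the integration test added in this commit; client, operatorId, operatorKey, fileId, and contents are assumed to already exist, and the import path assumes the published @hashgraph/sdk package.

import {
    FileAppendTransaction,
    TransactionId,
    Timestamp,
} from "@hashgraph/sdk";

// Assumed to exist: client, operatorId, operatorKey, fileId, contents.
const validStart = Timestamp.fromDate(new Date());

const tx = new FileAppendTransaction()
    .setTransactionId(TransactionId.withValidStart(operatorId, validStart))
    .setFileId(fileId)
    .setChunkSize(1000)
    .setChunkInterval(230) // nanoseconds between per-chunk validStart values
    .setContents(contents);

// Round-trip through bytes *before* freezing; with this fix the
// transaction id, chunk size, chunk interval, and max chunks survive.
const restored = FileAppendTransaction.fromBytes(tx.toBytes());

restored.freezeWith(client);
await restored.sign(operatorKey);
const receipt = await (await restored.execute(client)).getReceipt(client);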
56 changes: 53 additions & 3 deletions src/file/FileAppendTransaction.js
@@ -60,6 +60,7 @@ export default class FileAppendTransaction extends Transaction {
* @param {Uint8Array | string} [props.contents]
* @param {number} [props.maxChunks]
* @param {number} [props.chunkSize]
* @param {number} [props.chunkInterval]
*/
constructor(props = {}) {
super();
@@ -88,6 +89,12 @@
*/
this._chunkSize = 4096;

/**
* @private
* @type {number}
*/
this._chunkInterval = 10;

this._defaultMaxTransactionFee = new Hbar(5);

if (props.fileId != null) {
@@ -106,6 +113,10 @@
this.setChunkSize(props.chunkSize);
}

if (props.chunkInterval != null) {
this.setChunkInterval(props.chunkInterval);
}

/** @type {List<TransactionId>} */
this._transactionIds = new List();
}
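
For illustration, the same configuration can be passed through the constructor props handled above (all values below are hypothetical):

const tx = new FileAppendTransaction({
    fileId,             // assumed FileId instance
    contents: "...",    // Uint8Array or string
    maxChunks: 20,
    chunkSize: 1024,
    chunkInterval: 230, // nanoseconds
});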
@@ -168,6 +179,19 @@
contents = concat;
}

const chunkSize = append.contents?.length || undefined;
const maxChunks = bodies.length || undefined;
let chunkInterval;
if (transactionIds.length > 1) {
const firstValidStart = transactionIds[0].validStart;
const secondValidStart = transactionIds[1].validStart;
if (firstValidStart && secondValidStart) {
chunkInterval = secondValidStart.nanos
.sub(firstValidStart.nanos)
.toNumber();
}
}

return Transaction._fromProtobufTransactions(
new FileAppendTransaction({
fileId:
@@ -178,7 +202,10 @@
),
)
: undefined,
contents: contents,
contents,
chunkSize,
maxChunks,
chunkInterval,
}),
transactions,
signedTransactions,
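
A worked sketch of the recovery arithmetic above: when more than one chunk transaction id is present, the interval is the difference between the first two chunks' validStart nanos. The Long values here are hypothetical.

import Long from "long";

const firstNanos = Long.fromNumber(15);   // chunk 0 validStart.nanos (hypothetical)
const secondNanos = Long.fromNumber(245); // chunk 1 validStart.nanos (hypothetical)
const chunkInterval = secondNanos.sub(firstNanos).toNumber(); // 230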
@@ -300,6 +327,22 @@
return this;
}

/**
* @returns {number}
*/
get chunkInterval() {
return this._chunkInterval;
}

/**
* @param {number} chunkInterval The valid start interval between chunks in nanoseconds
* @returns {this}
*/
setChunkInterval(chunkInterval) {
this._chunkInterval = chunkInterval;
return this;
}

/**
* Freeze this transaction from further modification to prepare for
* signing or serialization.
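
The default interval is 10 ns (set in the constructor above); a minimal sketch of reading and overriding it:

const tx = new FileAppendTransaction();
console.log(tx.chunkInterval); // 10 (the default)

tx.setChunkInterval(230); // chainable, like the other setters
console.log(tx.chunkInterval); // 230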
@@ -344,7 +387,7 @@
).seconds,
/** @type {Timestamp} */ (
nextTransactionId.validStart
).nanos.add(1),
).nanos.add(this._chunkInterval),
),
);
}
@@ -465,6 +508,10 @@
*/
_buildIncompleteTransactions() {
const dummyAccountId = AccountId.fromString("0.0.0");
const accountId = this.transactionId?.accountId || dummyAccountId;
const validStart =
this.transactionId?.validStart || Timestamp.fromDate(new Date());

if (this._contents == null) {
throw new Error("contents is not set");
}
@@ -483,7 +530,10 @@
this._signedTransactions.clear();

for (let chunk = 0; chunk < this.getRequiredChunks(); chunk++) {
let nextTransactionId = TransactionId.generate(dummyAccountId);
let nextTransactionId = TransactionId.withValidStart(
accountId,
validStart.plusNanos(this._chunkInterval * chunk),
);
this._transactionIds.push(nextTransactionId);
this._transactionIds.advance();

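The per-chunk ids built above are deterministic: chunk n gets validStart plus n * chunkInterval nanoseconds, so the same id sequence can be reproduced after deserialization. A standalone sketch (the account id and interval are illustrative):

import { AccountId, Timestamp, TransactionId } from "@hashgraph/sdk";

const accountId = AccountId.fromString("0.0.1234"); // illustrative
const validStart = Timestamp.fromDate(new Date());
const chunkInterval = 230;

// Transaction id for the n-th chunk, spaced by the configured interval.
const idForChunk = (chunk) =>
    TransactionId.withValidStart(
        accountId,
        validStart.plusNanos(chunkInterval * chunk),
    );
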
93 changes: 93 additions & 0 deletions test/integration/FileAppendIntegrationTest.js
@@ -6,6 +8,8 @@ import {
FileInfoQuery,
Hbar,
Status,
Timestamp,
TransactionId,
} from "../../src/exports.js";
import { bigContents } from "./contents.js";
import IntegrationTestEnv from "./client/NodeIntegrationTestEnv.js";
@@ -22,6 +24,7 @@ describe("FileAppend", function () {
newContents = generateUInt8Array(newContentsLength);
operatorKey = env.operatorKey.publicKey;
});

it("should be executable", async function () {
let response = await new FileCreateTransaction()
.setKeys([operatorKey])
@@ -351,6 +354,96 @@ describe("FileAppend", function () {
expect(receipt.status).to.be.equal(Status.Success);
});

it("should keep transaction id after non-frozen deserialization", async function () {
const operatorKey = env.operatorKey.publicKey;

let response = await new FileCreateTransaction()
.setKeys([operatorKey])
.setContents(Buffer.from(""))
.execute(env.client);

let { fileId } = await response.getReceipt(env.client);

const chunkInterval = 230;
const validStart = Timestamp.fromDate(new Date());

const tx = new FileAppendTransaction()
.setTransactionId(
TransactionId.withValidStart(env.operatorId, validStart),
)
.setFileId(fileId)
.setChunkInterval(chunkInterval)
.setChunkSize(1000)
.setContents(newContents);

const txBytes = tx.toBytes();
const txFromBytes = FileAppendTransaction.fromBytes(txBytes);

expect(
txFromBytes.transactionId.accountId._toProtobuf(),
).to.be.deep.equal(env.operatorId?._toProtobuf());
expect(txFromBytes.transactionId.validStart).to.be.deep.equal(
validStart,
);

txFromBytes._transactionIds.list.forEach(
(transactionId, index, array) => {
if (index > 0) {
const previousTimestamp = array[index - 1].validStart;
const currentTimestamp = transactionId.validStart;
const difference =
currentTimestamp.nanos - previousTimestamp.nanos;
expect(difference).to.be.equal(chunkInterval);
}
},
);

txFromBytes.freezeWith(env.client);
await txFromBytes.sign(env.operatorKey);

const receipt = await (
await txFromBytes.execute(env.client)
).getReceipt(env.client);
expect(receipt.status).to.be.equal(Status.Success);
});

it("should keep chunk size, chunk interval and correct max chunks after deserialization", async function () {
const operatorKey = env.operatorKey.publicKey;
const chunkSize = 1024;
const chunkInterval = 230;

let response = await new FileCreateTransaction()
.setKeys([operatorKey])
.setContents(Buffer.from(""))
.execute(env.client);

let { fileId } = await response.getReceipt(env.client);

const tx = new FileAppendTransaction()
.setFileId(fileId)
.setChunkSize(chunkSize)
.setChunkInterval(chunkInterval)
.setMaxChunks(99999)
.setContents(newContents);

const txBytes = tx.toBytes();
const txFromBytes = FileAppendTransaction.fromBytes(txBytes);

expect(txFromBytes.chunkSize).to.be.equal(1024);
expect(txFromBytes.maxChunks).to.be.equal(
txFromBytes.getRequiredChunks(),
);
expect(txFromBytes.chunkInterval).to.be.equal(230);

txFromBytes.freezeWith(env.client);
await txFromBytes.sign(env.operatorKey);

const receipt = await (
await txFromBytes.execute(env.client)
).getReceipt(env.client);
expect(receipt.status).to.be.equal(Status.Success);
});

after(async function () {
await env.close();
});
64 changes: 57 additions & 7 deletions test/unit/FileAppendTransaction.js
@@ -11,21 +11,24 @@ import {
import Long from "long";

describe("FileAppendTransaction", function () {
const spenderAccountId1 = new AccountId(7);
const fileId = new FileId(8);
const nodeAccountId = new AccountId(10, 11, 12);
const timestamp1 = new Timestamp(14, 15);
const fee = new Hbar(5);
const chunkSize = 1000;

it("setChunkSize()", function () {
const spenderAccountId1 = new AccountId(7);
const fileId = new FileId(8);
const nodeAccountId = new AccountId(10, 11, 12);
const timestamp1 = new Timestamp(14, 15);
const fee = new Hbar(5);
const contents = "1".repeat(1000) + "2".repeat(1000) + "3".repeat(1000);

let transaction = new FileAppendTransaction()
.setTransactionId(
TransactionId.withValidStart(spenderAccountId1, timestamp1),
)
.setNodeAccountIds([nodeAccountId])
.setFileId(fileId)
.setChunkSize(1000)
.setContents("1".repeat(1000) + "2".repeat(1000) + "3".repeat(1000))
.setChunkSize(chunkSize)
.setContents(contents)
.freeze();

const transactionId = transaction.transactionId;
@@ -88,4 +91,51 @@
expect(body.fileAppend.contents.length).to.be.equal(1000);
expect(body.fileAppend.contents[0]).to.be.equal(51);
});

it("setChunkInterval()", function () {
const contents = "1".repeat(1000) + "2".repeat(1000) + "3".repeat(1000);
const chunkInterval = 200;

let transaction = new FileAppendTransaction()
.setTransactionId(
TransactionId.withValidStart(spenderAccountId1, timestamp1),
)
.setNodeAccountIds([nodeAccountId])
.setFileId(fileId)
.setChunkSize(chunkSize)
.setContents(contents)
.setChunkInterval(chunkInterval)
.freeze();

expect(transaction._transactionIds.list.length).to.be.equal(3);
const requiredChunks = contents.length / chunkSize;

let body = transaction._makeTransactionBody(nodeAccountId);

expect(body.transactionID).to.deep.equal(
transaction._transactionIds.list[0]._toProtobuf(),
);

for (let i = 1; i < requiredChunks; i++) {
transaction._transactionIds.advance();
body = transaction._makeTransactionBody(nodeAccountId);
expect(body.transactionID).to.deep.equal(
transaction._transactionIds.list[i]._toProtobuf(),
);

expect(
transaction._transactionIds.list[i].validStart.nanos.sub(
transaction._transactionIds.list[i - 1].validStart.nanos,
),
).to.deep.equal(Long.fromNumber(chunkInterval));
}

expect(
transaction._transactionIds.list[
requiredChunks - 1
].validStart.nanos.sub(
transaction._transactionIds.list[0].validStart.nanos,
),
).to.deep.equal(Long.fromNumber(chunkInterval * (requiredChunks - 1)));
});
});
