Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 22 additions & 1 deletion contracts/HistoricalSPVGateway.sol
Original file line number Diff line number Diff line change
Expand Up @@ -14,14 +14,14 @@ import {SPVGateway} from "./SPVGateway.sol";

contract HistoricalSPVGateway is IHistoricalSPVGateway, SPVGateway {
using BlockHeader for bytes;
using TargetsHelper for bytes32;
using EndianConverter for bytes32;
using BlockHistory for bytes32[];

bytes32 public constant HISTORICAL_SPV_GATEWAY_STORAGE_SLOT =
keccak256("spv.gateway.historical.spv.gateway.storage");

struct HistoricalSPVGatewayStorage {
uint32 lastHistoryEpochStartTime;
uint256 historyBlocksCount;
bytes32 historyBlocksTreeRoot;
}
Expand All @@ -41,6 +41,7 @@ contract HistoricalSPVGateway is IHistoricalSPVGateway, SPVGateway {
function __HistoricalSPVGateway_init(
bytes calldata blockHeaderRaw_,
uint64 blockHeight_,
uint32 lastHistoryEpochStartTime_,
uint256 cumulativeWork_,
bytes32 historyBlocksTreeRoot_,
BlockHistory.HistoryProofData calldata proofData_
Expand All @@ -53,12 +54,14 @@ contract HistoricalSPVGateway is IHistoricalSPVGateway, SPVGateway {
historyBlocksTreeRoot_,
blockHash_,
blockHeight_,
lastHistoryEpochStartTime_,
cumulativeWork_,
proofData_
);

_initialize(blockHeader_, blockHash_, blockHeight_, cumulativeWork_);

_getHistoricalSPVGatewayStorage().lastHistoryEpochStartTime = lastHistoryEpochStartTime_;
_getHistoricalSPVGatewayStorage().historyBlocksCount = blockHeight_;
_getHistoricalSPVGatewayStorage().historyBlocksTreeRoot = historyBlocksTreeRoot_;
}
Expand Down Expand Up @@ -107,6 +110,11 @@ contract HistoricalSPVGateway is IHistoricalSPVGateway, SPVGateway {
);
}

/// @inheritdoc IHistoricalSPVGateway
function getLastHistoryEpochStartTime() public view returns (uint32) {
return _getHistoricalSPVGatewayStorage().lastHistoryEpochStartTime;
}

/// @inheritdoc IHistoricalSPVGateway
function getHistoryBlocksCount() public view returns (uint256) {
return _getHistoricalSPVGatewayStorage().historyBlocksCount;
Expand All @@ -116,4 +124,17 @@ contract HistoricalSPVGateway is IHistoricalSPVGateway, SPVGateway {
function getHistoryBlocksTreeRoot() public view returns (bytes32) {
return _getHistoricalSPVGatewayStorage().historyBlocksTreeRoot;
}

function _getEpochPassedTime(uint64 blockHeight_) internal view override returns (uint32) {
uint64 startEpochBlockHeight_ = blockHeight_ -
TargetsHelper.DIFFICULTY_ADJUSTMENT_INTERVAL;

if (startEpochBlockHeight_ > getHistoryBlocksCount()) {
return super._getEpochPassedTime(blockHeight_);
}

uint32 epochEndTime_ = _getBlockHeaderTime(getBlockHash(blockHeight_ - 1));

return epochEndTime_ - getLastHistoryEpochStartTime();
}
}
18 changes: 12 additions & 6 deletions contracts/SPVGateway.sol
Original file line number Diff line number Diff line change
Expand Up @@ -335,13 +335,10 @@ contract SPVGateway is ISPVGateway, Initializable {
if (TargetsHelper.isTargetAdjustmentBlock(blockHeight_)) {
$.lastEpochCumulativeWork += TargetsHelper.countEpochCumulativeWork(currentTarget_);

uint32 epochStartTime_ = _getBlockHeaderTime(
getBlockHash(blockHeight_ - TargetsHelper.DIFFICULTY_ADJUSTMENT_INTERVAL)
currentTarget_ = TargetsHelper.countNewRoundedTarget(
currentTarget_,
_getEpochPassedTime(blockHeight_)
);
uint32 epochEndTime_ = _getBlockHeaderTime(getBlockHash(blockHeight_ - 1));
uint32 passedTime_ = epochEndTime_ - epochStartTime_;

currentTarget_ = TargetsHelper.countNewRoundedTarget(currentTarget_, passedTime_);
}

return currentTarget_;
Expand Down Expand Up @@ -439,6 +436,15 @@ contract SPVGateway is ISPVGateway, Initializable {
return _getMedianTime(blocksTime_, needsSort_);
}

function _getEpochPassedTime(uint64 blockHeight_) internal view virtual returns (uint32) {
uint32 epochStartTime_ = _getBlockHeaderTime(
getBlockHash(blockHeight_ - TargetsHelper.DIFFICULTY_ADJUSTMENT_INTERVAL)
);
uint32 epochEndTime_ = _getBlockHeaderTime(getBlockHash(blockHeight_ - 1));

return epochEndTime_ - epochStartTime_;
}

function _getBlockCumulativeWork(
uint64 blockHeight_,
bytes32 blockHash_
Expand Down
6 changes: 6 additions & 0 deletions contracts/interfaces/IHistoricalSPVGateway.sol
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,12 @@ interface IHistoricalSPVGateway is ISPVGateway {
HistoryBlockInclusionProofData calldata inclusionProofData_
) external view returns (bool);

    /**
     * @notice Gets the start time of the last history epoch.
     * @dev The value is verified against the history ZK proof and stored during
     * gateway initialization.
     * @return The start time (uint32 timestamp) of the last history epoch.
     */
    function getLastHistoryEpochStartTime() external view returns (uint32);

/**
* @notice Gets the total number of historical blocks stored.
* @return Returns the number of historical blocks.
Expand Down
40 changes: 34 additions & 6 deletions contracts/libs/BlockHistory.sol
Original file line number Diff line number Diff line change
Expand Up @@ -39,8 +39,9 @@ library BlockHistory {
error InvalidProofBlockHash();
error InvalidProofBlockHeight();
error InvalidProofCumulativeWork();
error InvalidProof();
error InvalidProofEpochStartTime();
error InvalidHistoryBlocksTreeRoot();
error InvalidProof();

/**
* @notice Verifies a ZK-SNARK proof for the Bitcoin history.
Expand All @@ -55,6 +56,7 @@ library BlockHistory {
bytes32 historyBlocksTreeRoot_,
bytes32 blockHash_,
uint64 blockHeight_,
uint32 currentEpochStartTime_,
uint256 cumulativeWork_,
HistoryProofData calldata proofData_
) internal view returns (bool) {
Expand All @@ -64,6 +66,10 @@ library BlockHistory {
cumulativeWork_ == getProofCumulativeWork(proofData_),
InvalidProofCumulativeWork()
);
require(
currentEpochStartTime_ == getProofEpochStartTime(blockHeight_ + 1, proofData_),
InvalidProofEpochStartTime()
);
require(
historyBlocksTreeRoot_ == getHistoryBlocksTreeRoot(blockHeight_ + 1, proofData_),
InvalidHistoryBlocksTreeRoot()
Expand Down Expand Up @@ -180,11 +186,9 @@ library BlockHistory {
uint64 provedBlocksCount_,
HistoryProofData calldata proofData_
) internal pure returns (bytes32 parsedBlocksTreeRoot_) {
uint256 frontierLength_ = Math.log2(provedBlocksCount_ / CHUNK_SIZE) + 1;

bool isPowOf2_ = provedBlocksCount_ & (provedBlocksCount_ - 1) == 0;
uint256 frontierLength_ = _countFrontierLength(provedBlocksCount_);

if (isPowOf2_) {
if (LibBit.isPo2(provedBlocksCount_)) {
parsedBlocksTreeRoot_ = _getBytes32FromInputs(
proofData_,
PROOF_FRONTIER_OFFSET + 32 * (frontierLength_ - 1)
Expand Down Expand Up @@ -227,6 +231,26 @@ library BlockHistory {
return uint256(proofData_.publicInputs[PROOF_CUMULATIVE_WORK_OFFSET]);
}

/**
* @notice Retrieves the last proved epoch start time from the ZK proof's public inputs.
* @param provedBlocksCount_ The total number of blocks included in the proof.
* @param proofData_ The struct containing the proof and public inputs.
* @return The epoch start time.
*/
function getProofEpochStartTime(
uint64 provedBlocksCount_,
HistoryProofData calldata proofData_
) internal pure returns (uint32) {
return
uint32(
uint256(
proofData_.publicInputs[
PROOF_FRONTIER_OFFSET + 32 * _countFrontierLength(provedBlocksCount_)
]
)
);
}

/**
* @notice Calculates the chunk number for a given block height.
* @param blockHeight_ The height of the block.
Expand Down Expand Up @@ -322,7 +346,7 @@ library BlockHistory {
uint256 frontierLength_,
HistoryProofData calldata proofData_
) internal pure returns (bytes32 computedRoot_) {
for (uint256 i = 0; i < frontierLength_; ++i) {
for (uint256 i = 0; i < frontierLength_ - 1; ++i) {
bytes32 currentNode_ = _getBytes32FromInputs(
proofData_,
PROOF_FRONTIER_OFFSET + 32 * i
Expand All @@ -348,6 +372,10 @@ library BlockHistory {
}
}

function _countFrontierLength(uint64 provedBlocksCount_) private pure returns (uint256) {
return Math.log2(provedBlocksCount_ / CHUNK_SIZE, Math.Rounding.Ceil) + 1;
}

function _processProof(
bytes32[] calldata merkleProof_,
bytes32 value_,
Expand Down
Loading