Merge branch 'main' into avoid-double-validation-v3
ahamlat committed Sep 11, 2024
2 parents e7ccc94 + 6ea0ed4 commit 0bd2a8a
Showing 4 changed files with 118 additions and 80 deletions.
59 changes: 41 additions & 18 deletions .github/workflows/release.yml
@@ -9,8 +9,28 @@ env:
GRADLE_OPTS: "-Dorg.gradle.parallel=true -Dorg.gradle.caching=true"

jobs:
preprocess_release:
runs-on: ubuntu-22.04
steps:
- name: Pre-process Release Name
id: pre_process_release_name
run: |
RELEASE_NAME="${{ github.event.release.name }}"
# strip all whitespace
RELEASE_NAME="${RELEASE_NAME//[[:space:]]/}"
if [[ ! "$RELEASE_NAME" =~ ^[0-9]+\.[0-9]+(\.[0-9]+)?(-.*)?$ ]]; then
echo "Release name does not conform to a valid besu release format YY.M.v[-suffix], e.g. 24.8.0-RC1."
exit 1
fi
echo "release_name=$RELEASE_NAME" >> $GITHUB_OUTPUT # Set as output using the new syntax
outputs:
release_name: ${{ steps.pre_process_release_name.outputs.release_name }}

artifacts:
runs-on: ubuntu-22.04
needs: preprocess_release
env:
RELEASE_NAME: ${{ needs.preprocess_release.outputs.release_name }} # Use the output from the pre_process_release job
permissions:
contents: write
outputs:
@@ -19,17 +39,6 @@ jobs:
steps:
- name: checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- name: Pre-process Release Name
id: pre_process_release_name
run: |
RELEASE_NAME="${{ github.event.release.name }}"
# strip all whitespace
RELEASE_NAME="${RELEASE_NAME//[[:space:]]/}"
if [[ ! "$RELEASE_NAME" =~ ^[0-9]+\.[0-9]+(\.[0-9]+)?(-.*)?$ ]]; then
echo "Release name does not conform to a valid besu release format YY.M.v[-suffix], e.g. 24.8.0-RC1."
exit 1
fi
echo "RELEASE_NAME=$RELEASE_NAME" >> $GITHUB_ENV # Store in environment variable
- name: Set up Java
uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93
with:
@@ -90,7 +99,9 @@ jobs:
publish:
runs-on: ubuntu-22.04
needs: [testWindows, artifacts]
needs: [preprocess_release, testWindows, artifacts]
env:
RELEASE_NAME: ${{ needs.preprocess_release.outputs.release_name }} # Use the output from the pre_process_release job
permissions:
contents: write
steps:
@@ -115,7 +126,9 @@ jobs:
artifactoryPublish:
runs-on: ubuntu-22.04
needs: artifacts
needs: [preprocess_release, artifacts]
env:
RELEASE_NAME: ${{ needs.preprocess_release.outputs.release_name }} # Use the output from the pre_process_release job
steps:
- name: checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
@@ -152,7 +165,9 @@ jobs:
run: docker run --rm -i hadolint/hadolint < docker/Dockerfile

buildDocker:
needs: hadolint
needs: [preprocess_release, hadolint]
env:
RELEASE_NAME: ${{ needs.preprocess_release.outputs.release_name }} # Use the output from the pre_process_release job
permissions:
contents: read
packages: write
@@ -213,7 +228,9 @@ jobs:
run: ./gradlew --no-daemon dockerUpload -PdockerOrgName=${{ env.registry }}/${{ secrets.DOCKER_ORG }} -Pversion=${{env.RELEASE_NAME}} -Prelease.releaseVersion=${{ env.RELEASE_NAME }}

multiArch:
needs: buildDocker
needs: [preprocess_release, buildDocker]
env:
RELEASE_NAME: ${{ needs.preprocess_release.outputs.release_name }} # Use the output from the pre_process_release job
runs-on: ubuntu-22.04
permissions:
contents: read
@@ -240,7 +257,9 @@ jobs:
run: ./gradlew manifestDocker -PdockerOrgName=${{ env.registry }}/${{ secrets.DOCKER_ORG }} -Pversion=${{env.RELEASE_NAME}} -Prelease.releaseVersion=${{ env.RELEASE_NAME }}

amendNotes:
needs: multiArch
needs: [preprocess_release, multiArch]
env:
RELEASE_NAME: ${{ needs.preprocess_release.outputs.release_name }} # Use the output from the pre_process_release job
runs-on: ubuntu-22.04
permissions:
contents: write
@@ -253,7 +272,9 @@ jobs:
`docker pull ${{env.registry}}/${{secrets.DOCKER_ORG}}/besu:${{env.RELEASE_NAME}}`
dockerPromoteX64:
needs: multiArch
needs: [preprocess_release, multiArch]
env:
RELEASE_NAME: ${{ needs.preprocess_release.outputs.release_name }} # Use the output from the pre_process_release job
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
@@ -278,7 +299,9 @@ jobs:
run: ./gradlew "-Prelease.releaseVersion=${{ env.RELEASE_NAME }}" "-PdockerOrgName=${{ env.registry }}/${{ secrets.DOCKER_ORG }}" manifestDockerRelease

verifyContainer:
needs: dockerPromoteX64
needs: [preprocess_release, dockerPromoteX64]
env:
RELEASE_NAME: ${{ needs.preprocess_release.outputs.release_name }} # Use the output from the pre_process_release job
runs-on: ubuntu-22.04
permissions:
contents: read
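Taken together, the release.yml changes above amount to one pattern: the release name is validated exactly once, in the new preprocess_release job, and published as a job output that every downstream job reads via needs.preprocess_release.outputs.release_name, instead of each job re-deriving it into $GITHUB_ENV (values written to $GITHUB_ENV are only visible to later steps of the same job, not to other jobs). A minimal sketch of that pattern, with simplified job and step names rather than the actual Besu workflow:

jobs:
  preprocess_release:
    runs-on: ubuntu-22.04
    outputs:
      # expose the validated name to any job that declares `needs: preprocess_release`
      release_name: ${{ steps.validate.outputs.release_name }}
    steps:
      - name: Validate release name
        id: validate
        run: |
          RELEASE_NAME="${{ github.event.release.name }}"
          RELEASE_NAME="${RELEASE_NAME//[[:space:]]/}"   # strip all whitespace
          # e.g. "24.8.0-RC1" and "24.9" pass; "v24.8.0" is rejected (leading "v")
          [[ "$RELEASE_NAME" =~ ^[0-9]+\.[0-9]+(\.[0-9]+)?(-.*)?$ ]] || exit 1
          echo "release_name=$RELEASE_NAME" >> "$GITHUB_OUTPUT"

  publish:
    needs: preprocess_release
    runs-on: ubuntu-22.04
    env:
      RELEASE_NAME: ${{ needs.preprocess_release.outputs.release_name }}
    steps:
      - run: echo "Publishing $RELEASE_NAME"

A job that omits preprocess_release from its needs list would see an empty release_name, which is why every dependent job in the diff adds it to its needs array.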
TxPoolOptionsTest.java
@@ -30,7 +30,6 @@
import java.nio.file.Files;
import java.nio.file.Path;

import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

public class TxPoolOptionsTest extends CommandTestAbstract {
@@ -98,7 +97,6 @@ public void txpoolSaveFileAbsolutePathOutsideDataPath() throws IOException {
}

@Test
@Disabled // Failing in CI, but not locally
public void txpoolForcePriceBumpToZeroWhenZeroBaseFeeMarket() throws IOException {
final Path genesisFile = createFakeGenesisFile(GENESIS_WITH_ZERO_BASE_FEE_MARKET);
parseCommand("--genesis-file", genesisFile.toString());
DefaultBlockchain.java
@@ -116,6 +116,37 @@ private DefaultBlockchain(
chainHeadTransactionCount = chainHeadBody.getTransactions().size();
chainHeadOmmerCount = chainHeadBody.getOmmers().size();

this.reorgLoggingThreshold = reorgLoggingThreshold;
this.blockChoiceRule = heaviestChainBlockChoiceRule;
this.numberOfBlocksToCache = numberOfBlocksToCache;

if (numberOfBlocksToCache != 0) {
blockHeadersCache =
Optional.of(
CacheBuilder.newBuilder().recordStats().maximumSize(numberOfBlocksToCache).build());
blockBodiesCache =
Optional.of(
CacheBuilder.newBuilder().recordStats().maximumSize(numberOfBlocksToCache).build());
transactionReceiptsCache =
Optional.of(
CacheBuilder.newBuilder().recordStats().maximumSize(numberOfBlocksToCache).build());
totalDifficultyCache =
Optional.of(
CacheBuilder.newBuilder().recordStats().maximumSize(numberOfBlocksToCache).build());
CacheMetricsCollector cacheMetrics = new CacheMetricsCollector();
cacheMetrics.addCache("blockHeaders", blockHeadersCache.get());
cacheMetrics.addCache("blockBodies", blockBodiesCache.get());
cacheMetrics.addCache("transactionReceipts", transactionReceiptsCache.get());
cacheMetrics.addCache("totalDifficulty", totalDifficultyCache.get());
if (metricsSystem instanceof PrometheusMetricsSystem prometheusMetricsSystem)
prometheusMetricsSystem.addCollector(BesuMetricCategory.BLOCKCHAIN, () -> cacheMetrics);
} else {
blockHeadersCache = Optional.empty();
blockBodiesCache = Optional.empty();
transactionReceiptsCache = Optional.empty();
totalDifficultyCache = Optional.empty();
}

metricsSystem.createLongGauge(
BesuMetricCategory.ETHEREUM,
"blockchain_height",
@@ -179,37 +210,6 @@ private DefaultBlockchain(
"chain_head_ommer_count",
"Number of ommers in the current chain head block",
() -> chainHeadOmmerCount);

this.reorgLoggingThreshold = reorgLoggingThreshold;
this.blockChoiceRule = heaviestChainBlockChoiceRule;
this.numberOfBlocksToCache = numberOfBlocksToCache;

if (numberOfBlocksToCache != 0) {
blockHeadersCache =
Optional.of(
CacheBuilder.newBuilder().recordStats().maximumSize(numberOfBlocksToCache).build());
blockBodiesCache =
Optional.of(
CacheBuilder.newBuilder().recordStats().maximumSize(numberOfBlocksToCache).build());
transactionReceiptsCache =
Optional.of(
CacheBuilder.newBuilder().recordStats().maximumSize(numberOfBlocksToCache).build());
totalDifficultyCache =
Optional.of(
CacheBuilder.newBuilder().recordStats().maximumSize(numberOfBlocksToCache).build());
CacheMetricsCollector cacheMetrics = new CacheMetricsCollector();
cacheMetrics.addCache("blockHeaders", blockHeadersCache.get());
cacheMetrics.addCache("blockBodies", blockBodiesCache.get());
cacheMetrics.addCache("transactionReceipts", transactionReceiptsCache.get());
cacheMetrics.addCache("totalDifficulty", totalDifficultyCache.get());
if (metricsSystem instanceof PrometheusMetricsSystem prometheusMetricsSystem)
prometheusMetricsSystem.addCollector(BesuMetricCategory.BLOCKCHAIN, () -> cacheMetrics);
} else {
blockHeadersCache = Optional.empty();
blockBodiesCache = Optional.empty();
transactionReceiptsCache = Optional.empty();
totalDifficultyCache = Optional.empty();
}
}

public static MutableBlockchain createMutable(
@@ -132,42 +132,59 @@ public ValidationResult<TransactionInvalidReason> validate(
}

if (transactionType == TransactionType.DELEGATE_CODE) {
if (isDelegateCodeEmpty(transaction)) {
return ValidationResult.invalid(
TransactionInvalidReason.EMPTY_CODE_DELEGATION,
"transaction code delegation transactions must have a non-empty code delegation list");
ValidationResult<TransactionInvalidReason> codeDelegationValidation =
validateCodeDelegation(transaction);
if (!codeDelegationValidation.isValid()) {
return codeDelegationValidation;
}
}

return validateCostAndFee(transaction, baseFee, blobFee, transactionValidationParams);
}

final BigInteger halfCurveOrder = SignatureAlgorithmFactory.getInstance().getHalfCurveOrder();
final Optional<ValidationResult<TransactionInvalidReason>> validationResult =
transaction
.getCodeDelegationList()
.map(
codeDelegations -> {
for (CodeDelegation codeDelegation : codeDelegations) {
if (codeDelegation.signature().getS().compareTo(halfCurveOrder) > 0) {
return ValidationResult.invalid(
TransactionInvalidReason.INVALID_SIGNATURE,
"Invalid signature for code delegation. S value must be less or equal than the half curve order.");
}

if (codeDelegation.signature().getRecId() != 0
&& codeDelegation.signature().getRecId() != 1) {
return ValidationResult.invalid(
TransactionInvalidReason.INVALID_SIGNATURE,
"Invalid signature for code delegation. RecId value must be 0 or 1.");
}
private static ValidationResult<TransactionInvalidReason> validateCodeDelegation(
final Transaction transaction) {
if (isDelegateCodeEmpty(transaction)) {
return ValidationResult.invalid(
TransactionInvalidReason.EMPTY_CODE_DELEGATION,
"transaction code delegation transactions must have a non-empty code delegation list");
}

if (transaction.getTo().isEmpty()) {
return ValidationResult.invalid(
TransactionInvalidReason.INVALID_TRANSACTION_FORMAT,
"transaction code delegation transactions must have a to address");
}

final BigInteger halfCurveOrder = SignatureAlgorithmFactory.getInstance().getHalfCurveOrder();
final Optional<ValidationResult<TransactionInvalidReason>> validationResult =
transaction
.getCodeDelegationList()
.map(
codeDelegations -> {
for (CodeDelegation codeDelegation : codeDelegations) {
if (codeDelegation.signature().getS().compareTo(halfCurveOrder) > 0) {
return ValidationResult.invalid(
TransactionInvalidReason.INVALID_SIGNATURE,
"Invalid signature for code delegation. S value must be less or equal than the half curve order.");
}

return ValidationResult.valid();
});
if (codeDelegation.signature().getRecId() != 0
&& codeDelegation.signature().getRecId() != 1) {
return ValidationResult.invalid(
TransactionInvalidReason.INVALID_SIGNATURE,
"Invalid signature for code delegation. RecId value must be 0 or 1.");
}
}

if (validationResult.isPresent() && !validationResult.get().isValid()) {
return validationResult.get();
}
return ValidationResult.valid();
});

if (validationResult.isPresent() && !validationResult.get().isValid()) {
return validationResult.get();
}

return validateCostAndFee(transaction, baseFee, blobFee, transactionValidationParams);
return ValidationResult.valid();
}

private static boolean isDelegateCodeEmpty(final Transaction transaction) {
