Merge branch 'main' into deprecate-unused-interfaces
0xDEnYO authored Mar 5, 2025
2 parents 0ba2c07 + c6f83d4 commit 16b1003
Showing 24 changed files with 1,627 additions and 165 deletions.
112 changes: 112 additions & 0 deletions .github/workflows/healthCheckForNewNetworkDeployment.yml
@@ -0,0 +1,112 @@
name: Health Check for New Network Deployment

# - designed to perform health checks for newly added networks
# - triggers on pull requests and first checks if the config/networks.json file was modified
# - validates that each new network has corresponding deployment and state configuration files
# - runs network-specific health checks
# - if any required file is missing or a health check fails, the action exits with an error

on:
  pull_request:
    types: [opened, synchronize, reopened, ready_for_review]

jobs:
  check-new-network-health:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Check if config/networks.json was changed in this branch
        id: check-file-change
        run: |
          if git diff --name-only origin/main...HEAD | grep -q "config/networks.json"; then
            echo "config/networks.json has been modified in this branch"
            echo "CONTINUE=true" >> $GITHUB_ENV
          else
            echo "No changes in config/networks.json detected in this branch"
            echo "CONTINUE=false" >> $GITHUB_ENV
          fi

      - name: Detect Newly Added Networks
        if: env.CONTINUE == 'true'
        id: detect-changes
        run: |
          echo "Comparing config/networks.json with the previous commit..."
          git fetch origin main --depth=1 || echo "No previous commit found."
          if git show origin/main:config/networks.json > /dev/null 2>&1; then
            OLD_NETWORKS=$(git show origin/main:config/networks.json | jq 'keys')
          else
            echo "❌ Error: No previous networks.json found. Expected existing network configuration."
            exit 1
          fi
          NEW_NETWORKS=$(jq 'keys' config/networks.json)
          ADDED_NETWORKS=$(jq -n --argjson old "$OLD_NETWORKS" --argjson new "$NEW_NETWORKS" '$new - $old')
          echo "Added networks: $ADDED_NETWORKS"
          if [[ "$ADDED_NETWORKS" == "[]" ]]; then
            echo "No new networks detected."
            echo "SKIP_CHECK=true" >> $GITHUB_ENV
          else
            echo "New networks detected: $ADDED_NETWORKS"
            echo "added_networks=$(echo $ADDED_NETWORKS | jq -c .)" >> $GITHUB_ENV
          fi

      - name: Validate Network Deployment Files
        if: env.CONTINUE == 'true' && env.SKIP_CHECK != 'true'
        run: |
          echo "Validating required files for new networks..."
          for network in $(echo $added_networks | jq -r '.[]'); do
            echo "🔍 Checking files for network: $network"

            # Check if network exists in _targetState.json
            if ! jq -e 'has("'"$network"'")' script/deploy/_targetState.json > /dev/null; then
              echo "❌ Error: Network '$network' not found in script/deploy/_targetState.json"
              exit 1
            else
              echo "✅ Confirmed: Network '$network' exists in script/deploy/_targetState.json"
            fi

            # Check if deployments/{network}.json file exists
            if [[ ! -f "deployments/$network.json" ]]; then
              echo "❌ Error: Missing deployment file: deployments/$network.json"
              exit 1
            else
              echo "✅ Confirmed: Deployment file: deployments/$network.json exists"
            fi
          done

      - name: Install Bun
        if: env.CONTINUE == 'true' && env.SKIP_CHECK != 'true'
        uses: oven-sh/setup-bun@v1
        with:
          bun-version: latest

      - name: Install Foundry (provides cast)
        if: env.CONTINUE == 'true' && env.SKIP_CHECK != 'true'
        uses: foundry-rs/foundry-toolchain@v1

      - name: Install dependencies
        if: env.CONTINUE == 'true' && env.SKIP_CHECK != 'true'
        run: bun install

      - name: Run Health Checks on New Networks
        if: env.CONTINUE == 'true' && env.SKIP_CHECK != 'true'
        run: |
          echo "Running health check for new networks..."
          set -e
          for network in $(echo $added_networks | jq -r '.[]'); do
            echo "🔍 Checking network: $network"
            if bun run script/deploy/healthCheck.ts --network "$network"; then
              echo "✅ $network is fine."
            else
              echo "❌ Health check failed for $network. Exiting..."
              exit 1
            fi
          done
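For reference, the added-network detection above reduces to a jq set difference over the top-level keys of config/networks.json. A minimal standalone sketch of that step follows; the file contents and the "unichain" key are hypothetical, for illustration only:

#!/usr/bin/env bash
# Hypothetical old and new contents of config/networks.json (illustrative only)
OLD_JSON='{"arbitrum": {}, "mainnet": {}}'
NEW_JSON='{"arbitrum": {}, "mainnet": {}, "unichain": {}}'

OLD_KEYS=$(echo "$OLD_JSON" | jq 'keys')   # ["arbitrum","mainnet"]
NEW_KEYS=$(echo "$NEW_JSON" | jq 'keys')   # ["arbitrum","mainnet","unichain"]

# jq array subtraction keeps only the keys that appear in the new file
ADDED=$(jq -n --argjson old "$OLD_KEYS" --argjson new "$NEW_KEYS" '$new - $old')
echo "Added networks: $ADDED"              # ["unichain"]

# Iterate the result the same way the workflow's later steps do
for network in $(echo "$ADDED" | jq -r '.[]'); do
  echo "🔍 would run health check for: $network"
done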
2 changes: 1 addition & 1 deletion .solhint.json
@@ -11,7 +11,7 @@
"code-complexity": ["error", 20],
"explicit-types": ["error", "explicit"],
"max-states-count": ["error", 15],
"no-empty-blocks": "error",
"no-empty-blocks": "off",
"no-global-import": "error",
"no-unused-import": "error",
"no-unused-vars": "error",
13 changes: 13 additions & 0 deletions audit/auditLog.json
@@ -161,6 +161,13 @@
"auditReportPath": "./audit/reports/2025.01.17_LiFiDexAggregator(v1.4.0).pdf",
"auditCommitHash": "n/a (one deployed contract instance was audited)"
},
"audit20250219": {
"auditCompletedOn": "19.02.2025",
"auditedBy": "Sujith Somraaj (individual security researcher)",
"auditorGitHandle": "sujithsomraaj",
"auditReportPath": "./audit/reports/2025.02.19_GlacisFacet(v1.0.0).pdf",
"auditCommitHash": "9e623bc2a218399172e734af0ba6dfa3f76963a5"
},
"audit20250220": {
"auditCompletedOn": "20.02.2025",
"auditedBy": "Sujith Somraaj (individual security researcher)",
@@ -206,12 +213,18 @@
"1.0.0": ["audit20241107"],
"1.0.1": ["audit20250110_1"]
},
"GlacisFacet": {
"1.0.0": ["audit20250219"]
},
"IAcrossSpokePool": {
"1.0.0": ["audit20250106"]
},
"IGasZip": {
"1.0.0": ["audit20241107"]
},
"IGlacisAirlift": {
"1.0.0": ["audit20250219"]
},
"LibAsset": {
"1.0.1": ["audit20241202"],
"1.0.2": ["audit20250110_1"]
Binary file added audit/reports/2025.02.19_GlacisFacet(v1.0.0).pdf
12 changes: 12 additions & 0 deletions config/glacis.json
@@ -0,0 +1,12 @@
{
  "important": "these values are test deployments only. We need to update this file when Glacis has deployed their final versions",
  "arbitrum": {
    "airlift": "0xD9E7f6f7Dc7517678127D84dBf0F0b4477De14E0"
  },
  "optimism": {
    "airlift": "0xdEedFc11fCd2bC3E63915e8060ec48875E890BCB"
  },
  "base": {
    "airlift": "0x30095227Eb6d72FA6c09DfdeFFC766c33f7FA2DD"
  }
}
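As a usage note, a script could resolve the Airlift address for a given network from this file with jq. A minimal sketch follows; the network name and the consuming script are assumptions for illustration and are not part of this commit:

#!/usr/bin/env bash
# Look up the Glacis Airlift address for one network (the NETWORK value is illustrative)
NETWORK="arbitrum"
AIRLIFT=$(jq -r --arg net "$NETWORK" '.[$net].airlift // empty' config/glacis.json)

if [[ -z "$AIRLIFT" ]]; then
  echo "❌ No airlift address configured for $NETWORK in config/glacis.json"
  exit 1
fi
echo "✅ Airlift address for $NETWORK: $AIRLIFT"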