add workflow file
ngundotra committed Jan 23, 2025
1 parent c65216a commit 6e58e33
Showing 3 changed files with 173 additions and 0 deletions.
34 changes: 34 additions & 0 deletions .github/workflows/update-feature-gates.yml
@@ -0,0 +1,34 @@
name: Update Feature Gates
on:
  workflow_dispatch:
  schedule:
    - cron: '0 0 * * *' # Daily at midnight

jobs:
  update:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.10'

      - name: Install dependencies
        run: |
          python -m pip install requests beautifulsoup4
      - name: Scrape and generate features
        run: python scripts/parse_feature_gates.py

      - name: Update featureGate.tsx
        run: node scripts/updateFeatureGates.mjs

      - name: Commit and push changes
        run: |
          git config --global user.name "GitHub Actions"
          git config --global user.email "[email protected]"
          git add app/utils/feature-gate/constants.ts
          git commit -m "Update feature gates from Agave wiki" || exit 0
          git push
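The workflow commits app/utils/feature-gate/constants.ts, which the generator script below writes as an array of FeatureInfoType imported from './types'. That types file is not part of this commit, so the following is only a rough sketch of its likely shape, inferred from the fields scripts/updateFeatureGates.mjs emits:

// Hypothetical sketch of FeatureInfoType in app/utils/feature-gate/types.ts.
// The file is not part of this commit; the fields below are inferred from
// what scripts/updateFeatureGates.mjs writes into constants.ts.
export type FeatureInfoType = {
    description: string | null;
    devnetActivationEpoch: number | null;
    key: string; // base58 address of the feature gate account
    mainnetActivationEpoch: number | null;
    simd: {
        link: string; // URL of the SIMD proposal markdown
        number: number;
    } | null;
    testnetActivationEpoch: number | null;
    title: string;
};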
87 changes: 87 additions & 0 deletions scripts/parse_feature_gates.py
@@ -0,0 +1,87 @@
import requests
from bs4 import BeautifulSoup
import json
import os

def parse_wiki():
    url = "https://github.com/anza-xyz/agave/wiki/Feature-Gate-Tracker-Schedule"
    proposals_url = "https://github.com/solana-foundation/solana-improvement-documents/tree/main/proposals"

    # Get main wiki page
    response = requests.get(url)
    soup = BeautifulSoup(response.text, 'html.parser')

    # Get proposals directory listing
    proposals_response = requests.get(proposals_url)
    proposals_soup = BeautifulSoup(proposals_response.text, 'html.parser')
    proposal_files = [a.find('a')['title'] for a in proposals_soup.find_all('tr', {'class': 'react-directory-row'})
                      if a.find('a') and a.find('a')['title'].startswith(tuple('0123456789'))
                      and len(a.find('a')['title'].split('-')[0]) == 4
                      and a.find('a')['title'].endswith('.md')]

    features = []

    # Parse all feature tables
    tables = soup.find_all('table')
    for table in tables[1:]:  # Skip first table
        if 'Pending' in table.find_previous('h3').text:
            for row in table.find_all('tr')[1:]:  # Skip header
                cols = [col.get_text(strip=True) for col in row.find_all('td')]
                if len(cols) >= 6:
                    simd = cols[1].strip() if cols[1].strip() != 'v2.0.0' else None

                    # Find matching proposal file if SIMD exists
                    simd_link = None
                    if simd and simd.isdigit():
                        simd_prefix = simd.zfill(4)
                        matching_files = [f for f in proposal_files if f.startswith(simd_prefix)]
                        if matching_files:
                            simd_link = f"https://github.com/solana-foundation/solana-improvement-documents/blob/main/proposals/{matching_files[0]}"

                    feature = {
                        "key": cols[0],
                        "simd": str(int(simd)) if simd and simd.isdigit() else "",
                        "version": cols[2],
                        "testnet": cols[3],
                        "devnet": cols[4],
                        # Has to be updated via script
                        "mainnet": None,
                        "title": cols[5],
                        # Has to be manually updated
                        "description": None,
                        "simd_link": simd_link
                    }

                    features.append(feature)

    # Load existing features if file exists
    existing_features = []
    if os.path.exists('features.json'):
        with open('features.json', 'r') as f:
            existing_features = json.load(f)

    # Update existing features and add new ones
    features_by_key = {f['key']: f for f in features}
    for i, existing in enumerate(existing_features):
        if existing['key'] in features_by_key:
            # Only update devnet and testnet epochs
            new_feature = features_by_key[existing['key']]
            existing_features[i]['devnet'] = new_feature['devnet']
            existing_features[i]['testnet'] = new_feature['testnet']
            del features_by_key[existing['key']]

    new_features = list(features_by_key.values())
    if len(new_features) > 0:
        print("New features:")
        for f in new_features:
            print(f"{f['key']} - {f['title']}")

    # Combine existing and new features
    all_features = existing_features + new_features

    with open('features.json', 'w') as f:
        json.dump(all_features, f, indent=2)

if __name__ == "__main__":
    parse_wiki()
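For reference, one record in the features.json file this script writes has roughly the following shape; the field names match the dict assembled above, while every value here is a made-up placeholder:

// Hypothetical example of a single features.json entry (written as a
// TypeScript literal for illustration only); the key is not a real
// feature gate address.
const exampleFeature = {
    key: 'Feature111111111111111111111111111111111111', // placeholder address
    simd: '123',        // empty string when the wiki row has no SIMD number
    version: 'v2.1.0',
    testnet: '700',     // epoch text scraped from the wiki table
    devnet: '712',
    mainnet: null,      // always left null by the scraper
    title: 'Example feature gate',
    description: null,  // curated by hand later
    simd_link: 'https://github.com/solana-foundation/solana-improvement-documents/blob/main/proposals/0123-example.md',
};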
52 changes: 52 additions & 0 deletions scripts/updateFeatureGates.mjs
@@ -0,0 +1,52 @@
import { PublicKey } from '@solana/web3.js';
import { readFileSync, writeFileSync } from 'fs';

// Read generated features and existing feature gate file
const features = JSON.parse(readFileSync('features.json'));
const featureGatePath = 'app/utils/feature-gate/constants.ts';

// Map features to the desired format
const newFeatures = features.map(feature => ({
    description: feature.description,
    devnetActivationEpoch: feature.devnet ? parseInt(feature.devnet) : null,
    key: feature.key,
    mainnetActivationEpoch: feature.mainnet ? parseInt(feature.mainnet) : null,
    simd: feature.simd ? {
        link: feature.simd_link,
        number: parseInt(feature.simd)
    } : null,
    testnetActivationEpoch: feature.testnet ? parseInt(feature.testnet) : null,
    title: feature.title,
}));

// Generate new file content
const newContent = `import { FeatureInfoType } from './types';
export const FEATURES: FeatureInfoType[] = [
${newFeatures.map(f => `    {
        description: '${f.description?.replace(/'/g, "\\'") || ''}',
        devnetActivationEpoch: ${f.devnetActivationEpoch ?? 'null'},
        key: '${f.key}',
        mainnetActivationEpoch: ${f.mainnetActivationEpoch ?? 'null'},
        simd: ${f.simd ? `{
            link: '${f.simd.link}',
            number: ${f.simd.number},
        }` : 'null'},
        testnetActivationEpoch: ${f.testnetActivationEpoch ?? 'null'},
        title: '${f.title?.replace(/'/g, "\\'")}',
    },`).join('\n')}
];`;

// Write the new file
writeFileSync(featureGatePath, newContent);
console.log(`Updated ${featureGatePath} with ${newFeatures.length} features`);

// Validate public keys
newFeatures.forEach(feature => {
    try {
        new PublicKey(feature.key);
    } catch {
        console.error(`Invalid public key: ${feature.key}`);
        process.exit(1);
    }
});
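Once the workflow has regenerated constants.ts, the array can be consumed like any other module. A minimal, hypothetical lookup helper (not part of this commit, with an assumed import path) might look like:

// Hypothetical consumer of the generated constants; getFeatureInfo does not
// exist in this repository and the import path is assumed.
import { FEATURES } from 'app/utils/feature-gate/constants';

export function getFeatureInfo(featureKey: string) {
    // Return the tracked feature gate for a given account key, or null.
    return FEATURES.find(feature => feature.key === featureKey) ?? null;
}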
