This repository has been archived by the owner on Feb 24, 2025. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 0
229 lines (196 loc) · 7.97 KB
/
main.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
# Workflow: quality-control pipeline for Task1 data pushed to main.
name: Task1 QC

# Trigger: every push to the main branch.
on:
  push:
    branches:
      - main

# Write scopes: contents for the automated commit-back job, pages/id-token
# for the GitHub Pages deployment. NOTE(review): packages/issues write are
# granted but not obviously used by any job visible here — confirm or trim.
permissions:
  contents: write
  packages: write
  issues: write
  id-token: write
  pages: write
jobs:
  # Find CSV files changed in the last 24 h and split each filename into
  # its sub/task/version parts plus the run-* parent-directory component.
  process_raw:
    runs-on: self-hosted
    outputs:
      sub: ${{ steps.set_vars.outputs.sub }}
      task: ${{ steps.set_vars.outputs.task }}
      version: ${{ steps.set_vars.outputs.version }}
      run_part: ${{ steps.set_vars.outputs.run_part }}
    steps:
      - name: checkout code and return recently uploaded file in /data
        uses: actions/checkout@v3
        with:
          # Full history: the default shallow clone (depth 1) would hide
          # the commits that `git log --since` below needs to inspect.
          fetch-depth: 0
      - name: Get changed files
        run: |
          # CSV files touched in the last 24 hours (one path per line).
          data=$(git log --since="24 hours ago" --name-only --pretty=format: -- '*.csv' | sort | uniq)
          # $data is multi-line; a plain `echo "data=$data"` corrupts
          # $GITHUB_ENV, so use the documented heredoc delimiter syntax.
          {
            echo "data<<GHA_EOF"
            echo "$data"
            echo "GHA_EOF"
          } >> "$GITHUB_ENV"
          echo "Changed CSV files in the last 24 hours: $data"
      - name: set up python
        run: |
          python -m pip install --upgrade pip
      - name: parse raw
        id: set_vars
        run: |
          # Parse each changed file. NOTE(review): as in the original, every
          # iteration overwrites the step outputs, so only the values from
          # the last file in $data reach the downstream jobs — confirm that
          # is intended when several CSVs change in one day.
          for file in $data; do
            dir=$(dirname "$file")
            filename=$(basename "$file")
            # The run-* component is the file's immediate parent directory.
            run_part=$(basename "$dir")
            # Filenames follow <sub>_<task>_<version>.csv
            IFS='_' read -r sub task version <<< "$filename"
            version="${version%.csv}"  # strip the .csv extension
            # ::set-output is deprecated and disabled on current runners;
            # write key=value lines to $GITHUB_OUTPUT instead.
            {
              echo "run_part=$run_part"
              echo "sub=$sub"
              echo "task=$task"
              echo "version=$version"
            } >> "$GITHUB_OUTPUT"
            echo "Run Part: $run_part"
            echo "Subject: $sub"
            echo "Task: $task"
            echo "Version: $version"
          done
run_qc:
runs-on: self-hosted
needs: process_raw
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Debug env vars
run: |
echo "sub=${{ needs.process_raw.outputs.sub }}"
echo "task=${{ needs.process_raw.outputs.task }}"
echo "version=${{ needs.process_raw.outputs.version }}"
echo "run_part=${{ needs.process_raw.outputs.run_part }}"
- name: run quality control
run: |
sub=${{ needs.process_raw.outputs.sub }}
task=${{ needs.process_raw.outputs.task }}
vers=${{ needs.process_raw.outputs.version }}
run_part=${{ needs.process_raw.outputs.run_part }}
for sub in ${sub}; do
echo "Processing subject: $sub"
for task in ${task}; do
echo "Processing task: $task"
for vers in ${vers}; do
echo "Processing version: $vers"
csv_file="./data/${sub}/processed/${run_part}/${sub}_${task}_${vers}.csv"
mkdir -p "./data/${sub}/${run_part}"
log_file="./data/${sub}/${run_part}/qc_${task}_${vers}.log"
echo "CSV file: $csv_file"s
echo "Log file: $log_file"
if [ -f "$csv_file" ]; then
python ./code/AFqC.py -s "$csv_file" -o "./data/${sub}/${run_part}" -sub "$sub" | tee "$log_file"
echo "QC for ${sub}_${task}_${vers} running"
else
echo "CSV file $csv_file does not exist"
fi
done
done
done
add:
concurrency:
group: "pages"
cancel-in-progress: false
runs-on: ubuntu-latest
needs: run_qc # Ensure 'run_qc' job exists if referenced
steps:
# 1. Checkout the Repository
- name: Checkout Repository
uses: actions/checkout@v4
with:
persist-credentials: false # Recommended for security
fetch-depth: 0 # Ensure full history is fetched for git commands
# 2. Find New PNG Files
- name: Find New PNG Files
id: find_png
run: |
# Find newly added PNG files in this commit
png_files=$(git diff --name-only --relative --diff-filter=A HEAD^ HEAD -- '*.png')
echo "PNG_FILES=$png_files" >> $GITHUB_ENV
# 3. Generate Jekyll Posts
- name: Add _posts
run: |
POSTS_DIR="_posts"
mkdir -p $POSTS_DIR # Ensure the _posts directory exists
# Initialize an associative array to group images by subject
declare -A subjects
# Iterate over each PNG file and group them by subject number
for file in $PNG_FILES; do
# Extract the subject number (assuming it's the first part of the filename before '_')
subject=$(echo "$file" | awk -F_ '{print $1}')
# Append the filename to the subject's array
subjects["$subject"]+="$file "
done
# Generate Jekyll posts for each subject
for subject in "${!subjects[@]}"; do
# Define the post filename with current date and subject number
post_filename="$POSTS_DIR/$(date +%Y-%m-%d)-subject-$subject.md"
# Create the Jekyll post
{
echo "---"
echo "layout: post"
echo "title: Subject $subject"
echo "date: $(date +%Y-%m-%d)"
echo "categories: subjects"
echo "---"
echo ""
echo "# $subject"
# Add images to the post
for image in ${subjects["$subject"]}; do
echo ""
done
} > "$post_filename"
echo "Created post: $post_filename"
done
# 4. (Optional) Commit and Push Generated Posts
# This step commits the newly created posts back to the repository.
# 5. Set Up Ruby Environment
- name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: '3.1' # Specify your Ruby version
bundler-cache: true # Caches installed gems automatically
cache-version: 1 # Increment if you need to reset the cache
# 6. Install Dependencies
- name: Install Dependencies
run: bundle install
# 7. Build the Jekyll Site
- name: Build with Jekyll
run: bundle exec jekyll build --baseurl "${{ github.event.inputs.base_path || '' }}"
env:
JEKYLL_ENV: production
# Alternatively, if you prefer using GitHub's built-in Pages action:
# Uncomment the following steps and remove the above Deploy step
#
- name: Configure GitHub Pages
uses: actions/configure-pages@v5
#
- name: Upload Pages Artifact
uses: actions/upload-pages-artifact@v1
with:
path: ./_site
- name: Deploy to GitHub Pages
uses: actions/deploy-pages@v1
with:
token: ${{ secrets.GITHUB_TOKEN }}
push:
runs-on: self-hosted
needs: add
steps:
- name: Commit and Push Changes
run: |
git config --global user.name "miloswrath"
git config --global user.email "[email protected]"
git remote set-url origin https://x-access-token:${{ secrets.GIT_TOKEN }}@github.com/$GITHUB_REPOSITORY
git add .
git commit -m "Automated commit by GitHub Actions"
git push
env:
GITHUB_TOKEN: ${{ secrets.GIT_TOKEN }}