Workflows: Reduce Geodata update frequency (#4369)

Authored by 𐲓𐳛𐳪𐳂𐳐 𐲀𐳢𐳦𐳫𐳢 𐲥𐳔𐳛𐳪𐳌𐳑𐳖𐳇 on 2025-02-08 16:07:46 +08:00; committed by GitHub
parent c81d8e488a
commit db5f18b98c


@@ -1,4 +1,4 @@
-name: Timely assets update
+name: Scheduled assets update
 # NOTE: This Github Actions is required by other actions, for preparing other packaging assets in a
 # routine manner, for example: GeoIP/GeoSite.
@@ -8,19 +8,20 @@ name: Timely assets update
 on:
   workflow_dispatch:
   schedule:
-    # Update assets on every hour (xx:30)
-    - cron: '30 * * * *'
+    # Update GeoData on every day (22:30 UTC)
+    - cron: '30 22 * * *'
   push:
     # Prevent triggering update request storm
     paths:
-      - ".github/workflows/hourly-prepare.yml"
+      - ".github/workflows/scheduled-assets-update.yml"
   pull_request:
     # Prevent triggering update request storm
     paths:
-      - ".github/workflows/hourly-prepare.yml"
+      - ".github/workflows/scheduled-assets-update.yml"
 jobs:
   geodat:
+    if: github.event.schedule == '30 22 * * *' || github.event_name == 'push'|| github.event_name == 'pull_request'
     runs-on: ubuntu-latest
     steps:
       - name: Restore Geodat Cache
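The schedule now fires once a day at 22:30 UTC instead of every hour, and the new job-level `if:` ties the geodat job to that specific cron expression (or to push/pull_request events). A minimal sketch of the gating pattern, with a second, purely hypothetical cron entry added only to show what the guard would skip:

on:
  schedule:
    - cron: '30 22 * * *'   # daily GeoData refresh; the only entry the geodat job reacts to
    - cron: '0 4 * * *'     # hypothetical extra schedule; the geodat `if:` would not match it
jobs:
  geodat:
    if: github.event.schedule == '30 22 * * *' || github.event_name == 'push' || github.event_name == 'pull_request'
    runs-on: ubuntu-latest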
@@ -38,18 +39,18 @@ jobs:
           max_attempts: 60
           command: |
             [ -d 'resources' ] || mkdir resources
-            LIST=('v2ray-rules-dat geoip geoip' 'v2ray-rules-dat geosite geosite')
+            LIST=('Loyalsoldier v2ray-rules-dat geoip geoip' 'Loyalsoldier v2ray-rules-dat geosite geosite')
             for i in "${LIST[@]}"
             do
-              INFO=($(echo $i | awk 'BEGIN{FS=" ";OFS=" "} {print $1,$2,$3}'))
-              FILE_NAME="${INFO[2]}.dat"
+              INFO=($(echo $i | awk 'BEGIN{FS=" ";OFS=" "} {print $1,$2,$3,$4}'))
+              FILE_NAME="${INFO[3]}.dat"
               echo -e "Verifying HASH key..."
-              HASH="$(curl -sL "https://raw.githubusercontent.com/Loyalsoldier/${INFO[0]}/release/${INFO[1]}.dat.sha256sum" | awk -F ' ' '{print $1}')"
+              HASH="$(curl -sL "https://raw.githubusercontent.com/${INFO[0]}/${INFO[1]}/release/${INFO[2]}.dat.sha256sum" | awk -F ' ' '{print $1}')"
               if [ -s "./resources/${FILE_NAME}" ] && [ "$(sha256sum "./resources/${FILE_NAME}" | awk -F ' ' '{print $1}')" == "${HASH}" ]; then
                 continue
               else
-                echo -e "Downloading https://raw.githubusercontent.com/Loyalsoldier/${INFO[0]}/release/${INFO[1]}.dat..."
-                curl -L "https://raw.githubusercontent.com/Loyalsoldier/${INFO[0]}/release/${INFO[1]}.dat" -o ./resources/${FILE_NAME}
+                echo -e "Downloading https://raw.githubusercontent.com/${INFO[0]}/${INFO[1]}/release/${INFO[2]}.dat..."
+                curl -L "https://raw.githubusercontent.com/${INFO[0]}/${INFO[1]}/release/${INFO[2]}.dat" -o ./resources/${FILE_NAME}
                 echo -e "Verifying HASH key..."
                 [ "$(sha256sum "./resources/${FILE_NAME}" | awk -F ' ' '{print $1}')" == "${HASH}" ] || { echo -e "The HASH key of ${FILE_NAME} does not match cloud one."; exit 1; }
                 echo "unhit=true" >> $GITHUB_OUTPUT