mirror of https://github.com/RT-Thread/rt-thread
ci/maintainer: merge same tag with different paths, remove Path display from CI comment

parent 8a4890a9e5
commit 156259b499

@@ -9,6 +9,7 @@
 # 2025-03-14 hydevcode
 # 2025-05-10 kurisaW Fixed file existence, cache, and comment time issues
 # 2025-05-11 kurisaW Fixed missing unique files creation and cache logic
+# 2025-07-14 kurisaW Merge same tag with different paths, remove Path display from CI comment

 # Script Function Description: Assign PR reviews based on the MAINTAINERS list.

@@ -44,107 +45,209 @@ jobs:
 - name: Get changed files
 id: changed_files
 run: |
-# Fetch the PR's changed file list via the GitHub API
+# Fetch the PR's changed file list via the GitHub API (with retries and error handling)
-changed_files=$(curl -s \
+max_retries=3
-"https://api.github.com/repos/${{ github.repository }}/pulls/${{ steps.extract-pr.outputs.PR_NUMBER }}/files" | \
+retry_count=0
-jq -r '.[].filename') # extract the filenames with jq
+changed_files=""
-echo "$changed_files" > changed_files.txt
+api_response=""

+echo "Fetching changed files for PR #${{ steps.extract-pr.outputs.PR_NUMBER }}..."

+while [ $retry_count -lt $max_retries ]; do
+api_response=$(curl -s \
+-H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" \
+-H "Accept: application/vnd.github.v3+json" \
+"https://api.github.com/repos/${{ github.repository }}/pulls/${{ steps.extract-pr.outputs.PR_NUMBER }}/files")

+# Verify the response is valid JSON and contains a file array
+if jq -e 'if type=="array" then .[0].filename else empty end' <<<"$api_response" >/dev/null 2>&1; then
+changed_files=$(jq -r '.[].filename' <<<"$api_response")
+break
+else
+echo "Retry $((retry_count+1)): API response not ready or invalid format"
+echo "API Response: $api_response"
+sleep 5
+((retry_count++))
+fi
+done

+if [ -z "$changed_files" ]; then
+echo "Error: Failed to get changed files after $max_retries attempts"
+echo "Final API Response: $api_response"
+exit 1
+fi

+echo "$changed_files" > changed_files.txt
+echo "Successfully fetched $(wc -l < changed_files.txt) changed files"

+# The existing comment-handling logic below is unchanged
 existing_comment=$(curl -s \
-"https://api.github.com/repos/${{ github.repository }}/issues/${{ steps.extract-pr.outputs.PR_NUMBER }}/comments" | \
+-H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" \
-jq -r '.[] | select(.user.login == "github-actions[bot]") | {body: .body} | @base64')
+"https://api.github.com/repos/${{ github.repository }}/issues/${{ steps.extract-pr.outputs.PR_NUMBER }}/comments")

-echo "=== Changed Files ==="
+# Check if response is valid JSON
-cat changed_files.txt
+if jq -e . >/dev/null 2>&1 <<<"$existing_comment"; then
-echo "====================="
+existing_comment=$(jq -r '.[] | select(.user.login == "github-actions[bot]") | {body: .body} | @base64' <<< "$existing_comment")
+else
+existing_comment=""
+echo "Warning: Invalid JSON response from GitHub API for comments"
+echo "Response: $existing_comment"
+fi

 comment_body=""
 if [[ ! -z "$existing_comment" ]]; then
-comment_body=$(echo "$existing_comment" | head -1 | base64 -d | jq -r .body | sed -nE 's/.*Last Updated: ([0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2} UTC).*/\1/p')
+comment_body=$(echo "$existing_comment" | head -1 | base64 -d | jq -r .body | sed -nE 's/.*Last Updated: ([0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2} CST).*/\1/p')
-comment_time=$(date -d "$comment_body" +%s)
+comment_time=$(TZ='Asia/Shanghai' date -d "$comment_body" +%s)
-echo "${comment_body}"
+echo "CACHE_TIMESTAMP=${comment_time}" >> $GITHUB_OUTPUT # use this variable name consistently
 echo "COMMENT_TIME=${comment_time}" >> $GITHUB_OUTPUT
 else
 comment_time=""
-echo "COMMENT_TIME=${comment_time}" >> $GITHUB_OUTPUT
+echo "CACHE_TIMESTAMP=" >> $GITHUB_OUTPUT
+echo "COMMENT_TIME=" >> $GITHUB_OUTPUT
 fi
-echo "COMMENT_TIME=${comment_time}"
+echo "Debug - CACHE_TIMESTAMP: $comment_time"

 - name: Parse MAINTAINERS file
 id: parse_maintainer
 run: |
-# Parse the MAINTAINERS file with AWK:
+set -euo pipefail
-# extract tag, path, and owners (maintainer GitHub IDs)
 awk '
+BEGIN{ tag=""; paths=""; owners="" }
 /^tag:/ {
-tag = substr($0, index($0, $2)) # extract the tag content
+tag = substr($0, index($0, $2));
+paths=""; owners=""
 }
 /^path:/ {
-# extract the path field and trim surrounding whitespace
+path = substr($0, index($0, $2))
-path = substr($0, index($0, $2))
+gsub(/^[ \t]+|[ \t]+$/, "", path)
-gsub(/^[ \t]+|[ \t]+$/, "", path) # strip leading/trailing spaces and tabs
+paths = (paths == "" ? path : paths "|" path)
 }
 /^owners:/ {
-owners = substr($0, index($0, $2)) # extract the maintainer info
+owners = substr($0, index($0, $2))
-split(owners, parts, /[()]/) # split out the GitHub IDs (the text in parentheses)
+n = split(owners, parts, /[()]/)
-github_ids = ""
+github_ids=""
-for (i=2; i<=length(parts); i+=2) {
+for (i=2; i<=n; i+=2) {
-github_ids = github_ids "@" parts[i] " " # concatenate as @user
+id=parts[i]
-}
+gsub(/^[ \t@]+|[ \t]+$/, "", id)
-print tag "|" path "|" github_ids
+if(id != "") github_ids=github_ids "@" id " "
+}
+print tag "|" paths "|" github_ids
+tag=""; paths=""; owners=""
 }
 ' MAINTAINERS > tag_data.csv

-- name: Generate reviewers list
+- name: Generate reviewers list and tag-file mapping
 id: generate_reviewers
 run: |
-rm -f triggered_reviewers.txt triggered_tags.txt unique_reviewers.txt unique_tags.txt
+rm -f triggered_reviewers.txt triggered_tags.txt unique_reviewers.txt unique_tags.txt tag_files_map.json tag_reviewers_map.txt
 touch triggered_reviewers.txt triggered_tags.txt unique_reviewers.txt unique_tags.txt

-while IFS='|' read -r tag path reviewers; do
+# 1. Read tag_data.csv and build tag -> [paths] and tag -> reviewers maps
-# escape regex special characters in the path
+declare -A tag_paths_map
-escaped_path=$(sed 's/[.[\*^$]/\\&/g' <<< "$path")
+declare -A tag_reviewers_map

-# match the path and all of its subdirectories with an extended regex
+while IFS='|' read -r tag paths reviewers; do
-if grep -qE "^$escaped_path(/.*)*" changed_files.txt; then
+IFS='|' read -ra path_arr <<< "$paths"
-echo "$reviewers" | tr -s ' ' '\n' | sed '/^$/d' >> triggered_reviewers.txt
+for p in "${path_arr[@]}"; do
-echo "$tag" >> triggered_tags.txt
+tag_paths_map["$tag"]+="$p;"
-echo "Matched: $path → $tag"
+done
-fi
+# Merge reviewers, de-duplicate, keep only valid entries
+existing_reviewers="${tag_reviewers_map["$tag"]}"
+all_reviewers="$existing_reviewers $reviewers"
+# keep only the @xxx format, de-duplicated
+all_reviewers=$(echo "$all_reviewers" | grep -o '@[A-Za-z0-9_-]\+' | sort -u | tr '\n' ' ')
+tag_reviewers_map["$tag"]="$all_reviewers"
 done < tag_data.csv

+# 2. For each tag, collect the changed files that match any of its paths
+declare -A tag_changedfiles_map
+while IFS= read -r changed; do
+for tag in "${!tag_paths_map[@]}"; do
+IFS=';' read -ra tpaths <<< "${tag_paths_map[$tag]}"
+for tpath in "${tpaths[@]}"; do
+[[ -z "$tpath" ]] && continue
+if [[ -f "$tpath" ]]; then
+# exact file match
+[[ "$changed" == "$tpath" ]] && tag_changedfiles_map["$tag"]+="$changed;"
+else
+# directory prefix match
+[[ "$changed" == $tpath* ]] && tag_changedfiles_map["$tag"]+="$changed;"
+fi
+done
+done
+done < changed_files.txt

+# 3. Output the merged reviewers and tags for each matched tag
+for tag in "${!tag_changedfiles_map[@]}"; do
+reviewers="${tag_reviewers_map[$tag]}"
+echo "$reviewers" | tr -s ' ' '\n' | sed '/^$/d' >> triggered_reviewers.txt
+echo "$tag" >> triggered_tags.txt
+done

 # Generate the de-duplicated unique_reviewers.txt and unique_tags.txt
 sort -u triggered_reviewers.txt > unique_reviewers.txt
 sort -u triggered_tags.txt > unique_tags.txt

-# Check whether any reviewers matched
+# 4. Write tag_files_map.json in the form { "tag1": ["file1","file2"], ... }
+{
+echo "{"
+first_tag=1
+for tag in "${!tag_changedfiles_map[@]}"; do
+[[ $first_tag -eq 0 ]] && echo ","
+echo -n " \"${tag}\": ["
+IFS=';' read -ra files <<< "${tag_changedfiles_map[$tag]}"
+file_list=""
+for f in "${files[@]}"; do
+[[ -z "$f" ]] && continue
+[[ -n "$file_list" ]] && file_list+=", "
+file_list+="\"$f\""
+done
+echo -n "$file_list"
+echo -n "]"
+first_tag=0
+done
+echo ""
+echo "}"
+} > tag_files_map.json

+# 5. Save the aggregated, de-duplicated reviewers to tag_reviewers_map.txt
+{
+for tag in "${!tag_reviewers_map[@]}"; do
+echo "$tag|${tag_reviewers_map[$tag]}"
+done
+} > tag_reviewers_map.txt

+# 6. Flag whether any reviewers were found
 if [[ -s unique_reviewers.txt ]]; then
 echo "HAS_REVIEWERS=true" >> $GITHUB_OUTPUT
 else
 echo "HAS_REVIEWERS=false" >> $GITHUB_OUTPUT
 fi

-echo "=== Matched Paths ==="
+echo "=== Matched Tags ==="
 cat unique_tags.txt
 echo "=== Matched Reviewers ==="
 cat unique_reviewers.txt
+echo "=== Tag-ChangedFiles Map ==="
+cat tag_files_map.json

 - name: Restore Reviewers Cache
 id: reviewers-cache-restore
-if: ${{ steps.changed_files.outputs.COMMENT_TIME != '' }}
+if: ${{ steps.changed_files.outputs.CACHE_TIMESTAMP != '' }}
 uses: actions/cache/restore@v4
 with:
 path: |
 unique_tags_bak.txt
 unique_reviewers_bak.txt
-key: ${{ runner.os }}-auto-assign-reviewers-${{ steps.extract-pr.outputs.PR_NUMBER }}-${{ steps.changed_files.outputs.COMMENT_TIME }}
+key: ${{ runner.os }}-auto-assign-reviewers-${{ steps.extract-pr.outputs.PR_NUMBER }}-${{ steps.changed_files.outputs.CACHE_TIMESTAMP }}-${{ github.run_id }}
+restore-keys: |
+${{ runner.os }}-auto-assign-reviewers-${{ steps.extract-pr.outputs.PR_NUMBER }}-${{ steps.changed_files.outputs.CACHE_TIMESTAMP }}-
+${{ runner.os }}-auto-assign-reviewers-${{ steps.extract-pr.outputs.PR_NUMBER }}-

 - name: Get approval status
 id: get_approval
 run: |
-current_time=$(date -u +"%Y-%m-%d %H:%M UTC")
+current_time=$(TZ='Asia/Shanghai' date +"%Y-%m-%d %H:%M CST")

-# Check that unique_reviewers.txt exists and is non-empty
 if [[ ! -s unique_reviewers.txt ]]; then
 echo "No reviewers found, creating empty unique_reviewers.txt"
 touch unique_reviewers.txt
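
For reference, assuming a hypothetical MAINTAINERS entry of roughly this form (the tag, path, and owner names below are made up):

    tag: bsp_hypothetical
    path: bsp/hypothetical
    owners: Some Maintainer(someuser), Another Maintainer(otheruser)

the reworked awk parser above would emit one pipe-separated line into tag_data.csv, joining multiple path: lines with '|' and keeping only the GitHub IDs found in parentheses:

    bsp_hypothetical|bsp/hypothetical|@someuser @otheruser
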
@@ -183,14 +286,12 @@ jobs:
 select($mention | inside($reviewers)) | # filter valid reviewers
 "\($mention) \(.created_at)" # output the reviewer and time
 ' <<< "$comments" >> approval_data.txt

 notified_users=""
 if [[ -f unique_reviewers_bak.txt ]]; then
 notified_users=$(cat unique_reviewers_bak.txt | xargs)
 else
 notified_users=""
 fi

 {
 echo "---"
 echo "### 📊 Current Review Status (Last Updated: $current_time)"
@@ -198,24 +299,22 @@ jobs:
 formatted_reviewers=""
 for r in $reviewers; do
 if [[ " ${notified_users[@]} " =~ " $reviewer " ]]; then
 formatted_reviewers+="${reviewer#@}"
 else
 formatted_reviewers+="$reviewer"
 fi
 done

 if [[ -n "${approvals[$reviewer]}" ]]; then
-timestamp=$(date -d "${approvals[$reviewer]}" -u +"%Y-%m-%d %H:%M UTC")
+timestamp=$(TZ='Asia/Shanghai' date -d "${approvals[$reviewer]}" +"%Y-%m-%d %H:%M CST")
 echo "- ✅ **$formatted_reviewers** Reviewed On $timestamp"
 else
 echo "- ⌛ **$formatted_reviewers** Pending Review"
 fi
 done < unique_reviewers.txt
 } > review_status.md

 echo "CURRENT_TIME=${current_time}" >> $GITHUB_OUTPUT

-- name: Generate review data
+- name: Generate review data (tag merge, no path in comment, changed files summary per tag)
 id: generate_review
 if: steps.generate_reviewers.outputs.HAS_REVIEWERS == 'true'
 run: |
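
For reference, the review_status.md fragment assembled by this step would look roughly like the following, with made-up reviewers and times; the @ is kept only for reviewers who have not been notified yet:

    ---
    ### 📊 Current Review Status (Last Updated: 2025-07-14 12:00 CST)
    - ✅ **someuser** Reviewed On 2025-07-14 11:30 CST
    - ⌛ **@otheruser** Pending Review
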
@@ -227,49 +326,55 @@ jobs:
 if [[ -f unique_tags_bak.txt ]]; then
 unique_tags_bak=$(cat unique_tags_bak.txt | xargs)
 fi
+# Read the tag -> files map
-existing_tags=""
+declare -A tag_files_map
-for r in $unique_tags; do
+eval "$(jq -r 'to_entries[] | "tag_files_map[\"\(.key)\"]=\"\(.value | join(";"))\"" ' tag_files_map.json)"
-if [[ " ${unique_tags_bak[@]} " =~ " $r " ]]; then
+# Read tag -> reviewers (only the aggregated, de-duplicated result)
-echo "$r is not in the array"
+declare -A tag_reviewers_map
-else
+while IFS='|' read -r tag reviewers; do
-existing_tags+="$r "
+tag_reviewers_map["$tag"]="$reviewers"
-fi
+done < tag_reviewers_map.txt
-done
+# Collect the reviewers that were already notified
+notified_users=""
-current_time=$(date -u +"%Y-%m-%d %H:%M UTC")
+if [[ -f unique_reviewers_bak.txt ]]; then
+notified_users=$(cat unique_reviewers_bak.txt | xargs)
+fi
+current_time=$(TZ='Asia/Shanghai' date +"%Y-%m-%d %H:%M CST")
 {
-# Generate the review assignment info
 echo "## 📌 Code Review Assignment"
 echo ""
+for tag in $unique_tags; do
-while IFS='|' read -r tag path reviewers; do
+reviewers="${tag_reviewers_map[$tag]}"
-if grep -qE "^$path(/|$)" changed_files.txt; then
+# strip trailing whitespace and extract valid @username entries
-echo "### 🏷️ Tag: $tag"
+reviewers=$(echo "$reviewers" | sed 's/[[:space:]]*$//' | grep -o '@[A-Za-z0-9_-]\+' | sort -u | tr '\n' ' ')
-echo "**Path:** \`$path\` "
+# format the reviewers display (drop the @ only for already-notified users)
-if [[ " ${existing_tags[@]} " =~ " $tag " ]]; then
+formatted_reviewers=""
-echo "**Reviewers:** $reviewers "
+for reviewer in $reviewers; do
+if [[ " ${notified_users[@]} " =~ " $reviewer " ]]; then
+formatted_reviewers+="${reviewer#@} " # drop the @ for users already notified
 else
-formatted_reviewers=""
+formatted_reviewers+="$reviewer " # keep the @ for users not yet notified
-for r in $reviewers; do
-formatted_reviewers+="${r#@} "
-done
-echo "**Reviewers:** $formatted_reviewers "
 fi
+done
-echo "<details>"
-echo "<summary><b>Changed Files</b> (Click to expand)</summary>"
+echo "### 🏷️ Tag: $tag"
 echo ""
-grep -E "^$path(/|$)" changed_files.txt | sed 's/^/- /' # list the matched changed files
+echo "**Reviewers:** $formatted_reviewers" # make sure the reviewers are shown
-echo ""
+echo "<details>"
-echo "</details>"
+echo "<summary><b>Changed Files</b> (Click to expand)</summary>"
 echo ""
-fi
+IFS=';' read -ra files <<< "${tag_files_map[$tag]}"
-done < tag_data.csv
+for file in "${files[@]}"; do
+[[ -z "$file" ]] && continue
+echo "- $file"
+done
+echo ""
+echo "</details>"
+echo ""
+done
 # Insert the review status
 cat review_status.md

 echo "---"
 echo "### 📝 Review Instructions"
 echo ""
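
For reference, the per-tag block emitted by the reworked step above would render roughly as follows, with a made-up tag, reviewers, and file; the Path line is gone and the matched files sit in a collapsible section:

    ## 📌 Code Review Assignment

    ### 🏷️ Tag: bsp_hypothetical

    **Reviewers:** @someuser @otheruser
    <details>
    <summary><b>Changed Files</b> (Click to expand)</summary>

    - bsp/hypothetical/drivers/drv_uart.c

    </details>
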
@@ -318,20 +423,42 @@ jobs:
 if: steps.generate_reviewers.outputs.HAS_REVIEWERS == 'true'
 run: |
 existing_comment=$(curl -s \
-"https://api.github.com/repos/${{ github.repository }}/issues/${{ steps.extract-pr.outputs.PR_NUMBER }}/comments" | \
+-H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" \
-jq -r '.[] | select(.user.login == "github-actions[bot]") | {body: .body} | @base64')
+"https://api.github.com/repos/${{ github.repository }}/issues/${{ steps.extract-pr.outputs.PR_NUMBER }}/comments")

+# Check if response is valid JSON
+if jq -e . >/dev/null 2>&1 <<<"$existing_comment"; then
+existing_comment=$(jq -r '.[] | select(.user.login == "github-actions[bot]") | {body: .body} | @base64' <<< "$existing_comment")
+else
+existing_comment=""
+echo "Warning: Invalid JSON response from GitHub API"
+echo "Response: $existing_comment"
+fi
 comment_body="${{ steps.get_approval.outputs.CURRENT_TIME }}"
-comment_time=$(date -d "$comment_body" +%s)
+comment_time=$(TZ='Asia/Shanghai' date -d "$comment_body" +%s)
-echo "CURRENT_TIME=${comment_time}" >> $GITHUB_OUTPUT
+echo "CACHE_TIMESTAMP=${comment_time}" >> $GITHUB_OUTPUT # use this variable name consistently
-cp unique_reviewers.txt unique_reviewers_bak.txt
+echo "Debug - Saving cache with timestamp: $comment_time"
-cp unique_tags.txt unique_tags_bak.txt

+mkdir -p $(dirname unique_reviewers_bak.txt)
+if [[ -s unique_reviewers.txt ]]; then
+cp unique_reviewers.txt unique_reviewers_bak.txt
+else
+touch unique_reviewers_bak.txt
+fi

+if [[ -s unique_tags.txt ]]; then
+cp unique_tags.txt unique_tags_bak.txt
+else
+touch unique_tags_bak.txt
+fi

 - name: Save Reviewers Cache
 id: reviewers-cache-save
 if: steps.generate_reviewers.outputs.HAS_REVIEWERS == 'true'
+continue-on-error: true
 uses: actions/cache/save@v4
 with:
 path: |
 unique_tags_bak.txt
 unique_reviewers_bak.txt
-key: ${{ runner.os }}-auto-assign-reviewers-${{ steps.extract-pr.outputs.PR_NUMBER }}-${{ steps.get_comment_time.outputs.CURRENT_TIME }}
+key: ${{ runner.os }}-auto-assign-reviewers-${{ steps.extract-pr.outputs.PR_NUMBER }}-${{ steps.get_comment_time.outputs.CACHE_TIMESTAMP }}-${{ github.run_id }}
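
For illustration, with the reworked key layout a saved cache entry on an ubuntu runner would be named roughly as below (the PR number, timestamp, and run id are made up), and the restore step falls back through the shorter restore-keys prefixes when no exact match exists:

    Linux-auto-assign-reviewers-12345-1752480000-16123456789
    Linux-auto-assign-reviewers-12345-1752480000-
    Linux-auto-assign-reviewers-12345-
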
@@ -20,16 +20,28 @@ on:
 branches:
 - master
 paths-ignore:
+- .github/**
+- .gitee/**
+- .hook/**
 - documentation/**
+- examples/**
 - '**/README.md'
 - '**/README_zh.md'
+- ChangeLog.md
+- MAINTAINERS
 pull_request:
 branches:
 - master
 paths-ignore:
+- .github/**
+- .gitee/**
+- .hook/**
 - documentation/**
+- examples/**
 - '**/README.md'
 - '**/README_zh.md'
+- ChangeLog.md
+- MAINTAINERS
 repository_dispatch:
 types:
 - online-pkgs-static-building-trigger-event