@@ -18,61 +18,85 @@ jobs:
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Required for GitHub API authentication
         run: |
-          # Fetch commits for the current Pull Request using the GitHub API
-          # The event payload contains all necessary information about the PR.
-          # We paginate to ensure all commits are fetched if there are many.
           PR_COMMITS_JSON=$(curl -sS -H "Accept: application/vnd.github.v3+json" \
             -H "Authorization: token $GITHUB_TOKEN" \
-            "${{ github.event.pull_request.commits_url }}?per_page=100") # Fetch commits from the PR's commits_url
+            "${{ github.event.pull_request.commits_url }}?per_page=100")
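+            # Note: a single request returns at most 100 commits (per_page max); PRs with more commits would need pagination here.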

-          # Robustly check for an error message: only attempt to read '.message' if the response is an object.
           ERROR_MESSAGE=$(echo "$PR_COMMITS_JSON" | jq -r 'if type == "object" and .message then .message else null end')
           if [ "$ERROR_MESSAGE" != "null" ] && [ -n "$ERROR_MESSAGE" ]; then
             echo "ERROR: Failed to retrieve PR commits from GitHub API: $ERROR_MESSAGE"
             echo "API Response snippet: $(echo "$PR_COMMITS_JSON" | head -n 5)"
-            exit 1 # Fail the action if fetching PR commits fails
+            exit 1
           fi

-          # Extract SHAs from the JSON response and join them with spaces.
           COMMITS=$(echo "$PR_COMMITS_JSON" | jq -r 'map(.sha) | join(" ")')

           if [ -z "$COMMITS" ]; then
             echo "No commits found in this Pull Request via GitHub API. This might indicate an issue with the PR or API."
-            exit 1 # Fail if no PR commits can be retrieved (after confirming no API error message was returned)
+            exit 1
           fi

-          # Set the output variable `PR_COMMITS` with the space-separated list of SHAs.
           echo "PR_COMMITS=${COMMITS}" >> "$GITHUB_OUTPUT"

       - name: Check each PR commit for Linux upstream hash and related Fixes tag # Step 2: Loop through each PR commit and perform checks
         id: check_results # Assign an ID to this step to capture its output
         env:
-          # GITHUB_TOKEN is a secret token provided by GitHub Actions for authentication
-          # to the GitHub API. It has read/write permissions for the current repository.
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          # Define the owner and name of the upstream Linux kernel repository mirror on GitHub.
           LINUX_KERNEL_REPO_OWNER: torvalds
           LINUX_KERNEL_REPO_NAME: linux
+          # Safeguard: Max pages to fetch. Adjust if your referenced commits are extremely old
+          # and their fixes are deep in history, but be mindful of API rate limits and job duration.
+          MAX_PAGES_TO_FETCH: 100 # 100 pages * 100 commits/page = 10,000 commits max
         run: |
-          # Initialize a variable to accumulate all output for the PR comment.
+          # Use printf -v for robust multi-line string accumulation for the PR comment
           PR_COMMENT_BODY_ACCUMULATOR=""

-          # Loop through each commit SHA identified in the previous step
+          # Get the HEAD commit SHA and its date for the Linux kernel master branch
+          printf -v PR_COMMENT_BODY_ACCUMULATOR "%sGetting current HEAD of '%s/%s' master branch...\n" \
+            "$PR_COMMENT_BODY_ACCUMULATOR" "$LINUX_KERNEL_REPO_OWNER" "$LINUX_KERNEL_REPO_NAME"
+
+          HEAD_COMMIT_DETAILS_JSON=$(curl -sS -H "Accept: application/vnd.github.v3+json" \
+            -H "Authorization: token $GITHUB_TOKEN" \
+            "https://api.github.com/repos/${LINUX_KERNEL_REPO_OWNER}/${LINUX_KERNEL_REPO_NAME}/commits/master")
+
+          ERROR_MESSAGE=$(echo "$HEAD_COMMIT_DETAILS_JSON" | jq -r 'if type == "object" and .message then .message else null end')
+          if [ "$ERROR_MESSAGE" != "null" ] && [ -n "$ERROR_MESSAGE" ]; then
+            printf -v PR_COMMENT_BODY_ACCUMULATOR "%s ERROR: Could not retrieve HEAD commit for Linux kernel master: %s\n" \
+              "$PR_COMMENT_BODY_ACCUMULATOR" "$ERROR_MESSAGE"
+            printf -v PR_COMMENT_BODY_ACCUMULATOR "%s API Response snippet: %s\n" \
+              "$PR_COMMENT_BODY_ACCUMULATOR" "$(echo "$HEAD_COMMIT_DETAILS_JSON" | head -n 5)"
+            echo "PR_COMMENT_BODY<<EOF" >> "$GITHUB_OUTPUT"
+            echo "$PR_COMMENT_BODY_ACCUMULATOR" >> "$GITHUB_OUTPUT"
+            echo "EOF" >> "$GITHUB_OUTPUT"
+            exit 1
+          fi
+
+          HEAD_COMMIT_SHA=$(echo "$HEAD_COMMIT_DETAILS_JSON" | jq -r '.sha')
+          HEAD_COMMIT_DATE=$(echo "$HEAD_COMMIT_DETAILS_JSON" | jq -r '.commit.committer.date')
+          printf -v PR_COMMENT_BODY_ACCUMULATOR "%s Linux kernel master HEAD: \`%s\` (as of %s)\n\n" \
+            "$PR_COMMENT_BODY_ACCUMULATOR" "$HEAD_COMMIT_SHA" "$HEAD_COMMIT_DATE"
+
+
+          # Loop through each commit SHA identified in the current PR
           for PR_COMMIT_SHA in ${{ steps.pr_commits.outputs.PR_COMMITS }}; do
-            PR_COMMENT_BODY_ACCUMULATOR+="--- Checking PR commit: \`$PR_COMMIT_SHA\` ---\n"
+            printf -v PR_COMMENT_BODY_ACCUMULATOR "%s--- Checking PR commit: \`%s\` ---\n" \
+              "$PR_COMMENT_BODY_ACCUMULATOR" "$PR_COMMIT_SHA"

             # --- Fetch the full commit message of the PR commit via GitHub API ---
-            # Using the /repos/{owner}/{repo}/commits/{ref} endpoint for the current repository
             PR_COMMIT_DETAILS_JSON=$(curl -sS -H "Accept: application/vnd.github.v3+json" \
               -H "Authorization: token $GITHUB_TOKEN" \
               "https://api.github.com/repos/${{ github.repository_owner }}/${{ github.event.repository.name }}/commits/${PR_COMMIT_SHA}")

-            # Robustly check for an error message.
             ERROR_MESSAGE=$(echo "$PR_COMMIT_DETAILS_JSON" | jq -r 'if type == "object" and .message then .message else null end')
             if [ "$ERROR_MESSAGE" != "null" ] && [ -n "$ERROR_MESSAGE" ]; then
-              PR_COMMENT_BODY_ACCUMULATOR+=" ERROR: Could not retrieve commit message for PR commit \`$PR_COMMIT_SHA\` from GitHub API: $ERROR_MESSAGE\n"
-              PR_COMMENT_BODY_ACCUMULATOR+=" API Response snippet: $(echo "$PR_COMMIT_DETAILS_JSON" | head -n 5)\n"
-              exit 1 # Fail if PR commit message cannot be retrieved
+              printf -v PR_COMMENT_BODY_ACCUMULATOR "%s ERROR: Could not retrieve commit message for PR commit \`%s\` from GitHub API: %s\n" \
+                "$PR_COMMENT_BODY_ACCUMULATOR" "$PR_COMMIT_SHA" "$ERROR_MESSAGE"
+              printf -v PR_COMMENT_BODY_ACCUMULATOR "%s API Response snippet: %s\n" \
+                "$PR_COMMENT_BODY_ACCUMULATOR" "$(echo "$PR_COMMIT_DETAILS_JSON" | head -n 5)"
+              echo "PR_COMMENT_BODY<<EOF" >> "$GITHUB_OUTPUT"
+              echo "$PR_COMMENT_BODY_ACCUMULATOR" >> "$GITHUB_OUTPUT"
+              echo "EOF" >> "$GITHUB_OUTPUT"
+              exit 1
             fi

             PR_COMMIT_MESSAGE=$(echo "$PR_COMMIT_DETAILS_JSON" | jq -r '.commit.message')
@@ -81,82 +105,122 @@ jobs:
             UPSTREAM_LINUX_HASH=$(echo "$PR_COMMIT_MESSAGE" | grep -Eo "^commit [0-9a-f]{40}$" | awk '{print $2}')

             if [ -z "$UPSTREAM_LINUX_HASH" ]; then
-              PR_COMMENT_BODY_ACCUMULATOR+=" No \`commit <upstream_linux_commit_hash>\` line found in PR commit \`$PR_COMMIT_SHA\`. Skipping upstream check for this commit.\n"
-              PR_COMMENT_BODY_ACCUMULATOR+="\n"
+              printf -v PR_COMMENT_BODY_ACCUMULATOR "%s No \`commit <upstream_linux_commit_hash>\` line found in PR commit \`%s\`. Skipping upstream check for this commit.\n" \
+                "$PR_COMMENT_BODY_ACCUMULATOR" "$PR_COMMIT_SHA"
+              printf -v PR_COMMENT_BODY_ACCUMULATOR "%s\n" "$PR_COMMENT_BODY_ACCUMULATOR"
               continue
             fi

-            PR_COMMENT_BODY_ACCUMULATOR+=" Found upstream Linux hash to check for fixes: \`$UPSTREAM_LINUX_HASH\`\n"
+            printf -v PR_COMMENT_BODY_ACCUMULATOR "%s Found upstream Linux hash to check for fixes: \`%s\`\n" \
+              "$PR_COMMENT_BODY_ACCUMULATOR" "$UPSTREAM_LINUX_HASH"
+
+            # --- Fetch the commit date of the UPSTREAM_LINUX_HASH from the Linux kernel repo ---
+            printf -v PR_COMMENT_BODY_ACCUMULATOR "%s Fetching details for upstream Linux commit \`%s\` to get its date...\n" \
+              "$PR_COMMENT_BODY_ACCUMULATOR" "$UPSTREAM_LINUX_HASH"

-            # --- SEARCH for "Fixes:" tag in upstream Linux kernel using GitHub Search API ---
-            # Extract the first 12 characters for the short SHA, commonly used in Fixes: tags in Linux kernel.
-            UPSTREAM_LINUX_HASH_SHORT=$(echo "$UPSTREAM_LINUX_HASH" | cut -c 1-12)
+            UPSTREAM_COMMIT_DETAILS_JSON=$(curl -sS -H "Accept: application/vnd.github.v3+json" \
+              -H "Authorization: token $GITHUB_TOKEN" \
+              "https://api.github.com/repos/${LINUX_KERNEL_REPO_OWNER}/${LINUX_KERNEL_REPO_NAME}/commits/${UPSTREAM_LINUX_HASH}")

-            PR_COMMENT_BODY_ACCUMULATOR+=" Searching for upstream commits in '${LINUX_KERNEL_REPO_OWNER}/${LINUX_KERNEL_REPO_NAME}' with pattern: 'Fixes: ${UPSTREAM_LINUX_HASH_SHORT}' using Search API...\n"
+            ERROR_MESSAGE=$(echo "$UPSTREAM_COMMIT_DETAILS_JSON" | jq -r 'if type == "object" and .message then .message else null end')
+            if [ "$ERROR_MESSAGE" != "null" ] && [ -n "$ERROR_MESSAGE" ]; then
+              printf -v PR_COMMENT_BODY_ACCUMULATOR "%s ERROR: Could not retrieve details or commit date for upstream hash \`%s\` from Linux kernel repo: %s\n" \
+                "$PR_COMMENT_BODY_ACCUMULATOR" "$UPSTREAM_LINUX_HASH" "$ERROR_MESSAGE"
+              printf -v PR_COMMENT_BODY_ACCUMULATOR "%s API Response snippet: %s\n" \
+                "$PR_COMMENT_BODY_ACCUMULATOR" "$(echo "$UPSTREAM_COMMIT_DETAILS_JSON" | head -n 5)"
+              echo "PR_COMMENT_BODY<<EOF" >> "$GITHUB_OUTPUT"
+              echo "$PR_COMMENT_BODY_ACCUMULATOR" >> "$GITHUB_OUTPUT"
+              echo "EOF" >> "$GITHUB_OUTPUT"
+              exit 1
+            fi
+            UPSTREAM_COMMIT_DATE=$(echo "$UPSTREAM_COMMIT_DETAILS_JSON" | jq -r '.commit.committer.date')
+
+            if [ "$UPSTREAM_COMMIT_DATE" == "null" ] || [ -z "$UPSTREAM_COMMIT_DATE" ]; then
+              printf -v PR_COMMENT_BODY_ACCUMULATOR "%s ERROR: Upstream commit \`%s\` found, but its date could not be extracted.\n" \
+                "$PR_COMMENT_BODY_ACCUMULATOR" "$UPSTREAM_LINUX_HASH"
+              printf -v PR_COMMENT_BODY_ACCUMULATOR "%s API Response snippet: %s\n" \
+                "$PR_COMMENT_BODY_ACCUMULATOR" "$(echo "$UPSTREAM_COMMIT_DETAILS_JSON" | head -n 5)"
+              echo "PR_COMMENT_BODY<<EOF" >> "$GITHUB_OUTPUT"
+              echo "$PR_COMMENT_BODY_ACCUMULATOR" >> "$GITHUB_OUTPUT"
+              echo "EOF" >> "$GITHUB_OUTPUT"
+              exit 1
+            fi
+            printf -v PR_COMMENT_BODY_ACCUMULATOR "%s Upstream commit \`%s\` date: %s\n" \
+              "$PR_COMMENT_BODY_ACCUMULATOR" "$UPSTREAM_LINUX_HASH" "$UPSTREAM_COMMIT_DATE"
+
+            printf -v PR_COMMENT_BODY_ACCUMULATOR "%s Searching for upstream commits on 'master' branch between \`%s\` and \`%s\` that fix \`%s\`...\n" \
+              "$PR_COMMENT_BODY_ACCUMULATOR" "$UPSTREAM_COMMIT_DATE" "$HEAD_COMMIT_DATE" "$UPSTREAM_LINUX_HASH"

             FOUND_FIXING_COMMITS=()
-            SEARCH_PAGE=1
-            MAX_SEARCH_PAGES=3 # Limit the search to prevent excessive API calls (up to 300 commits)
-            ALL_SEARCH_RESULTS_JSON="[]" # Initialize as an empty JSON array
-
-            while [ "$SEARCH_PAGE" -le "$MAX_SEARCH_PAGES" ]; do
-              PR_COMMENT_BODY_ACCUMULATOR+=" Fetching search results (page $SEARCH_PAGE)...\n"
-              # Construct the search query using the short SHA for better search relevance
-              SEARCH_QUERY="Fixes: ${UPSTREAM_LINUX_HASH_SHORT} repo:${LINUX_KERNEL_REPO_OWNER}/${LINUX_KERNEL_REPO_NAME}"
-              # URL-encode the query string for safe API calls
-              ENCODED_SEARCH_QUERY=$(echo -n "$SEARCH_QUERY" | jq -sRr @uri)
-
-              CURRENT_PAGE_RESULTS_JSON=$(curl -sS -H "Accept: application/vnd.github.v3+json" \
+            PAGE=1
+
+            # Loop to fetch all relevant commits between the target commit's date and HEAD's date
+            while [ "$PAGE" -le "$MAX_PAGES_TO_FETCH" ]; do # Safeguard against infinite loops for extremely old commits
+              printf -v PR_COMMENT_BODY_ACCUMULATOR "%s Fetching upstream Linux commits (page %d)...\n" \
+                "$PR_COMMENT_BODY_ACCUMULATOR" "$PAGE"
+
+              CURRENT_PAGE_COMMITS_JSON=$(curl -sS -H "Accept: application/vnd.github.v3+json" \
                 -H "Authorization: token $GITHUB_TOKEN" \
-                "https://api.github.com/search/commits?q=${ENCODED_SEARCH_QUERY}&per_page=100&page=$SEARCH_PAGE")
+                "https://api.github.com/repos/${LINUX_KERNEL_REPO_OWNER}/${LINUX_KERNEL_REPO_NAME}/commits?sha=master&since=${UPSTREAM_COMMIT_DATE}&until=${HEAD_COMMIT_DATE}&per_page=100&page=$PAGE")
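+              # NOTE: 'since'/'until' restrict the listing to commits dated within this window, which is why the upstream commit's date and the master HEAD date were fetched above.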

-              # Robustly check for an API error message from the search endpoint.
-              API_ERROR=$(echo "$CURRENT_PAGE_RESULTS_JSON" | jq -r 'if type == "object" and .message then .message else null end')
+              API_ERROR=$(echo "$CURRENT_PAGE_COMMITS_JSON" | jq -r 'if type == "object" and .message then .message else null end')
               if [ "$API_ERROR" != "null" ] && [ -n "$API_ERROR" ]; then
-                PR_COMMENT_BODY_ACCUMULATOR+=" ERROR: Failed to retrieve search results (page $SEARCH_PAGE) from GitHub Search API: $API_ERROR\n"
-                PR_COMMENT_BODY_ACCUMULATOR+=" API Response snippet: $(echo "$CURRENT_PAGE_RESULTS_JSON" | head -n 5)\n"
+                printf -v PR_COMMENT_BODY_ACCUMULATOR "%s ERROR: Failed to retrieve subsequent upstream commits (page %d): %s\n" \
+                  "$PR_COMMENT_BODY_ACCUMULATOR" "$PAGE" "$API_ERROR"
+                printf -v PR_COMMENT_BODY_ACCUMULATOR "%s API Response snippet: %s\n" \
+                  "$PR_COMMENT_BODY_ACCUMULATOR" "$(echo "$CURRENT_PAGE_COMMITS_JSON" | head -n 5)"
+                echo "PR_COMMENT_BODY<<EOF" >> "$GITHUB_OUTPUT"
+                echo "$PR_COMMENT_BODY_ACCUMULATOR" >> "$GITHUB_OUTPUT"
+                echo "EOF" >> "$GITHUB_OUTPUT"
                 exit 1
               fi

-              # Check if there are any items on this page (search results are in the 'items' array)
-              CURRENT_PAGE_ITEMS=$(echo "$CURRENT_PAGE_RESULTS_JSON" | jq '.items | length')
-              if [ "$CURRENT_PAGE_ITEMS" -eq 0 ]; then
-                PR_COMMENT_BODY_ACCUMULATOR+=" No more search results found on this page. Stopping search.\n"
-                break # No more commits to process
+              CURRENT_PAGE_COUNT=$(echo "$CURRENT_PAGE_COMMITS_JSON" | jq 'length')
+              if [ "$CURRENT_PAGE_COUNT" -eq 0 ]; then
+                printf -v PR_COMMENT_BODY_ACCUMULATOR "%s No more subsequent upstream commits found within the date range. Stopping search.\n" \
+                  "$PR_COMMENT_BODY_ACCUMULATOR"
+                break
               fi

-              # Append current page's items to the accumulated results
-              ALL_SEARCH_RESULTS_JSON=$(echo "$ALL_SEARCH_RESULTS_JSON" "$CURRENT_PAGE_RESULTS_JSON" | jq -s '.[0] + .[1].items')
-
-              SEARCH_PAGE=$((SEARCH_PAGE + 1))
-            done # End of while loop for search pages
-
-            # Process all accumulated search results with a single jq command for filtering
-            # This filters for the exact "Fixes: <FULL_SHA>" and extracts relevant info.
-            FILTERED_COMMITS=$(echo "$ALL_SEARCH_RESULTS_JSON" | jq -r --arg full_hash "$UPSTREAM_LINUX_HASH" '
-              .[] |
-              select(.commit.message | test("Fixes: [0-9a-fA-F]*\\Q" + $full_hash + "\\E.*"; "i")) | # "i" for case-insensitive
-              "\(.sha):\(.commit.message | split("\n")[] | select(test("Fixes: [0-9a-fA-F]*\\Q" + $full_hash + "\\E.*"; "i")))"
-            ')
-
-            if [ -n "$FILTERED_COMMITS" ]; then
-              IFS=$'\n' read -r -d '' -a FOUND_FIXING_COMMITS <<< "$FILTERED_COMMITS"
-            else
-              FOUND_FIXING_COMMITS=() # Ensure it's an empty array if no matches
-            fi
+              # Iterate through each commit object on the current page, extracting SHA and message directly.
+              while read -r commit_json_line; do
+                COMMIT_SHA=$(echo "$commit_json_line" | jq -r '.sha')
+                COMMIT_MESSAGE=$(echo "$commit_json_line" | jq -r '.commit.message')
+
+                # Skip the commit itself if it appears in the date range
+                if [ "$COMMIT_SHA" = "$UPSTREAM_LINUX_HASH" ]; then
+                  printf -v PR_COMMENT_BODY_ACCUMULATOR "%s Skipping self-reference check for commit \`%s\`.\n" \
+                    "$PR_COMMENT_BODY_ACCUMULATOR" "$COMMIT_SHA"
+                  continue
+                fi
+
+                # Check if this upstream commit's message contains a "Fixes:" tag for our hash.
+                # Fixes: tags normally carry a 12-character abbreviated SHA, so match on the short prefix of the full hash.
+                if echo "$COMMIT_MESSAGE" | grep -Eiq "^Fixes: ${UPSTREAM_LINUX_HASH:0:12}"; then
+                  # Extract the specific "Fixes:" line for display
+                  FIX_MESSAGE_SNIPPET=$(echo "$COMMIT_MESSAGE" | grep -Ei "^Fixes: ${UPSTREAM_LINUX_HASH:0:12}")
+                  FOUND_FIXING_COMMITS+=("$COMMIT_SHA:$FIX_MESSAGE_SNIPPET")
+                fi
+              done < <(echo "$CURRENT_PAGE_COMMITS_JSON" | jq -c '.[]') # process substitution keeps array/accumulator updates in this shell
+
+              PAGE=$((PAGE + 1))
+            done # End of paging while loop

             if [ ${#FOUND_FIXING_COMMITS[@]} -gt 0 ]; then
-              PR_COMMENT_BODY_ACCUMULATOR+=" SUCCESS: Found ${#FOUND_FIXING_COMMITS[@]} upstream Linux commit(s) that fix \`$UPSTREAM_LINUX_HASH\`:\n"
+              printf -v PR_COMMENT_BODY_ACCUMULATOR "%s SUCCESS: Found %d upstream Linux commit(s) that fix \`%s\`:\n" \
+                "$PR_COMMENT_BODY_ACCUMULATOR" "${#FOUND_FIXING_COMMITS[@]}" "$UPSTREAM_LINUX_HASH"
               for FIX_COMMIT_ENTRY in "${FOUND_FIXING_COMMITS[@]}"; do
                 # Split SHA and message part for display
                 FIX_COMMIT_SHA=$(echo "$FIX_COMMIT_ENTRY" | cut -d ':' -f 1)
-                FIX_MESSAGE_SNIPPET=$(echo "$FIX_COMMIT_ENTRY" | cut -d ':' -f 2-)
-                PR_COMMENT_BODY_ACCUMULATOR+=" - \`$FIX_COMMIT_SHA\` (Fixes tag: \`$FIX_MESSAGE_SNIPPET\`)\n"
+                FIX_MESSAGE_SNIPPET=$(echo "$FIX_COMMIT_ENTRY" | cut -d ':' -f 2-) # Get everything after the first colon
+                printf -v PR_COMMENT_BODY_ACCUMULATOR "%s - \`%s\` (Fixes tag: \`%s\`)\n" \
+                  "$PR_COMMENT_BODY_ACCUMULATOR" "$FIX_COMMIT_SHA" "$FIX_MESSAGE_SNIPPET"
               done
             else
-              PR_COMMENT_BODY_ACCUMULATOR+=" No upstream fixes found for \`$UPSTREAM_LINUX_HASH\`.\n"
+              printf -v PR_COMMENT_BODY_ACCUMULATOR "%s No upstream fixes found for \`%s\`.\n" \
+                "$PR_COMMENT_BODY_ACCUMULATOR" "$UPSTREAM_LINUX_HASH"
             fi
-            PR_COMMENT_BODY_ACCUMULATOR+="\n"
+            printf -v PR_COMMENT_BODY_ACCUMULATOR "%s\n" "$PR_COMMENT_BODY_ACCUMULATOR"
           done # End of for PR_COMMIT_SHA loop

           # Set the output variable `PR_COMMENT_BODY` using EOF to preserve newlines for the PR comment.
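
For local debugging, the date-window walk above can be approximated against a plain clone of the kernel tree. The sketch below is illustrative only: it assumes a local torvalds/linux checkout in ./linux with master fetched and the full 40-character upstream SHA already exported as UPSTREAM_LINUX_HASH, and it lists later mainline commits whose message carries a Fixes: tag starting with that commit's 12-character abbreviation.

    # Hypothetical local equivalent of the workflow's check (not part of the workflow itself).
    # Export UPSTREAM_LINUX_HASH with the full upstream SHA before running.
    git -C linux log --oneline \
      --grep="Fixes: ${UPSTREAM_LINUX_HASH:0:12}" \
      "${UPSTREAM_LINUX_HASH}..master"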