Skip to content

Commit ce1bae9

Browse files
committed
Automatically send the RC vote email
1 parent 35b1d9f commit ce1bae9

File tree

3 files changed

+93
-1
lines changed

3 files changed

+93
-1
lines changed

dev/create-release/release-build.sh

Lines changed: 91 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -538,6 +538,97 @@ if [[ "$1" == "publish-release" ]]; then
538538
-H "Content-Type:application/xml" -v \
539539
$NEXUS_ROOT/profiles/$NEXUS_PROFILE/finish)
540540
echo "Closed Nexus staging repository: $staged_repo_id"
echo "Sending the RC vote email"
EMAIL_TO="dev@spark.apache.org"
EMAIL_SUBJECT="[VOTE] Release Spark ${SPARK_VERSION} (RC${SPARK_RC_COUNT})"

# Vote deadline: 4 days from now, rendered in Pacific Time (PST/PDT).
# NOTE(review): 'date -d' is GNU-specific; fine inside the Linux release image.
DEADLINE=$(TZ=America/Los_Angeles date -d "+4 days" "+%a, %d %b %Y %H:%M:%S %Z")

# Resolve the JIRA version id for this release so the email can link to the
# "bug fixes in this version" page.  tr breaks the compact JSON on every '}'
# so each version object ends up on its own line, then the lines near the
# matching "name" entry are searched for the numeric "id".
# (grep/sed instead of jq because jq is not guaranteed in the release image.)
JIRA_API_URL="https://issues.apache.org/jira/rest/api/2/project/SPARK/versions"
JIRA_VERSION_ID=$(curl -s "$JIRA_API_URL" | \
  tr '}' '\n' | \
  grep -F "\"name\":\"$SPARK_VERSION\"" -A 5 | \
  grep '"id"' | \
  sed -E 's/.*"id":"?([0-9]+)"?.*/\1/' | \
  head -1)
if [ -z "$JIRA_VERSION_ID" ]; then
  # Best effort: still send the email, but make the failure visible in the log
  # instead of silently shipping a broken JIRA link.
  echo "WARNING: could not resolve the JIRA version id for $SPARK_VERSION;" \
       "the bug-fix link in the vote email will be broken" >&2
fi

# Configure msmtp to relay through the ASF mail servers.  The config file
# contains $ASF_PASSWORD, so create it 0600 from the start (umask in a
# subshell) rather than chmod-ing after the secret has already been written
# to a world-readable file.
(
  umask 077
  cat > ~/.msmtprc <<EOF
defaults
auth on
tls on
tls_trust_file /etc/ssl/certs/ca-certificates.crt
logfile ~/.msmtp.log

account apache
host mail-relay.apache.org
port 587
from $ASF_USERNAME@apache.org
user $ASF_USERNAME
password $ASF_PASSWORD

account default : apache
EOF
)
chmod 600 ~/.msmtprc

# Compose and send the email; 'msmtp -t' takes the recipients from the
# To: header of the message itself.
msmtp -t <<MAIL_EOF
From: $ASF_USERNAME@apache.org
To: $EMAIL_TO
Subject: $EMAIL_SUBJECT

Please vote on releasing the following candidate as Apache Spark version ${SPARK_VERSION}.

The vote is open until ${DEADLINE} and passes if a majority +1 PMC votes are cast, with
a minimum of 3 +1 votes.

[ ] +1 Release this package as Apache Spark ${SPARK_VERSION}
[ ] -1 Do not release this package because ...

To learn more about Apache Spark, please see https://spark.apache.org/

The tag to be voted on is ${GIT_REF} (commit ${git_hash}):
https://github.com/apache/spark/tree/${GIT_REF}

The release files, including signatures, digests, etc. can be found at:
https://dist.apache.org/repos/dist/dev/spark/${GIT_REF}-bin/

Signatures used for Spark RCs can be found in this file:
https://dist.apache.org/repos/dist/dev/spark/KEYS

The staging repository for this release can be found at:
https://repository.apache.org/content/repositories/${staged_repo_id}/

The documentation corresponding to this release can be found at:
https://dist.apache.org/repos/dist/dev/spark/${GIT_REF}-docs/

The list of bug fixes going into ${SPARK_VERSION} can be found at the following URL:
https://issues.apache.org/jira/projects/SPARK/versions/${JIRA_VERSION_ID}

FAQ

=========================
How can I help test this release?
=========================

If you are a Spark user, you can help us test this release by taking
an existing Spark workload and running on this release candidate, then
reporting any regressions.

If you're working in PySpark you can set up a virtual env and install
the current RC via "pip install https://dist.apache.org/repos/dist/dev/spark/${GIT_REF}-bin/pyspark-${SPARK_VERSION}.tar.gz"
and see if anything important breaks.
In the Java/Scala, you can add the staging repository to your project's resolvers and test
with the RC (make sure to clean up the artifact cache before/after so
you don't end up building with an out of date RC going forward).
MAIL_EOF
541632
fi
542633

543634
popd

dev/create-release/release-util.sh

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -141,6 +141,7 @@ function get_release_info {
141141
# An SPARK_RC_COUNT set in the environment overrides the computed RC number;
# otherwise keep whatever RC_COUNT was derived above.
RC_COUNT="${SPARK_RC_COUNT:-$RC_COUNT}"
# Re-export so child scripts (e.g. the vote-email step) see the effective value.
export SPARK_RC_COUNT="$RC_COUNT"

# Check if the RC already exists, and if re-creating the RC, skip tag creation.
RELEASE_TAG="v${RELEASE_VERSION}-rc${RC_COUNT}"

dev/create-release/spark-rm/Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,7 @@ RUN apt-get clean && apt-get update && $APT_INSTALL gnupg ca-certificates && \
7777
# Install R packages and dependencies used when building.
7878
# R depends on pandoc*, libssl (which are installed above).
7979
# Note that PySpark doc generation also needs pandoc due to nbsphinx
80-
$APT_INSTALL r-base r-base-dev && \
80+
$APT_INSTALL r-base r-base-dev msmtp && \
8181
$APT_INSTALL libcurl4-openssl-dev libgit2-dev libssl-dev libxml2-dev && \
8282
$APT_INSTALL texlive-latex-base texlive texlive-fonts-extra texinfo qpdf texlive-latex-extra && \
8383
$APT_INSTALL libfontconfig1-dev libharfbuzz-dev libfribidi-dev libfreetype6-dev libpng-dev libtiff5-dev libjpeg-dev && \

0 commit comments

Comments
 (0)