Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
38 changes: 29 additions & 9 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,21 @@ lazy val isFipsRelease = {
// scalastyle:on println
result
}
lazy val includeFatJarsAndBundles = {
  // PUBLISH=true signals a Maven publish, which must exclude the fat JAR and the
  // ZIP/tarball bundle artifacts; in every other case the extra artifacts are kept.
  val publishingToMaven = sys.env.getOrElse("PUBLISH", "false").toBoolean
  val include = !publishingToMaven
  // scalastyle:off println
  println(s"Including Fat JARs and Bundles in published artifacts: $include")
  // scalastyle:on println
  include
}
/** Returns true when the artifact classifier `c` denotes a fat JAR or bundle artifact. */
def isFatJarOrBundle(c: String): Boolean =
  Seq("with-dependencies", "fat-test", "bundle").exists(c.contains)

lazy val snowparkName = s"snowpark${if (isFipsRelease) "-fips" else ""}"
lazy val jdbcName = s"snowflake-jdbc${if (isFipsRelease) "-fips" else ""}"
lazy val snowparkVersion = "1.17.0-SNAPSHOT"
lazy val snowparkVersion = "1.17.0"

lazy val Javadoc = config("genjavadoc") extend Compile

Expand All @@ -26,6 +38,7 @@ lazy val javadocSettings = inConfig(Javadoc)(Defaults.configSettings) ++ Seq(
!(s.getParent.contains("internal") || s.getParent.contains("Internal"))),
Javadoc / javacOptions := Seq(
"--allow-script-in-comments",
"-use",
"-windowtitle", s"Snowpark Java API Reference $snowparkVersion",
"-doctitle", s"Snowpark Java API Reference $snowparkVersion",
"-header", s"""<div style="margin-top: 14px"><strong>
Expand Down Expand Up @@ -165,13 +178,11 @@ lazy val root = (project in file("."))

// Release settings

// Release JAR including compiled test classes
Test / packageBin / publishArtifact := true,
// Also publish a test-sources JAR
Test / packageSrc / publishArtifact := true,
Test / packageSrc / artifact :=
(Compile / packageSrc / artifact).value.withClassifier(Some("tests-sources")),
addArtifact(Test / packageSrc / artifact, Test / packageSrc),
// Disable publishing the source files JAR unless publishing to maven.
Compile / packageSrc / publishArtifact := !includeFatJarsAndBundles,

// Disable publishing test source files in all scenarios.
Test / packageSrc / publishArtifact := false,

// Fat JAR settings
assembly / assemblyJarName :=
Expand Down Expand Up @@ -247,6 +258,16 @@ lazy val root = (project in file("."))
Artifact(name = snowparkName, `type` = "bundle", extension = "tar.gz", classifier = "bundle"),
Universal / packageZipTarball),

// Explicitly list checksum files to be generated for visibility
checksums := Seq("md5", "sha1"),

// Filter out bundles and fat jars if publishing to maven
artifacts := artifacts.value filter (
a => includeFatJarsAndBundles || !isFatJarOrBundle(a.classifier.getOrElse(""))),
packagedArtifacts := packagedArtifacts.value filter (
af => includeFatJarsAndBundles || !isFatJarOrBundle(af._1.classifier.getOrElse(""))),

// Signed publish settings
credentials += Credentials(Path.userHome / ".ivy2" / ".credentials"),
// Set up GPG key for release build from environment variable: GPG_HEX_CODE
// Build jenkins job must have set it, otherwise, the release build will fail.
Expand All @@ -256,7 +277,6 @@ lazy val root = (project in file("."))
Properties.envOrNone("GPG_HEX_CODE").getOrElse("Jenkins_build_not_set_GPG_HEX_CODE"),
"ignored" // this field is ignored; passwords are supplied by pinentry
),
// usePgpKeyHex(Properties.envOrElse("GPG_SIGNATURE", "12345")),
Global / pgpPassphrase := Properties.envOrNone("GPG_KEY_PASSPHRASE").map(_.toCharArray),
publishMavenStyle := true,
releaseCrossBuild := true,
Expand Down
130 changes: 130 additions & 0 deletions scripts/deploy-common.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
#!/bin/bash -ex
#
# DO NOT RUN DIRECTLY.
# Script must be sourced by deploy.sh or deploy-fips.sh
# after setting or unsetting `SNOWPARK_FIPS` environment variable.
#
# Required environment: GPG_KEY_PASSPHRASE, GPG_PRIVATE_KEY, sonatype_user,
# sonatype_password, PUBLISH, github_version_tag.
# Optional: GPG_KEY_ID (defaults to "Snowflake Computing").
#

if [ -z "$GPG_KEY_ID" ]; then
export GPG_KEY_ID="Snowflake Computing"
echo "[WARN] GPG key ID not specified, using default: $GPG_KEY_ID."
fi

if [ -z "$GPG_KEY_PASSPHRASE" ]; then
echo "[ERROR] GPG passphrase is not specified for $GPG_KEY_ID!"
exit 1
fi

if [ -z "$GPG_PRIVATE_KEY" ]; then
echo "[ERROR] GPG private key file is not specified!"
exit 1
fi

if [ -z "$sonatype_user" ]; then
echo "[ERROR] Jenkins sonatype user is not specified!"
exit 1
fi

if [ -z "$sonatype_password" ]; then
echo "[ERROR] Jenkins sonatype pwd is not specified!"
exit 1
fi

if [ -z "$PUBLISH" ]; then
echo "[ERROR] 'PUBLISH' is not specified!"
exit 1
fi

if [ -z "$github_version_tag" ]; then
echo "[ERROR] 'github_version_tag' is not specified!"
exit 1
fi

mkdir -p ~/.ivy2

# Write the sonatype credentials file consumed by sbt's Credentials(...) setting.
# A quoted heredoc boundary is not used because $sonatype_user/$sonatype_password
# must expand; the heredoc replaces the previous fragile $'...' quote-splicing and
# preserves values containing spaces.
cat > ~/.ivy2/.credentials <<EOF
host=central.sonatype.com
user=$sonatype_user
password=$sonatype_password
EOF
# The file holds a plaintext password; restrict it to the current user.
chmod 600 ~/.ivy2/.credentials

# Import the PGP private key used for artifact signing.
echo "[INFO] Importing PGP key."
if [ -n "$GPG_PRIVATE_KEY" ] && [ -f "$GPG_PRIVATE_KEY" ]; then
# Skip the import when a secret key for this ID is already present.
if ! gpg --list-secret-key | grep "$GPG_KEY_ID"; then
gpg --allow-secret-key-import --import "$GPG_PRIVATE_KEY"
fi
fi

# Under `set -e` (bash -ex) a bare `which sbt` would abort the script when sbt is
# missing before the `$?` test ever ran, so probe inside the `if` condition instead.
if ! command -v sbt >/dev/null 2>&1
then
pushd ..
echo "[INFO] sbt is not installed, downloading latest sbt for test and build."
curl -L -o sbt-1.11.4.zip https://github.com/sbt/sbt/releases/download/v1.11.4/sbt-1.11.4.zip
unzip sbt-1.11.4.zip
PATH=$PWD/sbt/bin:$PATH
popd
else
echo "[INFO] Using system installed sbt."
fi
# Fail fast (set -e) if sbt is still unavailable, then log the version in use.
which sbt
sbt version

echo "[INFO] Checking out snowpark-java-scala @ tag: $github_version_tag."
# Use the tags/ prefix to be explicit that a tag, not a branch, is checked out
# (consistent with the original deploy.sh behavior).
git checkout "tags/$github_version_tag"
Copy link

Copilot AI Sep 19, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The git checkout command should use 'tags/' prefix for consistency with the original implementation and to be explicit about checking out a tag rather than a branch: git checkout tags/$github_version_tag

Suggested change
git checkout $github_version_tag
git checkout tags/$github_version_tag

Copilot uses AI. Check for mistakes.

# PUBLISH=true: sign and upload artifacts to Maven Central.
# Otherwise: stage signed artifacts locally and upload them to S3 (below).
if [ "$PUBLISH" = true ]; then
if [ "$SNOWPARK_FIPS" = true ]; then
echo "[INFO] Packaging snowpark-fips @ tag: $github_version_tag."
else
echo "[INFO] Packaging snowpark @ tag: $github_version_tag."
fi
# Cross-build (+) and stage PGP-signed artifacts, then push to the Central Portal.
sbt +publishSigned
echo "[INFO] Staged packaged artifacts locally with PGP signing."
sbt sonaUpload
echo "[SUCCESS] Uploaded artifacts to central portal."
echo "[ACTION-REQUIRED] Please log in to Central Portal to publish artifacts: https://central.sonatype.com/"
# TODO: alternatively automate publishing fully
# sbt sonaRelease
# echo "[SUCCESS] Released Snowpark Java-Scala $github_version_tag to Maven."
else
# Release to S3: stage signed artifacts into the local ivy2 repository, then upload.
echo "[INFO] Staging signed artifacts to local ivy2 repository."
# Clean the local staging area first so only this build's artifacts get uploaded.
rm -rf ~/.ivy2/local/
sbt +publishLocalSigned

# SBT will build FIPS version of Snowpark automatically if the environment variable exists.
if [ "$SNOWPARK_FIPS" = true ]; then
S3_JENKINS_URL="s3://sfc-eng-jenkins/repository/snowparkclient-fips/$github_version_tag/"
S3_DATA_URL="s3://sfc-eng-data/client/snowparkclient-fips/releases/$github_version_tag/"
echo "[INFO] Uploading snowpark-fips artifacts to:"
else
S3_JENKINS_URL="s3://sfc-eng-jenkins/repository/snowparkclient/$github_version_tag/"
S3_DATA_URL="s3://sfc-eng-data/client/snowparkclient/releases/$github_version_tag/"
echo "[INFO] Uploading snowpark artifacts to:"
fi
echo "[INFO] - $S3_JENKINS_URL"
echo "[INFO] - $S3_DATA_URL"

# Remove release folders in s3 for current release version if they already exist due to previously failed release pipeline runs.
echo "[INFO] Deleting $github_version_tag release folders in s3 if they already exist."
aws s3 rm "$S3_JENKINS_URL" --recursive
echo "[INFO] $S3_JENKINS_URL folder deleted if it exists."
aws s3 rm "$S3_DATA_URL" --recursive
echo "[INFO] $S3_DATA_URL folder deleted if it exists."

# Rename all produced artifacts to include version number (sbt doesn't by default when publishing to local ivy2 repository).
# NOTE(review): the sed inserts the release version after the first "2.12"/"2.13"
# (Scala suffix) followed by "-" or "." in each path — see the TODO below before
# releasing a Snowpark 2.12.x/2.13.x version, whose version directory would also match.
# TODO: BEFORE SNOWPARK v2.12.0, fix the regex in the sed command to not match the 2.12.x or 2.13.x named folder under ~/.ivy2/local/com.snowflake/snowpark_2.1[23]/
find ~/.ivy2/local -type f -name '*snowpark*' | while read file; do newfile=$(echo "$file" | sed "s/\(2\.1[23]\)\([-\.]\)/\1-${github_version_tag#v}\2/"); mv "$file" "$newfile"; done
Comment on lines +119 to +120
Copy link

Copilot AI Sep 19, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The TODO comment on line 119 indicates this regex will need fixing before Snowpark v2.12.0, but the regex pattern 2\.1[23] only matches 2.12 and 2.13. This creates a potential maintenance issue when newer Scala versions are supported.

Suggested change
# TODO: BEFORE SNOWPARK v2.12.0, fix the regex in the sed command to not match the 2.12.x or 2.13.x named folder under ~/.ivy2/local/com.snowflake/snowpark_2.1[23]/
find ~/.ivy2/local -type f -name '*snowpark*' | while read file; do newfile=$(echo "$file" | sed "s/\(2\.1[23]\)\([-\.]\)/\1-${github_version_tag#v}\2/"); mv "$file" "$newfile"; done
# Updated: Regex now matches any Scala 2.x version (e.g., 2.12, 2.13, 2.14, ...).
find ~/.ivy2/local -type f -name '*snowpark*' | while read file; do newfile=$(echo "$file" | sed -E "s/(2\.[0-9]+)([-\.])/\1-${github_version_tag#v}\2/"); mv "$file" "$newfile"; done

Copilot uses AI. Check for mistakes.

# Generate sha256 checksums for all artifacts produced except .md5, .sha1, and existing .sha256 checksum files.
# The awk printf writes the bare hex digest (no trailing filename/newline) into "<artifact>.sha256".
find ~/.ivy2/local -type f -name '*snowpark*' ! -name '*.md5' ! -name '*.sha1' ! -name '*.sha256' -exec sh -c 'for f; do sha256sum "$f" | awk '"'"'{printf "%s", $1}'"'"' > "$f.sha256"; done' _ {} +

# Copy all files, flattening the nested structure of the ivy2 repository into the expected structure on s3.
# .sha1 files are deliberately not uploaded; md5 and sha256 checksums accompany the artifacts.
# NOTE(review): confirm the flattened layout matches what downstream S3 consumers expect.
find ~/.ivy2/local -type f -name '*snowpark*' ! -name '*.sha1' -exec aws s3 cp \{\} $S3_JENKINS_URL \;
find ~/.ivy2/local -type f -name '*snowpark*' ! -name '*.sha1' -exec aws s3 cp \{\} $S3_DATA_URL \;

echo "[SUCCESS] Published Snowpark Java-Scala $github_version_tag artifacts to S3."
fi
8 changes: 8 additions & 0 deletions scripts/deploy-fips.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
#!/bin/bash -ex
#
# Push Snowpark Java/Scala FIPS build to the public maven repository.
# This script needs to be executed by snowflake jenkins job.
#

# Select the FIPS flavor of the build, then delegate all deployment work to the
# shared driver script (which reads SNOWPARK_FIPS from the environment).
SNOWPARK_FIPS="true"
export SNOWPARK_FIPS
source scripts/deploy-common.sh
119 changes: 3 additions & 116 deletions scripts/deploy.sh
Original file line number Diff line number Diff line change
@@ -1,121 +1,8 @@
#!/bin/bash -ex
#
# Push Snowpark Java/Scala to the public maven repository.
# Push Snowpark Java/Scala build to the public maven repository.
# This script needs to be executed by snowflake jenkins job.
# If the SNOWPARK_FIPS environment variable exists when running
# the script, the fips build of the snowpark client will be
# published instead of the regular build.
#

if [ -z "$GPG_KEY_ID" ]; then
export GPG_KEY_ID="Snowflake Computing"
echo "[WARN] GPG key ID not specified, using default: $GPG_KEY_ID."
fi

if [ -z "$GPG_KEY_PASSPHRASE" ]; then
echo "[ERROR] GPG passphrase is not specified for $GPG_KEY_ID!"
exit 1
fi

if [ -z "$GPG_PRIVATE_KEY" ]; then
echo "[ERROR] GPG private key file is not specified!"
exit 1
fi

if [ -z "$sonatype_user" ]; then
echo "[ERROR] Jenkins sonatype user is not specified!"
exit 1
fi

if [ -z "$sonatype_password" ]; then
echo "[ERROR] Jenkins sonatype pwd is not specified!"
exit 1
fi

if [ -z "$PUBLISH" ]; then
echo "[ERROR] 'PUBLISH' is not specified!"
exit 1
fi

if [ -z "$github_version_tag" ]; then
echo "[ERROR] 'github_version_tag' is not specified!"
exit 1
fi

mkdir -p ~/.ivy2

STR=$'host=central.sonatype.com
user='$sonatype_user'
password='$sonatype_password''

echo "$STR" > ~/.ivy2/.credentials

# import private key first
echo "[INFO] Importing PGP key."
if [ ! -z "$GPG_PRIVATE_KEY" ] && [ -f "$GPG_PRIVATE_KEY" ]; then
# First check if already imported private key
if ! gpg --list-secret-key | grep "$GPG_KEY_ID"; then
gpg --allow-secret-key-import --import "$GPG_PRIVATE_KEY"
fi
fi

which sbt
if [ $? -ne 0 ]
then
pushd ..
echo "[INFO] sbt is not installed, downloading latest sbt for test and build."
curl -L -o sbt-1.11.4.zip https://github.com/sbt/sbt/releases/download/v1.11.4/sbt-1.11.4.zip
unzip sbt-1.11.4.zip
PATH=$PWD/sbt/bin:$PATH
popd
else
echo "[INFO] Using system installed sbt."
fi
which sbt
sbt version

echo "[INFO] Checking out snowpark-java-scala @ tag: $github_version_tag."
git checkout tags/$github_version_tag

if [ "$PUBLISH" = true ]; then
if [ -v SNOWPARK_FIPS ]; then
echo "[INFO] Packaging snowpark-fips @ tag: $github_version_tag."
else
echo "[INFO] Packaging snowpark @ tag: $github_version_tag."
fi
sbt +publishSigned
echo "[INFO] Staged packaged artifacts locally with PGP signing."
sbt sonaUpload
echo "[SUCCESS] Uploaded artifacts to central portal."
echo "[ACTION-REQUIRED] Please log in to Central Portal to publish artifacts: https://central.sonatype.com/"
# TODO: alternatively automate publishing fully
# sbt sonaRelease
# echo "[SUCCESS] Released Snowpark Java-Scala v$github_version_tag to Maven."
else
#release to s3
echo "[INFO] Staging signed artifacts to local ivy2 repository."
rm -rf ~/.ivy2/local/
sbt +publishLocalSigned

# SBT will build FIPS version of Snowpark automatically if the environment variable exists.
if [ -v SNOWPARK_FIPS ]; then
S3_JENKINS_URL="s3://sfc-eng-jenkins/repository/snowparkclient-fips"
S3_DATA_URL="s3://sfc-eng-data/client/snowparkclient-fips/releases"
echo "[INFO] Uploading snowpark-fips artifacts to:"
else
S3_JENKINS_URL="s3://sfc-eng-jenkins/repository/snowparkclient"
S3_DATA_URL="s3://sfc-eng-data/client/snowparkclient/releases"
echo "[INFO] Uploading snowpark artifacts to:"
fi
echo "[INFO] - $S3_JENKINS_URL/$github_version_tag/"
echo "[INFO] - $S3_DATA_URL/$github_version_tag/"

# Rename all produced artifacts to include version number (sbt doesn't by default when publishing to local ivy2 repository).
find ~/.ivy2/local -type f -name "*snowpark*" | while read file; do newfile=$(echo "$file" | sed "s/\(2\.1[23]\)\([-\.]\)/\1-${github_version_tag#v}\2/"); mv "$file" "$newfile"; done

# Copy all files, flattening the nested structure of the ivy2 repository into the expected structure on s3.
find ~/.ivy2/local -type f -name "*snowpark*" -exec aws s3 cp \{\} $S3_JENKINS_URL/$github_version_tag/ \;
find ~/.ivy2/local -type f -name "*snowpark*" -exec aws s3 cp \{\} $S3_DATA_URL/$github_version_tag/ \;

echo "[SUCCESS] Published Snowpark Java-Scala v$github_version_tag artifacts to S3."
fi
unset SNOWPARK_FIPS
source scripts/deploy-common.sh
12 changes: 10 additions & 2 deletions scripts/utils.sh
Original file line number Diff line number Diff line change
Expand Up @@ -62,14 +62,22 @@ run_test_suites() {
# Avoid failures in subsequent test runs due to an already closed stderr.
export DISABLE_REDIRECT_STDERR=""

# Set JVM system property for FIPS test if SNOWPARK_FIPS is true.
if [ "$SNOWPARK_FIPS" = true ]; then
FIPS='-J-DFIPS_TEST=true'
echo "Passing $FIPS to sbt"
else
FIPS=''
fi

# test
sbt clean +compile \
sbt $FIPS clean +compile \
+JavaAPITests:test \
+NonparallelTests:test \
'++ 2.12.20 OtherTests:testOnly * -- -l SampleDataTest' \
'++ 2.13.16 OtherTests:testOnly * -- -l SampleDataTest' \
'++ 2.12.20 UDFTests:testOnly * -- -l SampleDataTest' \
'++ 2.13.16 UDFTests:testOnly * -- -l SampleDataTest -l com.snowflake.snowpark.UDFPackageTest' \
'++ 2.13.16 UDFTests:testOnly * -- -l SampleDataTest -l com.snowflake.snowpark.UDFPackageTest' \
'++ 2.12.20 UDTFTests:testOnly * -- -l SampleDataTest' \
'++ 2.13.16 UDTFTests:testOnly * -- -l SampleDataTest -l com.snowflake.snowpark.UDFPackageTest' \
+SprocTests:test
Expand Down
Loading