Merged
Changes from 4 commits
14 changes: 9 additions & 5 deletions .snyk
@@ -7,10 +7,14 @@ ignore:
reason: Spark Core is provided dependency
expires: 2050-01-01T00:00:00.000Z
created: 2025-09-18T08:33:31.014
- 'org.apache.spark:spark-core_2.12':
<<: *spark-core
'SNYK-JAVA-COMFASTERXMLJACKSONCORE-7569538': *spark-core-exclusions
'SNYK-JAVA-COMGOOGLEPROTOBUF-8055227': *spark-core-exclusions
'SNYK-JAVA-ORGAPACHEIVY-5847858': *spark-core-exclusions
'SNYK-JAVA-ORGAPACHEZOOKEEPER-5961102': *spark-core-exclusions
'SNYK-JAVA-ORGGLASSFISHJERSEYCORE-14049172': *spark-core-exclusions
'SNYK-JAVA-IOAIRLIFT-14412703': &spark-sql-exclusions
- 'org.apache.spark:spark-sql_2.13': &spark-sql
reason: Spark SQL is provided dependency
expires: 2050-01-01T00:00:00.000Z
created: 2025-09-18T08:35:12.345
<<: *spark-sql
'SNYK-JAVA-ORGLZ4-14151788': *spark-core-exclusions
'SNYK-JAVA-ORGLZ4-14219384': *spark-core-exclusions
patch: {}
6 changes: 2 additions & 4 deletions README.md
@@ -12,16 +12,14 @@ The documentation for Neo4j Connector for Apache Spark lives at https://github.c

## Building for Spark 3

You can build for Spark 3.x with both Scala 2.12 and Scala 2.13
You can build for Spark 4.x with Scala 2.13

```
./maven-release.sh package 2.12
./maven-release.sh package 2.13
```

These commands will generate the corresponding targets
* `spark-3/target/neo4j-connector-apache-spark_2.12-<version>_for_spark_3.jar`
* `spark-3/target/neo4j-connector-apache-spark_2.13-<version>_for_spark_3.jar`
* `spark/target/neo4j-connector-apache-spark_2.13-<version>_for_spark_3.jar`
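
For a quick sanity check of the built artifact, the jar can be attached to a Spark session (for example in `spark-shell`) and used through the connector's data source. The snippet below is only a sketch: the Bolt URL, credentials, and `Person` label are placeholder assumptions, and `<version>` must be replaced with the actual release version.

```
import org.apache.spark.sql.SparkSession

// Minimal usage sketch (placeholder URL, credentials, and label).
val spark = SparkSession.builder()
  .appName("neo4j-connector-smoke-test")
  .config("spark.jars", "spark/target/neo4j-connector-apache-spark_2.13-<version>_for_spark_3.jar")
  .getOrCreate()

// Read all nodes with the (assumed) Person label into a DataFrame.
val people = spark.read
  .format("org.neo4j.spark.DataSource")
  .option("url", "bolt://localhost:7687")
  .option("authentication.basic.username", "neo4j")
  .option("authentication.basic.password", "password")
  .option("labels", "Person")
  .load()

people.show()
```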


## Integration with Apache Spark Applications
2 changes: 1 addition & 1 deletion common/LICENSES.txt
@@ -4,7 +4,7 @@ libraries. For an overview of the licenses see the NOTICE.txt file.

------------------------------------------------------------------------------
Apache Software License, Version 2.0
IntelliJ IDEA Annotations
JetBrains Java Annotations
Kotlin Stdlib
Neo4j Bolt Connection (Bolt Provider reference impl)
Neo4j Bolt Connection (Pooled Source impl)
2 changes: 1 addition & 1 deletion common/NOTICE.txt
@@ -19,7 +19,7 @@ Third-party licenses
--------------------

Apache Software License, Version 2.0
IntelliJ IDEA Annotations
JetBrains Java Annotations
Kotlin Stdlib
Neo4j Bolt Connection (Bolt Provider reference impl)
Neo4j Bolt Connection (Pooled Source impl)
@@ -30,7 +30,7 @@ class ValidationsTest extends SparkConnectorScalaBaseTSE {
.map { _.version }
.getOrElse("UNKNOWN")
try {
Validations.validate(ValidateSparkMinVersion("3.10000"))
Validations.validate(ValidateSparkMinVersion("4.10000"))
fail(s"should be thrown a ${classOf[IllegalArgumentException].getName}")
} catch {
case e: IllegalArgumentException =>
8 changes: 4 additions & 4 deletions jreleaser.yml
@@ -73,7 +73,7 @@ assemble:
output: .
includes:
- '{{projectName}}-{{projectVersion}}*.jar'
templateDirectory: spark-3/src/jreleaser/assemblers/zip
templateDirectory: spark/src/jreleaser/assemblers/zip

hooks:
script:
@@ -88,16 +88,16 @@ hooks:
includes: [ "assemble" ]
matrix:
vars:
scala: [ "2.12", "2.13" ]
scala: [ "2.13" ]
continueOnError: false
verbose: true
shell: BASH
run: |
mkdir artifacts || true
./maven-release.sh deploy {{matrix.scala}} default::file://{{basedir}}/target/{{matrix.scala}}/maven-artifacts
cp -r {{basedir}}/target/{{matrix.scala}}/maven-artifacts artifacts/
cp -r {{basedir}}/spark-3/target/{{projectName}}*.zip artifacts/
cp -r {{basedir}}/spark-3/target/{{projectName}}*.jar artifacts/
cp -r {{basedir}}/spark/target/{{projectName}}*.zip artifacts/
cp -r {{basedir}}/spark/target/{{projectName}}*.jar artifacts/

signing:
active: ALWAYS
14 changes: 7 additions & 7 deletions maven-release.sh
@@ -12,7 +12,7 @@ exit_script() {
mv -f pom.xml.bak pom.xml
mv -f common/pom.xml.bak common/pom.xml
mv -f test-support/pom.xml.bak test-support/pom.xml
mv -f spark-3/pom.xml.bak spark-3/pom.xml
mv -f spark/pom.xml.bak spark/pom.xml
trap - SIGINT SIGTERM # clear the trap
kill -- -$$ || true # Sends SIGTERM to child/sub processes
}
@@ -48,7 +48,7 @@ SPARK_PACKAGES_VERSION="${PROJECT_VERSION}-s_$SCALA_VERSION"
cp pom.xml pom.xml.bak
cp common/pom.xml common/pom.xml.bak
cp test-support/pom.xml test-support/pom.xml.bak
cp spark-3/pom.xml spark-3/pom.xml.bak
cp spark/pom.xml spark/pom.xml.bak

./mvnw -B versions:set -DnewVersion=${PROJECT_VERSION}_for_spark_${SPARK_VERSION} -DgenerateBackupPoms=false

@@ -61,11 +61,11 @@ sed_i "s/<artifactId>neo4j-connector-apache-spark_common<\/artifactId>/<artifact
sed_i "s/<artifactId>neo4j-connector-apache-spark_parent<\/artifactId>/<artifactId>neo4j-connector-apache-spark_${SCALA_VERSION}_parent<\/artifactId>/" "common/pom.xml"
sed_i "s/<artifactId>neo4j-connector-apache-spark_test-support<\/artifactId>/<artifactId>neo4j-connector-apache-spark_${SCALA_VERSION}_test-support<\/artifactId>/" "common/pom.xml"

sed_i "s/<artifactId>neo4j-connector-apache-spark<\/artifactId>/<artifactId>neo4j-connector-apache-spark_${SCALA_VERSION}<\/artifactId>/" "spark-3/pom.xml"
sed_i "s/<artifactId>neo4j-connector-apache-spark_parent<\/artifactId>/<artifactId>neo4j-connector-apache-spark_${SCALA_VERSION}_parent<\/artifactId>/" "spark-3/pom.xml"
sed_i "s/<artifactId>neo4j-connector-apache-spark_common<\/artifactId>/<artifactId>neo4j-connector-apache-spark_${SCALA_VERSION}_common<\/artifactId>/" "spark-3/pom.xml"
sed_i "s/<artifactId>neo4j-connector-apache-spark_test-support<\/artifactId>/<artifactId>neo4j-connector-apache-spark_${SCALA_VERSION}_test-support<\/artifactId>/" "spark-3/pom.xml"
sed_i "s/<spark-packages.version\/>/<spark-packages.version>${SPARK_PACKAGES_VERSION}<\/spark-packages.version>/" "spark-3/pom.xml"
sed_i "s/<artifactId>neo4j-connector-apache-spark<\/artifactId>/<artifactId>neo4j-connector-apache-spark_${SCALA_VERSION}<\/artifactId>/" "spark/pom.xml"
sed_i "s/<artifactId>neo4j-connector-apache-spark_parent<\/artifactId>/<artifactId>neo4j-connector-apache-spark_${SCALA_VERSION}_parent<\/artifactId>/" "spark/pom.xml"
sed_i "s/<artifactId>neo4j-connector-apache-spark_common<\/artifactId>/<artifactId>neo4j-connector-apache-spark_${SCALA_VERSION}_common<\/artifactId>/" "spark/pom.xml"
sed_i "s/<artifactId>neo4j-connector-apache-spark_test-support<\/artifactId>/<artifactId>neo4j-connector-apache-spark_${SCALA_VERSION}_test-support<\/artifactId>/" "spark/pom.xml"
sed_i "s/<spark-packages.version\/>/<spark-packages.version>${SPARK_PACKAGES_VERSION}<\/spark-packages.version>/" "spark/pom.xml"

# build
./mvnw -B clean "${GOAL}" -Dscala-"${SCALA_VERSION}" -DskipTests ${ALT_DEPLOYMENT_REPOSITORY}
8 changes: 4 additions & 4 deletions pom.xml
@@ -31,7 +31,7 @@
<modules>
<module>common</module>
<module>test-support</module>
<module>spark-3</module>
<module>spark</module>
</modules>
<scm>
<url>https://github.com/neo4j/neo4j-spark-connector</url>
@@ -68,11 +68,11 @@
<netty-bom.version>4.1.128.Final</netty-bom.version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<scala-maven-plugin.version>4.9.6</scala-maven-plugin.version>
<scala.binary.version>2.12</scala.binary.version>
<scala.version>2.12.20</scala.version>
<scala.binary.version>2.13</scala.binary.version>
<scala.version>2.13.18</scala.version>
<slf4j-api.version>2.0.17</slf4j-api.version>
<sortpom-maven-plugin.version>4.0.0</sortpom-maven-plugin.version>
<spark.version>3.5.7</spark.version>
<spark.version>4.0.1</spark.version>
<spotless-maven-plugin.version>3.0.0</spotless-maven-plugin.version>
<surefire.jvm.args/>
<testcontainers.version>2.0.2</testcontainers.version>
2 changes: 1 addition & 1 deletion scripts/python/requirements.txt
@@ -1,4 +1,4 @@
pyspark==3.5.5
pyspark==4.0.1
testcontainers[neo4j]
six
tzlocal==2.1
File renamed without changes.
File renamed without changes.
File renamed without changes.
@@ -39,7 +39,7 @@ import java.util.UUID
class DataSource extends TableProvider
with DataSourceRegister {

Validations.validate(ValidateSparkMinVersion("3.3.0"))
Validations.validate(ValidateSparkMinVersion("4.0.0"))

private val jobId: String = UUID.randomUUID().toString
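
The version guard above runs in the `DataSource` class body, so an unsupported Spark runtime fails fast as soon as the data source is constructed. As a rough illustration of what a minimum-version check of this shape can look like, here is a sketch; `SparkMinVersionCheck` is an illustrative name and implementation, not the connector's actual `Validations` code.

```
// Illustrative sketch only: numeric comparison of dotted version strings,
// so "4.0.1" satisfies a minimum of "4.0.0" while "3.5.7" does not.
object SparkMinVersionCheck {

  // Split "4.0.1" into Seq(4, 0, 1), ignoring non-numeric suffixes.
  private def parts(version: String): Seq[Int] =
    version.split("\\.").toSeq
      .map(s => s.takeWhile(_.isDigit))
      .filter(_.nonEmpty)
      .map(_.toInt)

  def validate(minimum: String, current: String): Unit = {
    val satisfied = parts(minimum)
      .zipAll(parts(current), 0, 0)           // pad the shorter version with zeros
      .find { case (min, cur) => min != cur }  // first segment that differs
      .forall { case (min, cur) => cur > min } // equal versions also satisfy
    if (!satisfied) {
      throw new IllegalArgumentException(
        s"Spark $current is below the minimum supported version $minimum"
      )
    }
  }
}
```

A call such as `SparkMinVersionCheck.validate("4.0.0", org.apache.spark.SPARK_VERSION)` would mirror the behaviour exercised by `ValidationsTest` earlier in this diff, where an impossible minimum like `4.10000` is expected to raise an `IllegalArgumentException`.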
