Commit 98f4fef

update
wayneguow committed Feb 1, 2025
1 parent b282c5f commit 98f4fef
Showing 13 changed files with 34 additions and 62 deletions.
5 changes: 0 additions & 5 deletions connector/kafka-0-10-assembly/pom.xml
@@ -131,11 +131,6 @@
       <artifactId>snappy-java</artifactId>
       <scope>provided</scope>
     </dependency>
-    <dependency>
-      <groupId>javax.activation</groupId>
-      <artifactId>activation</artifactId>
-      <scope>provided</scope>
-    </dependency>
   </dependencies>
 
   <build>
4 changes: 0 additions & 4 deletions core/pom.xml
@@ -104,10 +104,6 @@
       <artifactId>spark-common-utils_${scala.binary.version}</artifactId>
       <version>${project.version}</version>
     </dependency>
-    <dependency>
-      <groupId>javax.activation</groupId>
-      <artifactId>activation</artifactId>
-    </dependency>
     <dependency>
       <groupId>org.apache.curator</groupId>
       <artifactId>curator-recipes</artifactId>
core/src/main/scala/org/apache/spark/status/api/v1/ApiRootResource.scala
@@ -24,7 +24,6 @@ import jakarta.ws.rs._
 import jakarta.ws.rs.core.{Context, Response}
 import org.eclipse.jetty.server.handler.ContextHandler
 import org.eclipse.jetty.servlet.{ServletContextHandler, ServletHolder}
-import org.glassfish.jersey.CommonProperties
 import org.glassfish.jersey.server.ServerProperties
 import org.glassfish.jersey.servlet.ServletContainer
 
@@ -63,8 +62,6 @@ private[spark] object ApiRootResource {
     jerseyContext.setContextPath("/api")
     val holder: ServletHolder = new ServletHolder(classOf[ServletContainer])
     holder.setInitParameter(ServerProperties.PROVIDER_PACKAGES, "org.apache.spark.status.api.v1")
-    holder.setInitParameter(CommonProperties.PROVIDER_DEFAULT_DISABLE, "DATASOURCE")
-    holder.setInitParameter(ServerProperties.WADL_FEATURE_DISABLE, "true")
     UIRootFromServletContext.setUiRoot(jerseyContext, uiRoot)
     jerseyContext.addServlet(holder, "/*")
     jerseyContext
core/src/main/scala/org/apache/spark/status/api/v1/PrometheusResource.scala
@@ -19,7 +19,6 @@ package org.apache.spark.status.api.v1
 import jakarta.ws.rs._
 import jakarta.ws.rs.core.MediaType
 import org.eclipse.jetty.servlet.{ServletContextHandler, ServletHolder}
-import org.glassfish.jersey.CommonProperties
 import org.glassfish.jersey.server.ServerProperties
 import org.glassfish.jersey.servlet.ServletContainer
 
@@ -115,8 +114,6 @@ private[spark] object PrometheusResource {
     jerseyContext.setContextPath("/metrics")
     val holder: ServletHolder = new ServletHolder(classOf[ServletContainer])
     holder.setInitParameter(ServerProperties.PROVIDER_PACKAGES, "org.apache.spark.status.api.v1")
-    holder.setInitParameter(CommonProperties.PROVIDER_DEFAULT_DISABLE, "DATASOURCE")
-    holder.setInitParameter(ServerProperties.WADL_FEATURE_DISABLE, "true")
     UIRootFromServletContext.setUiRoot(jerseyContext, uiRoot)
     jerseyContext.addServlet(holder, "/*")
     jerseyContext
11 changes: 6 additions & 5 deletions dev/deps/spark-deps-hadoop-3-hive-2.3
@@ -3,13 +3,13 @@ JLargeArrays/1.5//JLargeArrays-1.5.jar
 JTransforms/3.1//JTransforms-3.1.jar
 RoaringBitmap/1.3.0//RoaringBitmap-1.3.0.jar
 ST4/4.0.4//ST4-4.0.4.jar
-activation/1.1.1//activation-1.1.1.jar
 aircompressor/2.0.2//aircompressor-2.0.2.jar
 algebra_2.13/2.8.0//algebra_2.13-2.8.0.jar
 aliyun-java-sdk-core/4.5.10//aliyun-java-sdk-core-4.5.10.jar
 aliyun-java-sdk-kms/2.11.0//aliyun-java-sdk-kms-2.11.0.jar
 aliyun-java-sdk-ram/3.1.0//aliyun-java-sdk-ram-3.1.0.jar
 aliyun-sdk-oss/3.13.2//aliyun-sdk-oss-3.13.2.jar
+angus-activation/2.0.2//angus-activation-2.0.2.jar
 annotations/17.0.0//annotations-17.0.0.jar
 antlr-runtime/3.5.2//antlr-runtime-3.5.2.jar
 antlr4-runtime/4.13.1//antlr4-runtime-4.13.1.jar
@@ -101,7 +101,7 @@ httpclient/4.5.14//httpclient-4.5.14.jar
 httpcore/4.4.16//httpcore-4.4.16.jar
 icu4j/76.1//icu4j-76.1.jar
 ini4j/0.5.4//ini4j-0.5.4.jar
-istack-commons-runtime/3.0.8//istack-commons-runtime-3.0.8.jar
+istack-commons-runtime/4.1.2//istack-commons-runtime-4.1.2.jar
 ivy/2.5.3//ivy-2.5.3.jar
 j2objc-annotations/3.0.0//j2objc-annotations-3.0.0.jar
 jackson-annotations/2.18.2//jackson-annotations-2.18.2.jar
@@ -113,21 +113,22 @@ jackson-dataformat-yaml/2.18.2//jackson-dataformat-yaml-2.18.2.jar
 jackson-datatype-jsr310/2.18.2//jackson-datatype-jsr310-2.18.2.jar
 jackson-mapper-asl/1.9.13//jackson-mapper-asl-1.9.13.jar
 jackson-module-scala_2.13/2.18.2//jackson-module-scala_2.13-2.18.2.jar
+jakarta.activation-api/2.1.3//jakarta.activation-api-2.1.3.jar
 jakarta.annotation-api/2.1.1//jakarta.annotation-api-2.1.1.jar
 jakarta.inject-api/2.0.1//jakarta.inject-api-2.0.1.jar
 jakarta.servlet-api/5.0.0//jakarta.servlet-api-5.0.0.jar
 jakarta.validation-api/3.0.2//jakarta.validation-api-3.0.2.jar
 jakarta.ws.rs-api/3.0.0//jakarta.ws.rs-api-3.0.0.jar
-jakarta.xml.bind-api/2.3.2//jakarta.xml.bind-api-2.3.2.jar
+jakarta.xml.bind-api/4.0.2//jakarta.xml.bind-api-4.0.2.jar
 janino/3.1.9//janino-3.1.9.jar
 java-diff-utils/4.15//java-diff-utils-4.15.jar
 java-xmlbuilder/1.2//java-xmlbuilder-1.2.jar
 javassist/3.30.2-GA//javassist-3.30.2-GA.jar
 javax.jdo/3.2.0-m3//javax.jdo-3.2.0-m3.jar
 javax.servlet-api/4.0.1//javax.servlet-api-4.0.1.jar
 javolution/5.5.1//javolution-5.5.1.jar
-jaxb-api/2.2.11//jaxb-api-2.2.11.jar
-jaxb-runtime/2.3.2//jaxb-runtime-2.3.2.jar
+jaxb-core/4.0.5//jaxb-core-4.0.5.jar
+jaxb-runtime/4.0.5//jaxb-runtime-4.0.5.jar
 jcl-over-slf4j/2.0.16//jcl-over-slf4j-2.0.16.jar
 jdo-api/3.0.1//jdo-api-3.0.1.jar
 jdom2/2.0.6//jdom2-2.0.6.jar
4 changes: 0 additions & 4 deletions mllib/pom.xml
@@ -34,10 +34,6 @@
   <url>https://spark.apache.org/</url>
 
   <dependencies>
-    <dependency>
-      <groupId>javax.xml.bind</groupId>
-      <artifactId>jaxb-api</artifactId>
-    </dependency>
     <dependency>
       <groupId>org.scala-lang.modules</groupId>
       <artifactId>scala-parser-combinators_${scala.binary.version}</artifactId>
mllib/src/main/scala/org/apache/spark/mllib/pmml/PMMLExportable.scala
@@ -20,7 +20,7 @@ package org.apache.spark.mllib.pmml
 import java.io.{File, OutputStream, StringWriter}
 import javax.xml.transform.stream.StreamResult
 
-import org.jpmml.model.JAXBUtil
+import org.jpmml.model.{JAXBSerializer, JAXBUtil}
 
 import org.apache.spark.SparkContext
 import org.apache.spark.annotation.Since
@@ -39,7 +39,8 @@ trait PMMLExportable {
    */
   private def toPMML(streamResult: StreamResult): Unit = {
     val pmmlModelExport = PMMLModelExportFactory.createPMMLModelExport(this)
-    JAXBUtil.marshalPMML(pmmlModelExport.getPmml(), streamResult)
+    val jaxbSerializer = new JAXBSerializer(JAXBUtil.getContext());
+    jaxbSerializer.marshal(pmmlModelExport.getPmml(), streamResult)
   }
 
   /**
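For context on the change above: jpmml-model 1.7.x drops the static JAXBUtil.marshalPMML(...) in favor of an instance-based serializer. A minimal sketch of the new marshalling path, using only the calls visible in this diff (the helper name toPmmlString is hypothetical):

import java.io.StringWriter
import javax.xml.transform.stream.StreamResult

import org.dmg.pmml.PMML
import org.jpmml.model.{JAXBSerializer, JAXBUtil}

// Hypothetical helper: marshal a PMML object to XML text with jpmml-model
// 1.7.x by building a JAXBSerializer from the shared JAXBContext.
def toPmmlString(pmml: PMML): String = {
  val writer = new StringWriter()
  val serializer = new JAXBSerializer(JAXBUtil.getContext())
  serializer.marshal(pmml, new StreamResult(writer))
  writer.toString
}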
mllib/src/main/scala/org/apache/spark/mllib/pmml/export/BinaryClassificationPMMLModelExport.scala
@@ -19,8 +19,8 @@ package org.apache.spark.mllib.pmml.`export`
 
 import scala.{Array => SArray}
 
-import org.dmg.pmml.{DataDictionary, DataField, DataType, FieldName, MiningField,
-  MiningFunction, MiningSchema, OpType}
+import org.dmg.pmml.{DataDictionary, DataField, DataType, MiningField, MiningFunction,
+  MiningSchema, OpType}
 import org.dmg.pmml.regression.{NumericPredictor, RegressionModel, RegressionTable}
 
 import org.apache.spark.mllib.regression.GeneralizedLinearModel
@@ -44,7 +44,7 @@ private[mllib] class BinaryClassificationPMMLModelExport(
     pmml.getHeader.setDescription(description)
 
     if (model.weights.size > 0) {
-      val fields = new SArray[FieldName](model.weights.size)
+      val fields = new SArray[String](model.weights.size)
       val dataDictionary = new DataDictionary
       val miningSchema = new MiningSchema
       val regressionTableYES = new RegressionTable(model.intercept).setTargetCategory("1")
@@ -67,7 +67,7 @@
         .addRegressionTables(regressionTableYES, regressionTableNO)
 
       for (i <- 0 until model.weights.size) {
-        fields(i) = FieldName.create("field_" + i)
+        fields(i) = "field_" + i
         dataDictionary.addDataFields(new DataField(fields(i), OpType.CONTINUOUS, DataType.DOUBLE))
         miningSchema
           .addMiningFields(new MiningField(fields(i))
@@ -76,7 +76,7 @@
       }
 
       // add target field
-      val targetField = FieldName.create("target")
+      val targetField = "target"
       dataDictionary
         .addDataFields(new DataField(targetField, OpType.CATEGORICAL, DataType.STRING))
       miningSchema
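The FieldName edits in this file (and in the two exports that follow) stem from jpmml-model 1.7.x removing org.dmg.pmml.FieldName: field identifiers are now plain Strings. A minimal sketch under that assumption:

import org.dmg.pmml.{DataField, DataType, MiningField, OpType}

// Field names are plain Strings in jpmml-model 1.7.x; FieldName.create is gone.
val field: String = "field_0"  // previously: FieldName.create("field_0")
val dataField = new DataField(field, OpType.CONTINUOUS, DataType.DOUBLE)
val miningField = new MiningField(field)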
mllib/src/main/scala/org/apache/spark/mllib/pmml/export/GeneralizedLinearPMMLModelExport.scala
@@ -19,8 +19,8 @@ package org.apache.spark.mllib.pmml.`export`
 
 import scala.{Array => SArray}
 
-import org.dmg.pmml.{DataDictionary, DataField, DataType, FieldName, MiningField,
-  MiningFunction, MiningSchema, OpType}
+import org.dmg.pmml.{DataDictionary, DataField, DataType, MiningField, MiningFunction,
+  MiningSchema, OpType}
 import org.dmg.pmml.regression.{NumericPredictor, RegressionModel, RegressionTable}
 
 import org.apache.spark.mllib.regression.GeneralizedLinearModel
@@ -42,7 +42,7 @@ private[mllib] class GeneralizedLinearPMMLModelExport(
     pmml.getHeader.setDescription(description)
 
     if (model.weights.size > 0) {
-      val fields = new SArray[FieldName](model.weights.size)
+      val fields = new SArray[String](model.weights.size)
       val dataDictionary = new DataDictionary
       val miningSchema = new MiningSchema
       val regressionTable = new RegressionTable(model.intercept)
@@ -53,7 +53,7 @@
         .addRegressionTables(regressionTable)
 
      for (i <- 0 until model.weights.size) {
-        fields(i) = FieldName.create("field_" + i)
+        fields(i) = "field_" + i
         dataDictionary.addDataFields(new DataField(fields(i), OpType.CONTINUOUS, DataType.DOUBLE))
         miningSchema
           .addMiningFields(new MiningField(fields(i))
@@ -62,7 +62,7 @@
       }
 
      // for completeness add target field
-      val targetField = FieldName.create("target")
+      val targetField = "target"
       dataDictionary.addDataFields(new DataField(targetField, OpType.CONTINUOUS, DataType.DOUBLE))
       miningSchema
         .addMiningFields(new MiningField(targetField)
mllib/src/main/scala/org/apache/spark/mllib/pmml/export/KMeansPMMLModelExport.scala
@@ -20,7 +20,7 @@ package org.apache.spark.mllib.pmml.`export`
 import scala.{Array => SArray}
 
 import org.dmg.pmml.{Array, CompareFunction, ComparisonMeasure, DataDictionary, DataField, DataType,
-  FieldName, MiningField, MiningFunction, MiningSchema, OpType, SquaredEuclidean}
+  MiningField, MiningFunction, MiningSchema, OpType, SquaredEuclidean}
 import org.dmg.pmml.clustering.{Cluster, ClusteringField, ClusteringModel}
 
 import org.apache.spark.mllib.clustering.KMeansModel
@@ -40,7 +40,7 @@ private[mllib] class KMeansPMMLModelExport(model: KMeansModel) extends PMMLModel
 
     if (model.clusterCenters.length > 0) {
       val clusterCenter = model.clusterCenters(0)
-      val fields = new SArray[FieldName](clusterCenter.size)
+      val fields = new SArray[String](clusterCenter.size)
       val dataDictionary = new DataDictionary
       val miningSchema = new MiningSchema
       val comparisonMeasure = new ComparisonMeasure()
@@ -55,7 +55,7 @@
         .setNumberOfClusters(model.clusterCenters.length)
 
       for (i <- 0 until clusterCenter.size) {
-        fields(i) = FieldName.create("field_" + i)
+        fields(i) = "field_" + i
         dataDictionary.addDataFields(new DataField(fields(i), OpType.CONTINUOUS, DataType.DOUBLE))
         miningSchema
           .addMiningFields(new MiningField(fields(i))
mllib/src/test/scala/org/apache/spark/ml/regression/LinearRegressionSuite.scala
@@ -1165,7 +1165,7 @@ class LinearRegressionSuite extends MLTest with DefaultReadWriteTest with PMMLRe
       assert(fields(0).getOpType() == OpType.CONTINUOUS)
       val pmmlRegressionModel = pmml.getModels().get(0).asInstanceOf[PMMLRegressionModel]
       val pmmlPredictors = pmmlRegressionModel.getRegressionTables.get(0).getNumericPredictors
-      val pmmlWeights = pmmlPredictors.asScala.map(_.getCoefficient()).toList
+      val pmmlWeights = pmmlPredictors.asScala.map(_.getCoefficient().doubleValue()).toList
       assert(pmmlWeights(0) ~== model.coefficients(0) relTol 1E-3)
       assert(pmmlWeights(1) ~== model.coefficients(1) relTol 1E-3)
     }
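The added .doubleValue() reflects NumericPredictor.getCoefficient() returning a java.lang.Number in the upgraded jpmml-model, so the test converts before the tolerance comparison. A small sketch under that assumption:

import scala.jdk.CollectionConverters._

import org.dmg.pmml.regression.RegressionModel

// Extract the first regression table's coefficients as Scala Doubles,
// converting each java.lang.Number explicitly.
def firstTableCoefficients(model: RegressionModel): List[Double] =
  model.getRegressionTables.get(0).getNumericPredictors
    .asScala.map(_.getCoefficient().doubleValue()).toList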
5 changes: 3 additions & 2 deletions mllib/src/test/scala/org/apache/spark/ml/util/PMMLUtils.scala
@@ -20,7 +20,7 @@ import java.io.ByteArrayInputStream
 import java.nio.charset.StandardCharsets
 
 import org.dmg.pmml.PMML
-import org.jpmml.model.{JAXBUtil, SAXUtil}
+import org.jpmml.model.{JAXBSerializer, JAXBUtil, SAXUtil}
 import org.jpmml.model.filters.ImportFilter
 
 /**
@@ -37,6 +37,7 @@
     val transformed = SAXUtil.createFilteredSource(
       new ByteArrayInputStream(input.getBytes(StandardCharsets.UTF_8)),
       new ImportFilter())
-    JAXBUtil.unmarshalPMML(transformed)
+    val jaxbSerializer = new JAXBSerializer(JAXBUtil.getContext());
+    jaxbSerializer.unmarshal(transformed).asInstanceOf[PMML]
   }
 }
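The same JAXBSerializer drives both directions; unmarshal returns Object, hence the asInstanceOf[PMML] cast above. A round-trip sketch, assuming jpmml-model 1.7.x and that unmarshal accepts any javax.xml.transform.Source:

import java.io.{ByteArrayInputStream, StringWriter}
import java.nio.charset.StandardCharsets
import javax.xml.transform.stream.{StreamResult, StreamSource}

import org.dmg.pmml.PMML
import org.jpmml.model.{JAXBSerializer, JAXBUtil}

// Marshal a PMML object to XML and parse it back with one serializer.
def roundTrip(pmml: PMML): PMML = {
  val serializer = new JAXBSerializer(JAXBUtil.getContext())
  val writer = new StringWriter()
  serializer.marshal(pmml, new StreamResult(writer))
  val bytes = writer.toString.getBytes(StandardCharsets.UTF_8)
  serializer.unmarshal(new StreamSource(new ByteArrayInputStream(bytes))).asInstanceOf[PMML]
}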
28 changes: 8 additions & 20 deletions pom.xml
@@ -571,7 +571,7 @@
       <dependency>
         <groupId>org.jpmml</groupId>
         <artifactId>pmml-model</artifactId>
-        <version>1.4.8</version>
+        <version>1.7.1</version>
         <scope>provided</scope>
         <exclusions>
           <exclusion>
@@ -599,7 +599,7 @@
       <dependency>
         <groupId>org.glassfish.jaxb</groupId>
         <artifactId>jaxb-runtime</artifactId>
-        <version>2.3.2</version>
+        <version>4.0.5</version>
         <scope>compile</scope>
         <exclusions>
           <!-- for now, we only write XML in PMML export, and these can be excluded -->
@@ -615,14 +615,6 @@
             <groupId>org.jvnet.staxex</groupId>
             <artifactId>stax-ex</artifactId>
           </exclusion>
-          <!--
-            SPARK-27611: Exclude redundant javax.activation implementation, which
-            conflicts with the existing javax.activation:activation:1.1.1 dependency.
-          -->
-          <exclusion>
-            <groupId>jakarta.activation</groupId>
-            <artifactId>jakarta.activation-api</artifactId>
-          </exclusion>
         </exclusions>
       </dependency>
       <dependency>
@@ -1106,6 +1098,12 @@
         <artifactId>jersey-test-framework-provider-simple</artifactId>
         <version>${jersey.version}</version>
         <scope>test</scope>
+        <exclusions>
+          <exclusion>
+            <groupId>jakarta.xml.bind</groupId>
+            <artifactId>jakarta.xml.bind-api</artifactId>
+          </exclusion>
+        </exclusions>
       </dependency>
       <dependency>
         <groupId>org.glassfish.jersey</groupId>
@@ -1545,16 +1543,6 @@
         <artifactId>xz</artifactId>
         <version>1.10</version>
       </dependency>
-      <!-- See SPARK-23654 for info on this dependency;
-        It is used to keep javax.activation at v1.1.1 after dropping
-        jets3t as a dependency.
-      -->
-      <dependency>
-        <groupId>javax.activation</groupId>
-        <artifactId>activation</artifactId>
-        <version>1.1.1</version>
-        <scope>${hadoop.deps.scope}</scope>
-      </dependency>
       <dependency>
         <groupId>org.apache.zookeeper</groupId>
         <artifactId>zookeeper</artifactId>
