[jvm-packages] Update scalatest to 3.2.15 (#8925)

---------

Co-authored-by: Jiaming Yuan <jm.yuan@outlook.com>
Authored by Emil Ejbyfeldt on 2023-04-20 16:16:56 +02:00; committed by GitHub
parent 564df59204
commit a84a1fde02
22 changed files with 46 additions and 42 deletions
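The change is mechanical: ScalaTest 3.1+ moved each testing style into its own package, so org.scalatest.FunSuite becomes org.scalatest.funsuite.AnyFunSuite, and mixed imports such as org.scalatest.{BeforeAndAfterAll, FunSuite} are split in two. A minimal sketch of the before/after pattern, assuming ScalaTest 3.2.15 on the test classpath; ExampleSuite is an illustrative name, not a class from this repository:

// ScalaTest 3.0.x style (removed throughout this commit):
//   import org.scalatest.FunSuite
//   class ExampleSuite extends FunSuite { ... }
//
// ScalaTest 3.2.x style (added throughout this commit):
import org.scalatest.funsuite.AnyFunSuite

class ExampleSuite extends AnyFunSuite {
  test("addition still works") {
    assert(1 + 1 === 2)
  }
}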


@@ -495,13 +495,13 @@
<dependency>
<groupId>org.scalatest</groupId>
<artifactId>scalatest_${scala.binary.version}</artifactId>
-<version>3.0.8</version>
+<version>3.2.15</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.scalactic</groupId>
<artifactId>scalactic_${scala.binary.version}</artifactId>
-<version>3.0.8</version>
+<version>3.2.15</version>
<scope>test</scope>
</dependency>
</dependencies>


@@ -53,7 +53,7 @@
<dependency>
<groupId>org.scalatest</groupId>
<artifactId>scalatest_${scala.binary.version}</artifactId>
-<version>3.0.5</version>
+<version>3.2.15</version>
<scope>provided</scope>
</dependency>
<dependency>


@@ -19,10 +19,10 @@ package ml.dmlc.xgboost4j.scala
import scala.collection.mutable.ArrayBuffer
import ai.rapids.cudf.Table
-import org.scalatest.FunSuite
+import org.scalatest.funsuite.AnyFunSuite
import ml.dmlc.xgboost4j.gpu.java.CudfColumnBatch
-class QuantileDMatrixSuite extends FunSuite {
+class QuantileDMatrixSuite extends AnyFunSuite {
test("QuantileDMatrix test") {


@@ -20,14 +20,15 @@ import java.nio.file.{Files, Path}
import java.sql.{Date, Timestamp}
import java.util.{Locale, TimeZone}
-import org.scalatest.{BeforeAndAfterAll, FunSuite}
+import org.scalatest.BeforeAndAfterAll
+import org.scalatest.funsuite.AnyFunSuite
import org.apache.spark.{GpuTestUtils, SparkConf}
import org.apache.spark.internal.Logging
import org.apache.spark.network.util.JavaUtils
import org.apache.spark.sql.{Row, SparkSession}
-trait GpuTestSuite extends FunSuite with TmpFolderSuite {
+trait GpuTestSuite extends AnyFunSuite with TmpFolderSuite {
import SparkSessionHolder.withSparkSession
protected def getResourcePath(resource: String): String = {
@@ -200,7 +201,7 @@ trait GpuTestSuite extends FunSuite with TmpFolderSuite {
}
-trait TmpFolderSuite extends BeforeAndAfterAll { self: FunSuite =>
+trait TmpFolderSuite extends BeforeAndAfterAll { self: AnyFunSuite =>
protected var tempDir: Path = _
override def beforeAll(): Unit = {

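The helper traits keep their self-type annotations, which must now reference AnyFunSuite rather than FunSuite so they can only be mixed into 3.2.x-style suites. A minimal sketch of that pattern under the same assumption (ScalaTest 3.2.15 on the classpath); TmpDirSupport and UsesTmpDirSuite are hypothetical names, not code from this repository:

import java.nio.file.{Files, Path}
import org.scalatest.BeforeAndAfterAll
import org.scalatest.funsuite.AnyFunSuite

// Hypothetical helper trait mirroring TmpFolderSuite above: the self-type now
// names AnyFunSuite, so the trait can only be mixed into a 3.2.x-style suite.
trait TmpDirSupport extends BeforeAndAfterAll { self: AnyFunSuite =>
  protected var tempDir: Path = _

  override def beforeAll(): Unit = {
    tempDir = Files.createTempDirectory(getClass.getSimpleName)
    super.beforeAll()
  }
}

class UsesTmpDirSuite extends AnyFunSuite with TmpDirSupport {
  test("the temporary directory exists during tests") {
    assert(Files.exists(tempDir))
  }
}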

@@ -23,9 +23,9 @@ import scala.util.Random
import ml.dmlc.xgboost4j.java.{Communicator, RabitTracker => PyRabitTracker}
import ml.dmlc.xgboost4j.java.IRabitTracker.TrackerStatus
import ml.dmlc.xgboost4j.scala.DMatrix
-import org.scalatest.FunSuite
+import org.scalatest.funsuite.AnyFunSuite
-class CommunicatorRobustnessSuite extends FunSuite with PerTest {
+class CommunicatorRobustnessSuite extends AnyFunSuite with PerTest {
private def getXGBoostExecutionParams(paramMap: Map[String, Any]): XGBoostExecutionParams = {
val classifier = new XGBoostClassifier(paramMap)


@@ -17,13 +17,13 @@
package ml.dmlc.xgboost4j.scala.spark
import org.apache.spark.ml.linalg.Vectors
-import org.scalatest.FunSuite
+import org.scalatest.funsuite.AnyFunSuite
import ml.dmlc.xgboost4j.scala.spark.util.DataUtils
import ml.dmlc.xgboost4j.scala.spark.util.DataUtils.PackedParams
import org.apache.spark.sql.functions._
-class DeterministicPartitioningSuite extends FunSuite with TmpFolderPerSuite with PerTest {
+class DeterministicPartitioningSuite extends AnyFunSuite with TmpFolderPerSuite with PerTest {
test("perform deterministic partitioning when checkpointInternal and" +
" checkpointPath is set (Classifier)") {


@@ -19,10 +19,10 @@ package ml.dmlc.xgboost4j.scala.spark
import java.io.File
import ml.dmlc.xgboost4j.scala.{Booster, DMatrix, ExternalCheckpointManager, XGBoost => SXGBoost}
-import org.scalatest.FunSuite
+import org.scalatest.funsuite.AnyFunSuite
import org.apache.hadoop.fs.{FileSystem, Path}
-class ExternalCheckpointManagerSuite extends FunSuite with TmpFolderPerSuite with PerTest {
+class ExternalCheckpointManagerSuite extends AnyFunSuite with TmpFolderPerSuite with PerTest {
private def produceParamMap(checkpointPath: String, checkpointInterval: Int):
Map[String, Any] = {


@@ -18,12 +18,12 @@ package ml.dmlc.xgboost4j.scala.spark
import org.apache.spark.Partitioner
import org.apache.spark.ml.feature.VectorAssembler
-import org.scalatest.FunSuite
+import org.scalatest.funsuite.AnyFunSuite
import org.apache.spark.sql.functions._
import scala.util.Random
-class FeatureSizeValidatingSuite extends FunSuite with PerTest {
+class FeatureSizeValidatingSuite extends AnyFunSuite with PerTest {
test("transform throwing exception if feature size of dataset is greater than model's") {
val modelPath = getClass.getResource("/model/0.82/model").getPath


@@ -19,12 +19,12 @@ package ml.dmlc.xgboost4j.scala.spark
import org.apache.spark.ml.feature.VectorAssembler
import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.sql.DataFrame
-import org.scalatest.FunSuite
+import org.scalatest.funsuite.AnyFunSuite
import scala.util.Random
import org.apache.spark.SparkException
-class MissingValueHandlingSuite extends FunSuite with PerTest {
+class MissingValueHandlingSuite extends AnyFunSuite with PerTest {
test("dense vectors containing missing value") {
def buildDenseDataFrame(): DataFrame = {
val numRows = 100


@@ -16,12 +16,13 @@
package ml.dmlc.xgboost4j.scala.spark
-import org.scalatest.{BeforeAndAfterAll, FunSuite}
+import org.scalatest.BeforeAndAfterAll
+import org.scalatest.funsuite.AnyFunSuite
import org.apache.spark.SparkException
import org.apache.spark.ml.param.ParamMap
-class ParameterSuite extends FunSuite with PerTest with BeforeAndAfterAll {
+class ParameterSuite extends AnyFunSuite with PerTest with BeforeAndAfterAll {
test("XGBoost and Spark parameters synchronize correctly") {
val xgbParamMap = Map("eta" -> "1", "objective" -> "binary:logistic",


@@ -22,13 +22,14 @@ import ml.dmlc.xgboost4j.{LabeledPoint => XGBLabeledPoint}
import org.apache.spark.SparkContext
import org.apache.spark.sql._
-import org.scalatest.{BeforeAndAfterEach, FunSuite}
+import org.scalatest.BeforeAndAfterEach
+import org.scalatest.funsuite.AnyFunSuite
import scala.math.min
import scala.util.Random
import org.apache.commons.io.IOUtils
-trait PerTest extends BeforeAndAfterEach { self: FunSuite =>
+trait PerTest extends BeforeAndAfterEach { self: AnyFunSuite =>
protected val numWorkers: Int = min(Runtime.getRuntime.availableProcessors(), 4)


@@ -25,9 +25,9 @@ import scala.util.Random
import org.apache.spark.ml.feature._
import org.apache.spark.ml.{Pipeline, PipelineModel}
import org.apache.spark.sql.functions._
-import org.scalatest.FunSuite
+import org.scalatest.funsuite.AnyFunSuite
-class PersistenceSuite extends FunSuite with TmpFolderPerSuite with PerTest {
+class PersistenceSuite extends AnyFunSuite with TmpFolderPerSuite with PerTest {
test("test persistence of XGBoostClassifier and XGBoostClassificationModel") {
val eval = new EvalError()


@@ -19,9 +19,10 @@ package ml.dmlc.xgboost4j.scala.spark
import java.nio.file.{Files, Path}
import org.apache.spark.network.util.JavaUtils
-import org.scalatest.{BeforeAndAfterAll, FunSuite}
+import org.scalatest.BeforeAndAfterAll
+import org.scalatest.funsuite.AnyFunSuite
-trait TmpFolderPerSuite extends BeforeAndAfterAll { self: FunSuite =>
+trait TmpFolderPerSuite extends BeforeAndAfterAll { self: AnyFunSuite =>
protected var tempDir: Path = _
override def beforeAll(): Unit = {


@@ -22,13 +22,13 @@ import ml.dmlc.xgboost4j.scala.{DMatrix, XGBoost => ScalaXGBoost}
import org.apache.spark.ml.linalg._
import org.apache.spark.sql._
-import org.scalatest.FunSuite
+import org.scalatest.funsuite.AnyFunSuite
import org.apache.commons.io.IOUtils
import org.apache.spark.Partitioner
import org.apache.spark.ml.feature.VectorAssembler
-class XGBoostClassifierSuite extends FunSuite with PerTest with TmpFolderPerSuite {
+class XGBoostClassifierSuite extends AnyFunSuite with PerTest with TmpFolderPerSuite {
protected val treeMethod: String = "auto"


@@ -21,11 +21,11 @@ import ml.dmlc.xgboost4j.scala.Booster
import scala.collection.JavaConverters._
import org.apache.spark.sql._
-import org.scalatest.FunSuite
+import org.scalatest.funsuite.AnyFunSuite
import org.apache.spark.SparkException
-class XGBoostCommunicatorRegressionSuite extends FunSuite with PerTest {
+class XGBoostCommunicatorRegressionSuite extends AnyFunSuite with PerTest {
val predictionErrorMin = 0.00001f
val maxFailure = 2;


@@ -19,9 +19,9 @@ package ml.dmlc.xgboost4j.scala.spark
import ml.dmlc.xgboost4j.scala.{Booster, DMatrix}
import org.apache.spark.sql._
-import org.scalatest.FunSuite
+import org.scalatest.funsuite.AnyFunSuite
-class XGBoostConfigureSuite extends FunSuite with PerTest {
+class XGBoostConfigureSuite extends AnyFunSuite with PerTest {
override def sparkSessionBuilder: SparkSession.Builder = super.sparkSessionBuilder
.config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")


@@ -22,12 +22,12 @@ import ml.dmlc.xgboost4j.{LabeledPoint => XGBLabeledPoint}
import ml.dmlc.xgboost4j.scala.DMatrix
import org.apache.spark.{SparkException, TaskContext}
-import org.scalatest.FunSuite
+import org.scalatest.funsuite.AnyFunSuite
import org.apache.spark.ml.feature.VectorAssembler
import org.apache.spark.sql.functions.lit
-class XGBoostGeneralSuite extends FunSuite with TmpFolderPerSuite with PerTest {
+class XGBoostGeneralSuite extends AnyFunSuite with TmpFolderPerSuite with PerTest {
test("distributed training with the specified worker number") {
val trainingRDD = sc.parallelize(Classification.train)


@@ -23,11 +23,11 @@ import ml.dmlc.xgboost4j.scala.{DMatrix, XGBoost => ScalaXGBoost}
import org.apache.spark.ml.linalg.{Vector, Vectors}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.{DataFrame, Row}
-import org.scalatest.FunSuite
+import org.scalatest.funsuite.AnyFunSuite
import org.apache.spark.ml.feature.VectorAssembler
-class XGBoostRegressorSuite extends FunSuite with PerTest with TmpFolderPerSuite {
+class XGBoostRegressorSuite extends AnyFunSuite with PerTest with TmpFolderPerSuite {
protected val treeMethod: String = "auto"
test("XGBoost-Spark XGBoostRegressor output should match XGBoost4j") {


@@ -69,7 +69,7 @@ pom_template = """
<dependency>
<groupId>org.scalactic</groupId>
<artifactId>scalactic_${{scala.binary.version}}</artifactId>
-<version>3.0.8</version>
+<version>3.2.15</version>
<scope>test</scope>
</dependency>
<dependency>


@@ -46,7 +46,7 @@
<dependency>
<groupId>org.scalatest</groupId>
<artifactId>scalatest_${scala.binary.version}</artifactId>
-<version>3.0.5</version>
+<version>3.2.15</version>
<scope>provided</scope>
</dependency>
</dependencies>


@@ -20,10 +20,10 @@ import java.util.Arrays
import scala.util.Random
-import org.scalatest.FunSuite
+import org.scalatest.funsuite.AnyFunSuite
import ml.dmlc.xgboost4j.java.{DMatrix => JDMatrix}
-class DMatrixSuite extends FunSuite {
+class DMatrixSuite extends AnyFunSuite {
test("create DMatrix from File") {
val dmat = new DMatrix("../../demo/data/agaricus.txt.test")
// get label


@@ -20,11 +20,11 @@ import java.io.{FileOutputStream, FileInputStream, File}
import junit.framework.TestCase
import org.apache.commons.logging.LogFactory
-import org.scalatest.FunSuite
+import org.scalatest.funsuite.AnyFunSuite
import ml.dmlc.xgboost4j.java.XGBoostError
-class ScalaBoosterImplSuite extends FunSuite {
+class ScalaBoosterImplSuite extends AnyFunSuite {
private class EvalError extends EvalTrait {