class BoostingRegressor extends Regressor[Vector, BoostingRegressor, BoostingRegressionModel] with BoostingRegressorParams with MLWritable
- Source
- BoostingRegressor.scala
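The snippet below is a minimal usage sketch, not taken from this page: the import path, the DecisionTreeRegressor base learner, the toy data, and the column names are assumptions; only the setters and the fit/transform calls come from the member list that follows.

```scala
import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.ml.regression.{BoostingRegressor, DecisionTreeRegressor} // package path assumed
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[*]").appName("boosting-regressor-example").getOrCreate()
import spark.implicits._

// Toy dataset using the default "features" and "label" column names.
val data = Seq(
  (Vectors.dense(0.0, 1.0), 1.0),
  (Vectors.dense(1.0, 0.0), 2.0),
  (Vectors.dense(1.0, 1.0), 3.0)
).toDF("features", "label")

val boosting = new BoostingRegressor()
  .setBaseLearner(new DecisionTreeRegressor()) // weak learner refit at every boosting iteration
  .setNumBaseLearners(10)                      // number of boosting iterations
  .setLossType("squared")                      // "exponential", "linear" or "squared"

val model = boosting.fit(data)                 // returns a BoostingRegressionModel
model.transform(data).select("features", "label", "prediction").show()
```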
Linear Supertypes
- MLWritable
- BoostingRegressorParams
- BoostingParams
- HasAggregationDepth
- HasCheckpointInterval
- HasBaseLearner
- HasWeightCol
- HasNumBaseLearners
- Regressor
- Predictor
- PredictorParams
- HasPredictionCol
- HasFeaturesCol
- HasLabelCol
- Estimator
- PipelineStage
- Logging
- Params
- Serializable
- Identifiable
- AnyRef
- Any
Value Members
- final def !=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def ##: Int
- Definition Classes
- AnyRef → Any
- final def $[T](param: Param[T]): T
- Attributes
- protected
- Definition Classes
- Params
- final def ==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final val aggregationDepth: IntParam
- Definition Classes
- HasAggregationDepth
- final def asInstanceOf[T0]: T0
- Definition Classes
- Any
- val baseLearner: Param[EnsembleRegressorType]
Param for the estimator that will be used by the ensemble learner as a base learner.
- Definition Classes
- HasBaseLearner
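A short sketch of swapping in a different base learner: any spark.ml regressor that matches EnsembleRegressorType can presumably be plugged in; LinearRegression here is an illustrative assumption, not a requirement from this page.

```scala
import org.apache.spark.ml.regression.LinearRegression

val withLinearBase = new BoostingRegressor()
  .setBaseLearner(new LinearRegression().setMaxIter(50)) // replace the weak learner
println(withLinearBase.getBaseLearner.getClass.getSimpleName) // LinearRegression
```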
- final val checkpointInterval: IntParam
- Definition Classes
- HasCheckpointInterval
- final def clear(param: Param[_]): BoostingRegressor.this.type
- Definition Classes
- Params
- def clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.CloneNotSupportedException]) @native()
- def copy(extra: ParamMap): BoostingRegressor
- Definition Classes
- BoostingRegressor → Predictor → Estimator → PipelineStage → Params
- def copyValues[T <: Params](to: T, extra: ParamMap): T
- Attributes
- protected
- Definition Classes
- Params
- final def defaultCopy[T <: Params](extra: ParamMap): T
- Attributes
- protected
- Definition Classes
- Params
- final def eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- def equals(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef → Any
- def error(label: Double, prediction: Double): Double
- def explainParam(param: Param[_]): String
- Definition Classes
- Params
- def explainParams(): String
- Definition Classes
- Params
- def extractInstances(dataset: Dataset[_], validateInstance: (Instance) => Unit): RDD[Instance]
- Attributes
- protected
- Definition Classes
- PredictorParams
- def extractInstances(dataset: Dataset[_]): RDD[Instance]
- Attributes
- protected
- Definition Classes
- PredictorParams
- def extractLabeledPoints(dataset: Dataset[_]): RDD[LabeledPoint]
- Attributes
- protected
- Definition Classes
- Predictor
- final def extractParamMap(): ParamMap
- Definition Classes
- Params
- final def extractParamMap(extra: ParamMap): ParamMap
- Definition Classes
- Params
- final val featuresCol: Param[String]
- Definition Classes
- HasFeaturesCol
- def finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.Throwable])
- def fit(dataset: Dataset[_]): BoostingRegressionModel
- Definition Classes
- Predictor → Estimator
- def fit(dataset: Dataset[_], paramMaps: Seq[ParamMap]): Seq[BoostingRegressionModel]
- Definition Classes
- Estimator
- Annotations
- @Since("2.0.0")
- def fit(dataset: Dataset[_], paramMap: ParamMap): BoostingRegressionModel
- Definition Classes
- Estimator
- Annotations
- @Since("2.0.0")
- def fit(dataset: Dataset[_], firstParamPair: ParamPair[_], otherParamPairs: ParamPair[_]*): BoostingRegressionModel
- Definition Classes
- Estimator
- Annotations
- @Since("2.0.0") @varargs()
- def fitBaseLearner(baseLearner: EnsembleRegressorType, labelColName: String, featuresColName: String, predictionColName: String, weightColName: Option[String])(df: DataFrame): EnsemblePredictionModelType
- Attributes
- protected
- Definition Classes
- HasBaseLearner
- final def get[T](param: Param[T]): Option[T]
- Definition Classes
- Params
- final def getAggregationDepth: Int
- Definition Classes
- HasAggregationDepth
- def getBaseLearner: EnsembleRegressorType
- Definition Classes
- HasBaseLearner
- final def getCheckpointInterval: Int
- Definition Classes
- HasCheckpointInterval
- final def getClass(): Class[_ <: AnyRef]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
- final def getDefault[T](param: Param[T]): Option[T]
- Definition Classes
- Params
- final def getFeaturesCol: String
- Definition Classes
- HasFeaturesCol
- final def getLabelCol: String
- Definition Classes
- HasLabelCol
- def getLossType: String
- Definition Classes
- BoostingRegressorParams
- def getNumBaseLearners: Int
- Definition Classes
- HasNumBaseLearners
- final def getOrDefault[T](param: Param[T]): T
- Definition Classes
- Params
- def getParam(paramName: String): Param[Any]
- Definition Classes
- Params
- final def getPredictionCol: String
- Definition Classes
- HasPredictionCol
- def getVotingStrategy: String
- Definition Classes
- BoostingRegressorParams
- final def getWeightCol: String
- Definition Classes
- HasWeightCol
- final def hasDefault[T](param: Param[T]): Boolean
- Definition Classes
- Params
- def hasParam(paramName: String): Boolean
- Definition Classes
- Params
- def hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
- def initializeLogIfNecessary(isInterpreter: Boolean, silent: Boolean): Boolean
- Attributes
- protected
- Definition Classes
- Logging
- def initializeLogIfNecessary(isInterpreter: Boolean): Unit
- Attributes
- protected
- Definition Classes
- Logging
- final def isDefined(param: Param[_]): Boolean
- Definition Classes
- Params
- final def isInstanceOf[T0]: Boolean
- Definition Classes
- Any
- final def isSet(param: Param[_]): Boolean
- Definition Classes
- Params
- def isTraceEnabled(): Boolean
- Attributes
- protected
- Definition Classes
- Logging
- final val labelCol: Param[String]
- Definition Classes
- HasLabelCol
- def log: Logger
- Attributes
- protected
- Definition Classes
- Logging
- def logDebug(msg: => String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
- def logDebug(msg: => String): Unit
- Attributes
- protected
- Definition Classes
- Logging
- def logError(msg: => String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
- def logError(msg: => String): Unit
- Attributes
- protected
- Definition Classes
- Logging
- def logInfo(msg: => String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
- def logInfo(msg: => String): Unit
- Attributes
- protected
- Definition Classes
- Logging
- def logName: String
- Attributes
- protected
- Definition Classes
- Logging
- def logTrace(msg: => String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
- def logTrace(msg: => String): Unit
- Attributes
- protected
- Definition Classes
- Logging
- def logWarning(msg: => String, throwable: Throwable): Unit
- Attributes
- protected
- Definition Classes
- Logging
- def logWarning(msg: => String): Unit
- Attributes
- protected
- Definition Classes
- Logging
- def loss(error: Double): Double
- val lossType: Param[String]
Loss function which Boosting tries to minimize (case-insensitive). Supported options: "exponential", "linear", "squared". Default: "exponential".
- Definition Classes
- BoostingRegressorParams
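A short sketch of selecting the loss; the names are the supported values listed above and are matched case-insensitively per the param description.

```scala
val withLinearLoss = new BoostingRegressor().setLossType("linear")
println(withLinearLoss.getLossType)                           // linear
println(withLinearLoss.explainParam(withLinearLoss.lossType)) // prints the description and default
```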
- final def ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- final def notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- final def notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- val numBaseLearners: Param[Int]
Param for the number of base learners of the algorithm.
- Definition Classes
- HasNumBaseLearners
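Sketch: more base learners means more boosting iterations and a longer fit; the value below is arbitrary.

```scala
val deeperEnsemble = new BoostingRegressor().setNumBaseLearners(25)
println(deeperEnsemble.getNumBaseLearners) // 25
```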
- lazy val params: Array[Param[_]]
- Definition Classes
- Params
- final val predictionCol: Param[String]
- Definition Classes
- HasPredictionCol
- def save(path: String): Unit
- Definition Classes
- MLWritable
- Annotations
- @Since("1.6.0") @throws("If the input path already exists but overwrite is not enabled.")
- final def set(paramPair: ParamPair[_]): BoostingRegressor.this.type
- Attributes
- protected
- Definition Classes
- Params
- final def set(param: String, value: Any): BoostingRegressor.this.type
- Attributes
- protected
- Definition Classes
- Params
- final def set[T](param: Param[T], value: T): BoostingRegressor.this.type
- Definition Classes
- Params
- def setBaseLearner(value: EnsembleRegressorType): BoostingRegressor.this.type
- def setCheckpointInterval(value: Int): BoostingRegressor.this.type
- final def setDefault(paramPairs: ParamPair[_]*): BoostingRegressor.this.type
- Attributes
- protected
- Definition Classes
- Params
- final def setDefault[T](param: Param[T], value: T): BoostingRegressor.this.type
- Attributes
- protected
- Definition Classes
- Params
- def setFeaturesCol(value: String): BoostingRegressor
- Definition Classes
- Predictor
- def setLabelCol(value: String): BoostingRegressor
- Definition Classes
- Predictor
- def setLossType(value: String): BoostingRegressor.this.type
- def setNumBaseLearners(value: Int): BoostingRegressor.this.type
- def setPredictionCol(value: String): BoostingRegressor
- Definition Classes
- Predictor
- def setVotingStrategy(value: String): BoostingRegressor.this.type
- def setWeightCol(value: String): BoostingRegressor.this.type
- final def synchronized[T0](arg0: => T0): T0
- Definition Classes
- AnyRef
- def toString(): String
- Definition Classes
- Identifiable → AnyRef → Any
- def train(dataset: Dataset[_]): BoostingRegressionModel
- Attributes
- protected
- Definition Classes
- BoostingRegressor → Predictor
- def transformSchema(schema: StructType): StructType
- Definition Classes
- Predictor → PipelineStage
- def transformSchema(schema: StructType, logging: Boolean): StructType
- Attributes
- protected
- Definition Classes
- PipelineStage
- Annotations
- @DeveloperApi()
- val uid: String
- Definition Classes
- BoostingRegressor → Identifiable
- def validateAndTransformSchema(schema: StructType, fitting: Boolean, featuresDataType: DataType): StructType
- Attributes
- protected
- Definition Classes
- PredictorParams
- val votingStrategy: Param[String]
Voting strategy used to aggregate the predictions of the base regressors (case-insensitive). Supported options: "median", "mean". Default: "median".
- Definition Classes
- BoostingRegressorParams
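The difference between the two strategies can be seen on a made-up set of base-model predictions:

```scala
val basePredictions = Seq(1.0, 1.2, 9.0)                            // hypothetical outputs of three base models
val mean   = basePredictions.sum / basePredictions.size             // 3.73..., pulled toward the outlier
val median = basePredictions.sorted.apply(basePredictions.size / 2) // 1.2, robust to the outlier

val robust = new BoostingRegressor().setVotingStrategy("median")    // names matched case-insensitively
```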
- final def wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException]) @native()
- final val weightCol: Param[String]
- Definition Classes
- HasWeightCol
- def write: MLWriter
- Definition Classes
- BoostingRegressor → MLWritable