GBTClassifier (pyspark)
Source code for pyspark.ml.classification
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import warnings

from pyspark import since
from pyspark.ml.util import keyword_only
from pyspark.ml.wrapper import JavaEstimator, JavaModel
from pyspark.ml.param.shared import *
from pyspark.ml.regression import (
    RandomForestParams, TreeEnsembleParams, DecisionTreeModel, TreeEnsembleModels)
from pyspark.mllib.common import inherit_doc

__all__ = ['LogisticRegression', 'LogisticRegressionModel', 'DecisionTreeClassifier',
           'DecisionTreeClassificationModel', 'GBTClassifier', 'GBTClassificationModel',
           'RandomForestClassifier', 'RandomForestClassificationModel', 'NaiveBayes',
           'NaiveBayesModel', 'MultilayerPerceptronClassifier',
           'MultilayerPerceptronClassificationModel']
Source: ~jegonzal/pyspark/_modules/pyspark/ml/classification

@inherit_doc
class LogisticRegression(JavaEstimator, HasFeaturesCol, HasLabelCol, HasPredictionCol, HasMaxIter,
                         HasRegParam, HasTol, HasProbabilityCol, HasRawPredictionCol,
                         HasElasticNetParam, HasFitIntercept, HasStandardization, HasThresholds,
                         HasWeightCol):
    """
    Logistic regression.
    Currently, this class only supports binary classification.

    .. versionadded::
    """

    # a placeholder to make it appear in the generated doc
    threshold = Param(Params._dummy(), "threshold",
                      "Threshold in binary classification prediction, in range [0, 1]." +
                      " If threshold and thresholds are both set, they must match.")

    @keyword_only
    def __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction",
                 maxIter=100, regParam=0.1, elasticNetParam=0.0, tol=1e-6, fitIntercept=True,
                 threshold=0.5, thresholds=None, probabilityCol="probability",
                 rawPredictionCol="rawPrediction", standardization=True, weightCol=None):
        """
        __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
                 maxIter=100, regParam=0.1, elasticNetParam=0.0, tol=1e-6, fitIntercept=True, \
                 threshold=0.5, thresholds=None, probabilityCol="probability", \
                 rawPredictionCol="rawPrediction", standardization=True, weightCol=None)
        If the threshold and thresholds Params are both set, they must be equivalent.
        """
        super(LogisticRegression, self).__init__()
        self._java_obj = self._new_java_obj(
            "org.apache.spark.ml.classification.LogisticRegression", self.uid)
        #: param for threshold in binary classification, in range [0, 1]
        self.threshold = Param(self, "threshold",
                               "Threshold in binary classification prediction, in range [0, 1]." +
                               " If threshold and thresholds are both set, they must match.")
        self._setDefault(maxIter=100, regParam=0.1, tol=1E-6, threshold=0.5)
        kwargs = self.__init__._input_kwargs
        self.setParams(**kwargs)
        self._checkThresholdConsistency()

    @keyword_only
    @since("")
    def setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction",
                  maxIter=100, regParam=0.1, elasticNetParam=0.0, tol=1e-6, fitIntercept=True,
                  threshold=0.5, thresholds=None, probabilityCol="probability",
                  rawPredictionCol="rawPrediction", standardization=True, weightCol=None):
        """
        setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
                  maxIter=100, regParam=0.1, elasticNetParam=0.0, tol=1e-6, fitIntercept=True, \
                  threshold=0.5, thresholds=None, probabilityCol="probability", \
                  rawPredictionCol="rawPrediction", standardization=True, weightCol=None)
        Sets params for logistic regression.
        If the threshold and thresholds Params are both set, they must be equivalent.
        """
        kwargs = self.setParams._input_kwargs
        self._set(**kwargs)
        self._checkThresholdConsistency()
        return self
    def _create_model(self, java_model):
        return LogisticRegressionModel(java_model)

    @since("")
    def setThreshold(self, value):
        """
        Sets the value of :py:attr:`threshold`.
        Clears value of :py:attr:`thresholds` if it has been set.
        """
        self._paramMap[self.threshold] = value
        if self.isSet(self.thresholds):
            del self._paramMap[self.thresholds]
        return self
@since("")[docs]defgetThreshold(self):""" Gets the value of threshold or its default value. """self._checkThresholdConsistency()conwaytransport.com.au(conwaytransport.com.auolds):ts=conwaytransport.com.auefault(conwaytransport.com.auolds)iflen(ts)!=2:raiseValueError("Logistic Regression getThreshold only applies to"+" binary classification, but thresholds has length != 2."+" thresholds: "+",".join(ts))return/(+ts[0]/ts[1])else:conwaytransport.com.auefault(conwaytransport.com.auold)
@since("")[docs]defsetThresholds(self,value):""" Sets the value of :py:attr:`thresholds`. Clears value of :py:attr:`threshold` if it has been set. """self._paramMap[conwaytransport.com.auolds]=conwaytransport.com.au(conwaytransport.com.auold):delself._paramMap[conwaytransport.com.auold]returnself
@since("")[docs]defgetThresholds(self):""" If :py:attr:`thresholds` is set, return its value. Otherwise, if :py:attr:`threshold` is set, return the equivalent thresholds for binary classification: (1-threshold, threshold). If neither are set, throw an error. """self._checkThresholdConsistency()conwaytransport.com.au(conwaytransport.com.auolds)conwaytransport.com.au(conwaytransport.com.auold):t=conwaytransport.com.auefault(conwaytransport.com.auold)return[t,t]else:conwaytransport.com.auefault(conwaytransport.com.auolds)
    def _checkThresholdConsistency(self):
        if self.isSet(self.threshold) and self.isSet(self.thresholds):
            ts = self.getParam(self.thresholds)
            if len(ts) != 2:
                raise ValueError("Logistic Regression getThreshold only applies to" +
                                 " binary classification, but thresholds has length != 2." +
                                 " thresholds: " + ",".join(ts))
            t = 1.0 / (1.0 + ts[0] / ts[1])
            t2 = self.getParam(self.threshold)
            if abs(t2 - t) >= 1E-5:
                raise ValueError("Logistic Regression getThreshold found inconsistent values for" +
                                 " threshold (%g) and thresholds (equivalent to %g)" % (t2, t))


class LogisticRegressionModel(JavaModel):
    """
    Model fitted by LogisticRegression.

    .. versionadded::
    """

    @property
    @since("")
    def weights(self):
        """
        Model weights.
        """
        warnings.warn("weights is deprecated. Use coefficients instead.")
        return self._call_java("weights")

    @property
    @since("")
    def coefficients(self):
        """
        Model coefficients.
        """
        return self._call_java("coefficients")

    @property
    @since("")
    def intercept(self):
        """
        Model intercept.
        """
        return self._call_java("intercept")
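A minimal usage sketch for LogisticRegression, assuming a PySpark 1.6-era shell where sc and sqlContext already exist; the toy rows and parameter values are chosen for illustration only:

    from pyspark.sql import Row
    from pyspark.mllib.linalg import Vectors
    from pyspark.ml.classification import LogisticRegression

    # Two-row toy dataset: one dense and one sparse feature vector.
    df = sqlContext.createDataFrame([
        Row(label=1.0, weight=2.0, features=Vectors.dense(1.0)),
        Row(label=0.0, weight=2.0, features=Vectors.sparse(1, [], []))])
    lr = LogisticRegression(maxIter=5, regParam=0.01, weightCol="weight")
    model = lr.fit(df)
    # The fitted binary model exposes coefficients and intercept; transform()
    # adds rawPrediction, probability and prediction columns.
    print(model.coefficients)
    print(model.intercept)
    model.transform(df).select("features", "probability", "prediction").show()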
class TreeClassifierParams(object):
    """
    Private class to track supported impurity measures.

    .. versionadded::
    """
    supportedImpurities = ["entropy", "gini"]

    # a placeholder to make it appear in the generated doc
    impurity = Param(Params._dummy(), "impurity",
                     "Criterion used for information gain calculation (case-insensitive). " +
                     "Supported options: " + ", ".join(supportedImpurities))

    def __init__(self):
        super(TreeClassifierParams, self).__init__()
        #: param for Criterion used for information gain calculation (case-insensitive)
        self.impurity = Param(self, "impurity", "Criterion used for information " +
                              "gain calculation (case-insensitive). Supported options: " +
                              ", ".join(self.supportedImpurities))

    @since("")
    def setImpurity(self, value):
        """
        Sets the value of :py:attr:`impurity`.
        """
        self._paramMap[self.impurity] = value
        return self

    @since("")
    def getImpurity(self):
        """
        Gets the value of impurity or its default value.
        """
        return self.getOrDefault(self.impurity)


class GBTParams(TreeEnsembleParams):
    """
    Private class to track supported GBT params.

    .. versionadded::
    """
    supportedLossTypes = ["logistic"]


@inherit_doc
class DecisionTreeClassifier(JavaEstimator, HasFeaturesCol, HasLabelCol, HasPredictionCol,
                             HasProbabilityCol, HasRawPredictionCol, DecisionTreeParams,
                             TreeClassifierParams, HasCheckpointInterval):
    """
    Decision tree learning algorithm for classification.
    It supports both binary and multiclass labels, as well as both continuous and
    categorical features.

    .. versionadded::
    """

    @keyword_only
    def __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction",
                 probabilityCol="probability", rawPredictionCol="rawPrediction",
                 maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
                 maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, impurity="gini"):
        """
        __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
                 probabilityCol="probability", rawPredictionCol="rawPrediction", \
                 maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, \
                 maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, impurity="gini")
        """
        super(DecisionTreeClassifier, self).__init__()
        self._java_obj = self._new_java_obj(
            "org.apache.spark.ml.classification.DecisionTreeClassifier", self.uid)
        self._setDefault(maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
                         maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10,
                         impurity="gini")
        kwargs = self.__init__._input_kwargs
        self.setParams(**kwargs)

    @keyword_only
    @since("")
    def setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction",
                  probabilityCol="probability", rawPredictionCol="rawPrediction",
                  maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
                  maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, impurity="gini"):
        """
        setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
                  probabilityCol="probability", rawPredictionCol="rawPrediction", \
                  maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, \
                  maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, impurity="gini")
        Sets params for the DecisionTreeClassifier.
        """
        kwargs = self.setParams._input_kwargs
        return self._set(**kwargs)
    def _create_model(self, java_model):
        return DecisionTreeClassificationModel(java_model)


class DecisionTreeClassificationModel(DecisionTreeModel):
    """
    Model fitted by DecisionTreeClassifier.

    .. versionadded::
    """
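The tree-based estimators follow the same fit/transform pattern; a short illustrative sketch for DecisionTreeClassifier under the same shell assumptions (sc/sqlContext available, toy values):

    from pyspark.mllib.linalg import Vectors
    from pyspark.ml.feature import StringIndexer
    from pyspark.ml.classification import DecisionTreeClassifier

    df = sqlContext.createDataFrame([
        (1.0, Vectors.dense(1.0)),
        (0.0, Vectors.sparse(1, [], []))], ["label", "features"])
    # Index the label column first, then train on the indexed label.
    si_model = StringIndexer(inputCol="label", outputCol="indexed").fit(df)
    td = si_model.transform(df)
    model = DecisionTreeClassifier(maxDepth=2, labelCol="indexed").fit(td)
    print(model.numNodes)   # size of the fitted tree
    print(model.depth)
    print(model.transform(td).head().prediction)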
@inherit_doc
class RandomForestClassifier(JavaEstimator, HasFeaturesCol, HasLabelCol, HasPredictionCol,
                             HasSeed, HasRawPredictionCol, HasProbabilityCol,
                             RandomForestParams, TreeClassifierParams, HasCheckpointInterval):
    """
    Random Forest learning algorithm for classification.
    It supports both binary and multiclass labels, as well as both continuous and
    categorical features.

    .. versionadded::
    """

    @keyword_only
    def __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction",
                 probabilityCol="probability", rawPredictionCol="rawPrediction",
                 maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
                 maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10,
                 impurity="gini", numTrees=20, featureSubsetStrategy="auto", seed=None):
        """
        __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
                 probabilityCol="probability", rawPredictionCol="rawPrediction", \
                 maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, \
                 maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, impurity="gini", \
                 numTrees=20, featureSubsetStrategy="auto", seed=None)
        """
        super(RandomForestClassifier, self).__init__()
        self._java_obj = self._new_java_obj(
            "org.apache.spark.ml.classification.RandomForestClassifier", self.uid)
        self._setDefault(maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
                         maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10,
                         seed=None, impurity="gini", numTrees=20, featureSubsetStrategy="auto")
        kwargs = self.__init__._input_kwargs
        self.setParams(**kwargs)

    @keyword_only
    @since("")
    def setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction",
                  probabilityCol="probability", rawPredictionCol="rawPrediction",
                  maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
                  maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, seed=None,
                  impurity="gini", numTrees=20, featureSubsetStrategy="auto"):
        """
        setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
                  probabilityCol="probability", rawPredictionCol="rawPrediction", \
                  maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, \
                  maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, seed=None, \
                  impurity="gini", numTrees=20, featureSubsetStrategy="auto")
        Sets params for the RandomForestClassifier.
        """
        kwargs = self.setParams._input_kwargs
        return self._set(**kwargs)
    def _create_model(self, java_model):
        return RandomForestClassificationModel(java_model)


class RandomForestClassificationModel(TreeEnsembleModels):
    """
    Model fitted by RandomForestClassifier.

    .. versionadded::
    """
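RandomForestClassifier works the same way; an illustrative sketch under the same assumptions:

    from numpy import allclose
    from pyspark.mllib.linalg import Vectors
    from pyspark.ml.feature import StringIndexer
    from pyspark.ml.classification import RandomForestClassifier

    df = sqlContext.createDataFrame([
        (1.0, Vectors.dense(1.0)),
        (0.0, Vectors.sparse(1, [], []))], ["label", "features"])
    td = StringIndexer(inputCol="label", outputCol="indexed").fit(df).transform(df)
    rf = RandomForestClassifier(numTrees=3, maxDepth=2, labelCol="indexed", seed=42)
    model = rf.fit(td)
    # Each tree in the ensemble carries a weight; for a random forest they are all 1.0.
    print(allclose(model.treeWeights, [1.0] * 3))
    print(model.transform(td).head().prediction)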
@inherit_doc
class GBTClassifier(JavaEstimator, HasFeaturesCol, HasLabelCol, HasPredictionCol, HasMaxIter,
                    GBTParams, HasCheckpointInterval, HasStepSize, HasSeed):
    """
    Gradient-Boosted Trees (GBTs) learning algorithm for classification.
    It supports binary labels, as well as both continuous and categorical features.
    Note: Multiclass labels are not currently supported.

    .. versionadded::
    """

    # a placeholder to make it appear in the generated doc
    lossType = Param(Params._dummy(), "lossType",
                     "Loss function which GBT tries to minimize (case-insensitive). " +
                     "Supported options: " + ", ".join(GBTParams.supportedLossTypes))

    @keyword_only
    def __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction",
                 maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
                 maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10,
                 lossType="logistic", maxIter=20, stepSize=0.1):
        """
        __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
                 maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, \
                 maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, \
                 lossType="logistic", maxIter=20, stepSize=0.1)
        """
        super(GBTClassifier, self).__init__()
        self._java_obj = self._new_java_obj(
            "org.apache.spark.ml.classification.GBTClassifier", self.uid)
        #: param for Loss function which GBT tries to minimize (case-insensitive)
        self.lossType = Param(self, "lossType",
                              "Loss function which GBT tries to minimize (case-insensitive). " +
                              "Supported options: " + ", ".join(GBTParams.supportedLossTypes))
        self._setDefault(maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
                         maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10,
                         lossType="logistic", maxIter=20, stepSize=0.1)
        kwargs = self.__init__._input_kwargs
        self.setParams(**kwargs)

    @keyword_only
    @since("")
    def setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction",
                  maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
                  maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10,
                  lossType="logistic", maxIter=20, stepSize=0.1):
        """
        setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
                  maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, \
                  maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, \
                  lossType="logistic", maxIter=20, stepSize=0.1)
        Sets params for Gradient Boosted Tree Classification.
        """
        kwargs = self.setParams._input_kwargs
        return self._set(**kwargs)
    def _create_model(self, java_model):
        return GBTClassificationModel(java_model)

    @since("")
    def setLossType(self, value):
        """
        Sets the value of :py:attr:`lossType`.
        """
        self._paramMap[self.lossType] = value
        return self
@since("")[docs]defgetLossType(self):""" Gets the value of lossType or its default value. """conwaytransport.com.auefault(conwaytransport.com.aupe)
class GBTClassificationModel(TreeEnsembleModels):
    """
    Model fitted by GBTClassifier.

    .. versionadded::
    """
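An illustrative GBTClassifier sketch under the same assumptions (binary labels only; toy data and parameters chosen for illustration):

    from pyspark.mllib.linalg import Vectors
    from pyspark.ml.feature import StringIndexer
    from pyspark.ml.classification import GBTClassifier

    df = sqlContext.createDataFrame([
        (1.0, Vectors.dense(1.0)),
        (0.0, Vectors.sparse(1, [], []))], ["label", "features"])
    td = StringIndexer(inputCol="label", outputCol="indexed").fit(df).transform(df)
    # maxIter controls the number of boosting stages; stepSize is the learning rate.
    gbt = GBTClassifier(maxIter=5, maxDepth=2, labelCol="indexed")
    model = gbt.fit(td)
    print(len(model.treeWeights))   # one tree per boosting iteration
    print(model.transform(td).head().prediction)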
@inherit_doc
class NaiveBayes(JavaEstimator, HasFeaturesCol, HasLabelCol, HasPredictionCol, HasProbabilityCol,
                 HasRawPredictionCol):
    """
    Naive Bayes Classifiers.
    It supports both Multinomial and Bernoulli NB. Multinomial NB can handle finitely
    supported discrete data. For example, by converting documents into TF-IDF vectors, it
    can be used for document classification. By making every vector a binary (0/1) data, it
    can also be used as Bernoulli NB. The input feature values must be nonnegative.

    .. versionadded::
    """

    # a placeholder to make it appear in the generated doc
    smoothing = Param(Params._dummy(), "smoothing", "The smoothing parameter, should be >= 0, " +
                      "default is 1.0")
    modelType = Param(Params._dummy(), "modelType", "The model type which is a string " +
                      "(case-sensitive). Supported options: multinomial (default) and bernoulli.")

    @keyword_only
    def __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction",
                 probabilityCol="probability", rawPredictionCol="rawPrediction", smoothing=1.0,
                 modelType="multinomial"):
        """
        __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
                 probabilityCol="probability", rawPredictionCol="rawPrediction", smoothing=1.0, \
                 modelType="multinomial")
        """
        super(NaiveBayes, self).__init__()
        self._java_obj = self._new_java_obj(
            "org.apache.spark.ml.classification.NaiveBayes", self.uid)
        #: param for the smoothing parameter
        self.smoothing = Param(self, "smoothing", "The smoothing parameter, should be >= 0, " +
                               "default is 1.0")
        #: param for the model type
        self.modelType = Param(self, "modelType", "The model type which is a string " +
                               "(case-sensitive). Supported options: multinomial (default) " +
                               "and bernoulli.")
        self._setDefault(smoothing=1.0, modelType="multinomial")
        kwargs = self.__init__._input_kwargs
        self.setParams(**kwargs)

    @keyword_only
    @since("")
    def setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction",
                  probabilityCol="probability", rawPredictionCol="rawPrediction", smoothing=1.0,
                  modelType="multinomial"):
        """
        setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
                  probabilityCol="probability", rawPredictionCol="rawPrediction", smoothing=1.0, \
                  modelType="multinomial")
        Sets params for Naive Bayes.
        """
        kwargs = self.setParams._input_kwargs
        return self._set(**kwargs)
    def _create_model(self, java_model):
        return NaiveBayesModel(java_model)

    @since("")
    def setSmoothing(self, value):
        """
        Sets the value of :py:attr:`smoothing`.
        """
        self._paramMap[self.smoothing] = value
        return self
@since("")[docs]defgetSmoothing(self):""" Gets the value of smoothing or its default value. """conwaytransport.com.auefault(conwaytransport.com.auing)
@since("")[docs]defsetModelType(self,value):""" Sets the value of :py:attr:`modelType`. """self._paramMap[conwaytransport.com.auype]=valuereturnself
@since("")[docs]defgetModelType(self):""" Gets the value of modelType or its default value. """conwaytransport.com.auefault(conwaytransport.com.auype)
class NaiveBayesModel(JavaModel):
    """
    Model fitted by NaiveBayes.

    .. versionadded::
    """

    @property
    @since("")
    def pi(self):
        """
        log of class priors.
        """
        return self._call_java("pi")

    @property
    @since("")
    def theta(self):
        """
        log of class conditional probabilities.
        """
        return self._call_java("theta")
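An illustrative NaiveBayes sketch under the same assumptions; feature values must be nonnegative, and the toy rows here are arbitrary:

    from pyspark.sql import Row
    from pyspark.mllib.linalg import Vectors
    from pyspark.ml.classification import NaiveBayes

    df = sqlContext.createDataFrame([
        Row(label=0.0, features=Vectors.dense([0.0, 1.0])),
        Row(label=0.0, features=Vectors.dense([1.0, 1.0])),
        Row(label=1.0, features=Vectors.dense([2.0, 0.0]))])
    nb = NaiveBayes(smoothing=1.0, modelType="multinomial")
    model = nb.fit(df)
    # pi holds the log class priors, theta the log class-conditional probabilities.
    print(model.pi)
    print(model.theta)
    model.transform(df).select("probability", "prediction").show()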
@inherit_doc
class MultilayerPerceptronClassifier(JavaEstimator, HasFeaturesCol, HasLabelCol, HasPredictionCol,
                                     HasMaxIter, HasTol, HasSeed):
    """
    Classifier trainer based on the Multilayer Perceptron.
    Each layer has sigmoid activation function, output layer has softmax.
    Number of inputs has to be equal to the size of feature vectors.
    Number of outputs has to be equal to the total number of labels.

    .. versionadded::
    """

    # a placeholder to make it appear in the generated doc
    layers = Param(Params._dummy(), "layers",
                   "Sizes of layers from input layer to output layer "
                   "E.g., Array(780, 100, 10) means 780 inputs, one hidden layer with 100 "
                   "neurons and output layer of 10 neurons, default is [1, 1].")
    blockSize = Param(Params._dummy(), "blockSize",
                      "Block size for stacking input data in matrices. Data is stacked within "
                      "partitions. If block size is more than remaining data in a partition "
                      "then it is adjusted to the size of this data. Recommended size is "
                      "between 10 and 1000, default is 128.")

    @keyword_only
    def __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction",
                 maxIter=100, tol=1e-4, seed=None, layers=None, blockSize=128):
        """
        __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
                 maxIter=100, tol=1e-4, seed=None, layers=[1, 1], blockSize=128)
        """
        super(MultilayerPerceptronClassifier, self).__init__()
        self._java_obj = self._new_java_obj(
            "org.apache.spark.ml.classification.MultilayerPerceptronClassifier", self.uid)
        self.layers = Param(self, "layers",
                            "Sizes of layers from input layer to output layer "
                            "E.g., Array(780, 100, 10) means 780 inputs, one hidden layer with "
                            "100 neurons and output layer of 10 neurons, default is [1, 1].")
        self.blockSize = Param(self, "blockSize",
                               "Block size for stacking input data in matrices. Data is stacked "
                               "within partitions. If block size is more than remaining data in "
                               "a partition then it is adjusted to the size of this data. "
                               "Recommended size is between 10 and 1000, default is 128.")
        self._setDefault(maxIter=100, tol=1E-4, layers=[1, 1], blockSize=128)
        kwargs = self.__init__._input_kwargs
        self.setParams(**kwargs)
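An illustrative MultilayerPerceptronClassifier sketch under the same assumptions; the layers list must begin with the feature dimension and end with the number of classes, and the toy dataset below is only for illustration:

    from pyspark.mllib.linalg import Vectors
    from pyspark.ml.classification import MultilayerPerceptronClassifier

    # Tiny XOR-style dataset with 2 input features and 2 classes.
    df = sqlContext.createDataFrame([
        (0.0, Vectors.dense([0.0, 0.0])),
        (1.0, Vectors.dense([0.0, 1.0])),
        (1.0, Vectors.dense([1.0, 0.0])),
        (0.0, Vectors.dense([1.0, 1.0]))], ["label", "features"])
    # layers = [inputs, hidden, outputs]; blockSize and seed chosen for illustration.
    mlp = MultilayerPerceptronClassifier(maxIter=100, layers=[2, 5, 2], blockSize=1, seed=11)
    model = mlp.fit(df)
    print(model.layers)         # [2, 5, 2]
    print(model.weights.size)   # 27 trained weights for layers [2, 5, 2]
    model.transform(df).select("features", "prediction").show()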