#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from abc import ABCMeta, abstractmethod

import copy
import threading

from pyspark import since
from pyspark.ml.common import inherit_doc
from pyspark.ml.param.shared import HasInputCol, HasOutputCol, HasLabelCol, HasFeaturesCol, \
    HasPredictionCol, Params
from pyspark.sql.functions import udf
from pyspark.sql.types import StructField, StructType


class _FitMultipleIterator(object):
"""
Used by default implementation of Estimator.fitMultiple to produce models in a thread safe
iterator. This class handles the simple case of fitMultiple where each param map should be
fit independently.
Parameters
----------
fitSingleModel : function
Callable[[int], Transformer] which fits an estimator to a dataset.
`fitSingleModel` may be called up to `numModels` times, with a unique index each time.
Each call to `fitSingleModel` with an index should return the Model associated with
that index.
numModel : int
Number of models this iterator should produce.
Notes
-----
See :py:meth:`Estimator.fitMultiple` for more info.
"""

    def __init__(self, fitSingleModel, numModels):
        self.fitSingleModel = fitSingleModel
        self.numModels = numModels
        self.counter = 0
        self.lock = threading.Lock()

    def __iter__(self):
        return self

    def __next__(self):
        with self.lock:
            index = self.counter
            if index >= self.numModels:
                raise StopIteration("No models remaining.")
            self.counter += 1
        # Fit outside the lock so multiple models can be trained concurrently.
        return index, self.fitSingleModel(index)

    def next(self):
        """For Python 2 compatibility."""
        return self.__next__()
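
    # Illustrative contract (a sketch; `fitOne` is a hypothetical
    # Callable[[int], Transformer]):
    #
    #   it = _FitMultipleIterator(fitOne, numModels=2)
    #   next(it)   # (0, fitOne(0))
    #   next(it)   # (1, fitOne(1))
    #   next(it)   # raises StopIteration
    #
    # The index is claimed while `self.lock` is held, so several threads can
    # drain one instance concurrently without fitting the same model twice.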


@inherit_doc
class Estimator(Params, metaclass=ABCMeta):
"""
Abstract class for estimators that fit models to data.
.. versionadded:: 1.3.0
"""
@abstractmethod
def _fit(self, dataset):
"""
Fits a model to the input dataset. This is called by the default implementation of fit.
Parameters
----------
dataset : :py:class:`pyspark.sql.DataFrame`
input dataset
Returns
-------
:class:`Transformer`
fitted model
"""
raise NotImplementedError()

    def fitMultiple(self, dataset, paramMaps):
"""
Fits a model to the input dataset for each param map in `paramMaps`.
.. versionadded:: 2.3.0
Parameters
----------
dataset : :py:class:`pyspark.sql.DataFrame`
input dataset.
paramMaps : :py:class:`collections.abc.Sequence`
A Sequence of param maps.
Returns
-------
:py:class:`_FitMultipleIterator`
A thread safe iterable which contains one model for each param map. Each
call to `next(modelIterator)` will return `(index, model)` where model was fit
using `paramMaps[index]`. `index` values may not be sequential.
"""
        estimator = self.copy()

        def fitSingleModel(index):
            return estimator.fit(dataset, paramMaps[index])

        return _FitMultipleIterator(fitSingleModel, len(paramMaps))
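
    # A sketch (not part of the API) of draining the iterator from several
    # threads; `estimator`, `dataset`, and `paramMaps` are assumed to exist in
    # the caller's scope. This mirrors roughly what tuning utilities such as
    # :py:class:`pyspark.ml.tuning.CrossValidator` do with `fitMultiple`:
    #
    #   from multiprocessing.pool import ThreadPool
    #   modelIter = estimator.fitMultiple(dataset, paramMaps)
    #   models = [None] * len(paramMaps)
    #   with ThreadPool(4) as pool:
    #       for index, model in pool.imap_unordered(
    #               lambda _: next(modelIter), range(len(paramMaps))):
    #           models[index] = model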

    def fit(self, dataset, params=None):
"""
Fits a model to the input dataset with optional parameters.
.. versionadded:: 1.3.0
Parameters
----------
dataset : :py:class:`pyspark.sql.DataFrame`
input dataset.
params : dict or list or tuple, optional
an optional param map that overrides embedded params. If a list/tuple of
param maps is given, this calls fit on each param map and returns a list of
models.
Returns
-------
:py:class:`Transformer` or a list of :py:class:`Transformer`
fitted model(s)
"""
if params is None:
params = dict()
if isinstance(params, (list, tuple)):
models = [None] * len(params)
for index, model in self.fitMultiple(dataset, params):
models[index] = model
return models
elif isinstance(params, dict):
if params:
return self.copy(params)._fit(dataset)
else:
return self._fit(dataset)
else:
raise TypeError("Params must be either a param map or a list/tuple of param maps, "
"but got %s." % type(params))


@inherit_doc
class Model(Transformer, metaclass=ABCMeta):
"""
Abstract class for models that are fitted by estimators.
.. versionadded:: 1.4.0
"""
pass


@inherit_doc
class _PredictorParams(HasLabelCol, HasFeaturesCol, HasPredictionCol):
"""
Params for :py:class:`Predictor` and :py:class:`PredictorModel`.
.. versionadded:: 3.0.0
"""
pass


@inherit_doc
class Predictor(Estimator, _PredictorParams, metaclass=ABCMeta):
"""
Estimator for prediction tasks (regression and classification).
"""
[docs] @since("3.0.0")
def setLabelCol(self, value):
"""
Sets the value of :py:attr:`labelCol`.
"""
return self._set(labelCol=value)
[docs] @since("3.0.0")
def setFeaturesCol(self, value):
"""
Sets the value of :py:attr:`featuresCol`.
"""
return self._set(featuresCol=value)
[docs] @since("3.0.0")
def setPredictionCol(self, value):
"""
Sets the value of :py:attr:`predictionCol`.
"""
return self._set(predictionCol=value)
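
    # Each setter returns `self` (via `_set`), so configuration chains
    # fluently (illustrative):
    #
    #   predictor.setFeaturesCol("features").setLabelCol("label")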


@inherit_doc
class PredictionModel(Model, _PredictorParams, metaclass=ABCMeta):
"""
Model for prediction tasks (regression and classification).
"""
[docs] @since("3.0.0")
def setFeaturesCol(self, value):
"""
Sets the value of :py:attr:`featuresCol`.
"""
return self._set(featuresCol=value)
[docs] @since("3.0.0")
def setPredictionCol(self, value):
"""
Sets the value of :py:attr:`predictionCol`.
"""
return self._set(predictionCol=value)

    @property
    @abstractmethod
    @since("2.1.0")
    def numFeatures(self):
        """
        Returns the number of features the model was trained on. If unknown, returns -1.
        """
raise NotImplementedError()

    @abstractmethod
@since("3.0.0")
def predict(self, value):
"""
Predict label for the given features.
"""
raise NotImplementedError()
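

# A minimal sketch of the Estimator/Model contract (hypothetical classes, not
# part of pyspark.ml): `_fit` learns the mean of the label column, and the
# fitted model emits it as a constant prediction.
#
#   from pyspark.sql import functions as F
#
#   class MeanModel(Model, _PredictorParams):
#       def __init__(self, mean):
#           super(MeanModel, self).__init__()
#           self.mean = mean
#
#       def _transform(self, dataset):
#           return dataset.withColumn(self.getPredictionCol(), F.lit(self.mean))
#
#   class MeanEstimator(Estimator, _PredictorParams):
#       def _fit(self, dataset):
#           return MeanModel(dataset.agg(F.avg(self.getLabelCol())).first()[0])
#
#   model = MeanEstimator().fit(train_df)   # train_df must have a "label" column
#   model.transform(test_df)                # adds a constant "prediction" column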