Description
import pytest
from pyspark import keyword_only
from pyspark.ml import Model
from pyspark.sql import DataFrame
from pyspark.ml.util import DefaultParamsReadable, DefaultParamsWritable
from pyspark.ml.param.shared import HasInputCol
from pyspark.sql.functions import *


class NoneParamTester(Model, HasInputCol, DefaultParamsReadable, DefaultParamsWritable):
    """Minimal Transformer used to reproduce the None-Param bug.

    It declares a single string Param (``inputCol`` via ``HasInputCol``) and
    mixes in the default read/write helpers so the model can be persisted.
    ``_transform`` is a no-op passthrough — only Param handling matters here.
    """

    @keyword_only
    def __init__(self, inputCol: str = None):
        super(NoneParamTester, self).__init__()
        # keyword_only stores the keyword arguments on self._input_kwargs;
        # forward them to setParams so init and setParams share one code path.
        kwargs = self._input_kwargs
        self.setParams(**kwargs)

    @keyword_only
    def setParams(self, inputCol: str = None):
        # Explicitly _set even when the value is None — this is what makes
        # the Param "defined" and "set" with a None value.
        kwargs = self._input_kwargs
        self._set(**kwargs)
        return self

    def _transform(self, data: DataFrame) -> DataFrame:
        # Identity transform; this model exists only to carry the Param.
        return data


class TestNoneParam(object):
    """Demonstrates two code paths that blow up on a None-valued string Param."""

    def test_persist_none(self, spark, temp_dir):
        # Save/load round-trip: writing succeeds, but reading back fails.
        path = temp_dir + '/test_model'
        model = NoneParamTester(inputCol=None)
        assert model.isDefined(model.inputCol)
        assert model.isSet(model.inputCol)
        assert model.getInputCol() is None
        model.write().overwrite().save(path)
        NoneParamTester.load(path)  # TypeError: Could not convert <class 'NoneType'> to string type

    def test_set_none(self, spark):
        # Direct set() with None hits the same string conversion failure.
        model = NoneParamTester(inputCol=None)
        assert model.isDefined(model.inputCol)
        assert model.isSet(model.inputCol)
        assert model.getInputCol() is None
        model.set(model.inputCol, None)  # TypeError: Could not convert <class 'NoneType'> to string type
Attachments
Issue Links
- relates to SPARK-29464: "PySpark ML should expose Params.clear() to unset a user supplied Param" — Resolved