Skip to content

Commit

Permalink
Gradient Boosting - Remove try/except around imports
Browse files Browse the repository at this point in the history
  • Loading branch information
PrimozGodec committed Sep 21, 2023
1 parent 041c0ce commit 130bbcf
Show file tree
Hide file tree
Showing 5 changed files with 17 additions and 100 deletions.
10 changes: 2 additions & 8 deletions Orange/classification/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,12 +20,6 @@
from .sgd import *
from .neural_network import *
from .calibration import *
try:
from .catgb import *
except ModuleNotFoundError:
pass
from .catgb import *
from .gb import *
try:
from .xgb import *
except Exception:
pass
from .xgb import *
10 changes: 2 additions & 8 deletions Orange/modelling/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,12 +11,6 @@
from .randomforest import *
from .svm import *
from .tree import *
try:
from .catgb import *
except ImportError:
pass
from .catgb import *
from .gb import *
try:
from .xgb import *
except ImportError:
pass
from .xgb import *
10 changes: 2 additions & 8 deletions Orange/regression/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,13 +14,7 @@
from .tree import *
from .neural_network import *
from ..classification.simple_tree import *
try:
from .catgb import *
except ModuleNotFoundError:
pass
from .catgb import *
from .gb import *
try:
from .xgb import *
except Exception:
pass
from .xgb import *
from .curvefit import *
28 changes: 5 additions & 23 deletions Orange/widgets/model/owgradientboosting.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,44 +9,26 @@
from Orange.base import Learner
from Orange.data import Table
from Orange.modelling import GBLearner

try:
from Orange.modelling import CatGBLearner
except ImportError:
CatGBLearner = None
try:
from Orange.modelling import XGBLearner, XGBRFLearner
except ImportError:
XGBLearner = XGBRFLearner = None

from Orange.modelling import CatGBLearner
from Orange.modelling import XGBLearner, XGBRFLearner
from Orange.widgets import gui
from Orange.widgets.settings import Setting, SettingProvider
from Orange.widgets.utils.owlearnerwidget import OWBaseLearner
from Orange.widgets.utils.widgetpreview import WidgetPreview


class LearnerItemModel(QStandardItemModel):
LEARNERS = [
(GBLearner, "", ""),
(XGBLearner, "Extreme Gradient Boosting (xgboost)", "xgboost"),
(XGBRFLearner, "Extreme Gradient Boosting Random Forest (xgboost)",
"xgboost"),
(CatGBLearner, "Gradient Boosting (catboost)", "catboost"),
]
LEARNERS = [GBLearner, XGBLearner, XGBRFLearner, CatGBLearner]

def __init__(self, parent):
super().__init__(parent)
self._add_data()

def _add_data(self):
for cls, opt_name, lib in self.LEARNERS:
for cls in self.LEARNERS:
item = QStandardItem()
imported = bool(cls)
name = cls.name if imported else opt_name
name = cls.name
item.setData(f"{name}", Qt.DisplayRole)
item.setEnabled(imported)
if not imported:
item.setToolTip(f"{lib} is not installed")
self.appendRow(item)


Expand Down
59 changes: 6 additions & 53 deletions Orange/widgets/model/tests/test_owgradientboosting.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,27 +6,15 @@

from Orange.classification import GBClassifier

try:
from Orange.classification import XGBClassifier, XGBRFClassifier
except ImportError:
XGBClassifier = XGBRFClassifier = None
try:
from Orange.classification import CatGBClassifier
except ImportError:
CatGBClassifier = None
from Orange.classification import XGBClassifier, XGBRFClassifier
from Orange.classification import CatGBClassifier
from Orange.data import Table
from Orange.modelling import GBLearner
from Orange.preprocess.score import Scorer
from Orange.regression import GBRegressor

try:
from Orange.regression import XGBRegressor, XGBRFRegressor
except ImportError:
XGBRegressor = XGBRFRegressor = None
try:
from Orange.regression import CatGBRegressor
except ImportError:
CatGBRegressor = None
from Orange.regression import XGBRegressor, XGBRFRegressor
from Orange.regression import CatGBRegressor
from Orange.widgets.model.owgradientboosting import OWGradientBoosting, \
LearnerItemModel, GBLearnerEditor, XGBLearnerEditor, XGBRFLearnerEditor, \
CatGBLearnerEditor, BaseEditor
Expand Down Expand Up @@ -65,16 +53,6 @@ def test_model(self):
self.assertEqual(model.item(i).isEnabled(),
classifiers[i] is not None)

@patch("Orange.widgets.model.owgradientboosting.LearnerItemModel.LEARNERS",
[(GBLearner, "", ""),
(None, "Gradient Boosting (catboost)", "catboost")])
def test_missing_lib(self):
widget = create_parent(CatGBLearnerEditor)
model = LearnerItemModel(widget)
self.assertEqual(model.rowCount(), 2)
self.assertTrue(model.item(0).isEnabled())
self.assertFalse(model.item(1).isEnabled())


class BaseEditorTest(GuiTest):
EditorClass: Type[BaseEditor] = None
Expand Down Expand Up @@ -146,7 +124,6 @@ def test_arguments(self):
"colsample_bynode": 1, "subsample": 1, "random_state": 0}
self.assertDictEqual(self.editor.get_arguments(), args)

@unittest.skipIf(XGBClassifier is None, "Missing 'xgboost' package")
def test_learner_parameters(self):
params = (("Method", "Extreme Gradient Boosting (xgboost)"),
("Number of trees", 100),
Expand All @@ -160,7 +137,6 @@ def test_learner_parameters(self):
("Fraction of features for each split", 1))
self.assertTupleEqual(self.editor.get_learner_parameters(), params)

@unittest.skipIf(XGBClassifier is None, "Missing 'xgboost' package")
def test_default_parameters_cls(self):
data = Table("heart_disease")
booster = XGBClassifier()
Expand All @@ -178,7 +154,6 @@ def test_default_parameters_cls(self):
self.assertEqual(int(tp["colsample_bylevel"]), self.editor.colsample_bylevel)
self.assertEqual(int(tp["colsample_bynode"]), self.editor.colsample_bynode)

@unittest.skipIf(XGBRegressor is None, "Missing 'xgboost' package")
def test_default_parameters_reg(self):
data = Table("housing")
booster = XGBRegressor()
Expand Down Expand Up @@ -206,7 +181,6 @@ def test_arguments(self):
"colsample_bynode": 1, "subsample": 1, "random_state": 0}
self.assertDictEqual(self.editor.get_arguments(), args)

@unittest.skipIf(XGBRFClassifier is None, "Missing 'xgboost' package")
def test_learner_parameters(self):
params = (("Method",
"Extreme Gradient Boosting Random Forest (xgboost)"),
Expand All @@ -221,7 +195,6 @@ def test_learner_parameters(self):
("Fraction of features for each split", 1))
self.assertTupleEqual(self.editor.get_learner_parameters(), params)

@unittest.skipIf(XGBRFClassifier is None, "Missing 'xgboost' package")
def test_default_parameters_cls(self):
data = Table("heart_disease")
booster = XGBRFClassifier()
Expand All @@ -239,7 +212,6 @@ def test_default_parameters_cls(self):
self.assertEqual(int(tp["colsample_bylevel"]), self.editor.colsample_bylevel)
self.assertEqual(int(tp["colsample_bynode"]), self.editor.colsample_bynode)

@unittest.skipIf(XGBRFRegressor is None, "Missing 'xgboost' package")
def test_default_parameters_reg(self):
data = Table("housing")
booster = XGBRFRegressor()
Expand All @@ -266,7 +238,6 @@ def test_arguments(self):
"reg_lambda": 3, "colsample_bylevel": 1, "random_state": 0}
self.assertDictEqual(self.editor.get_arguments(), args)

@unittest.skipIf(CatGBClassifier is None, "Missing 'catboost' package")
def test_learner_parameters(self):
params = (("Method", "Gradient Boosting (catboost)"),
("Number of trees", 100),
Expand All @@ -277,7 +248,6 @@ def test_learner_parameters(self):
("Fraction of features for each tree", 1))
self.assertTupleEqual(self.editor.get_learner_parameters(), params)

@unittest.skipIf(CatGBClassifier is None, "Missing 'catboost' package")
def test_default_parameters_cls(self):
data = Table("heart_disease")
booster = CatGBClassifier()
Expand All @@ -291,7 +261,6 @@ def test_default_parameters_cls(self):
self.assertEqual(self.editor.learning_rate, 0.3)
# params["learning_rate"] is automatically defined so don't test it

@unittest.skipIf(CatGBRegressor is None, "Missing 'catboost' package")
def test_default_parameters_reg(self):
data = Table("housing")
booster = CatGBRegressor()
Expand All @@ -305,6 +274,7 @@ def test_default_parameters_reg(self):
self.assertEqual(self.editor.learning_rate, 0.3)
# params["learning_rate"] is automatically defined so don't test it


class TestOWGradientBoosting(WidgetTest, WidgetLearnerTestMixin):
def setUp(self):
self.widget = self.create_widget(OWGradientBoosting,
Expand All @@ -328,7 +298,6 @@ def test_datasets(self):
for ds in datasets.datasets():
self.send_signal(self.widget.Inputs.data, ds)

@unittest.skipIf(XGBClassifier is None, "Missing 'xgboost' package")
def test_xgb_params(self):
simulate.combobox_activate_index(self.widget.controls.method_index, 1)
editor = self.widget.editor
Expand All @@ -350,27 +319,11 @@ def test_xgb_params(self):
def test_methods(self):
self.send_signal(self.widget.Inputs.data, self.data)
method_cb = self.widget.controls.method_index
for i, (cls, _, _) in enumerate(LearnerItemModel.LEARNERS):
if cls is None:
continue
for i, cls in enumerate(LearnerItemModel.LEARNERS):
simulate.combobox_activate_index(method_cb, i)
self.click_apply()
self.assertIsInstance(self.widget.learner, cls)

def test_missing_lib(self):
modules = {k: v for k, v in sys.modules.items()
if "orange" not in k.lower()} # retain built-ins
modules["xgboost"] = None
modules["catboost"] = None
# pylint: disable=reimported,redefined-outer-name
# pylint: disable=import-outside-toplevel
with patch.dict(sys.modules, modules, clear=True):
from Orange.widgets.model.owgradientboosting import \
OWGradientBoosting
widget = self.create_widget(OWGradientBoosting,
stored_settings={"method_index": 3})
self.assertEqual(widget.method_index, 0)


if __name__ == "__main__":
unittest.main()

0 comments on commit 130bbcf

Please sign in to comment.