Implements LinkedMLPRegressor #112

Open · wants to merge 12 commits into main
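
For reviewers, here is a minimal usage sketch assembled from the new unit tests. The constraint format of the `linked` parameter is inferred from the assertions in `test_regression_linked` (a tuple `(layer, "c", row, col)` appears to address `coefs_[layer][row, col]` and `(layer, "i", index)` appears to address `intercepts_[layer][index]`), so treat it as an assumption rather than documented API:

import numpy
from mlinsights.mlmodel import LinkedMLPRegressor

X = numpy.array([[0.1, 0.11], [0.2, 0.21], [0.3, 0.31], [0.4, 0.41], [0.5, 0.51]])
Y = numpy.array([1.0, 1.1, 1.2, 10, 1.4])

# Tie coefs_[0][1, 2] to intercepts_[0][0], and coefs_[1][0, 0] to coefs_[1][2, 0]
# (the same pairs the tests below assert are equal after fitting; the tuple
# format is an inference, not documented behaviour).
linked = [
    ((0, "c", 1, 2), (0, "i", 0)),
    ((1, "c", 0, 0), (1, "c", 2, 0)),
]

clq = LinkedMLPRegressor(hidden_layer_sizes=(3,), linked=linked)
clq.fit(X, Y)
print(clq.predict(X))

The tests below check that each linked pair ends up holding a single shared value after fitting, which is what distinguishes this estimator from a plain MLPRegressor.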
126 changes: 126 additions & 0 deletions _unittests/ut_mlmodel/test_linked_mlpregression.py
@@ -0,0 +1,126 @@
# -*- coding: utf-8 -*-
"""
@brief test log(time=2s)
"""
import unittest
import numpy
from numpy.random import random
from sklearn.neural_network import MLPRegressor
from sklearn.metrics import mean_absolute_error
from sklearn.exceptions import ConvergenceWarning
from mlinsights.ext_test_case import ExtTestCase, ignore_warnings
from mlinsights.mlmodel import LinkedMLPRegressor
from mlinsights.mlmodel import (
    run_test_sklearn_pickle,
    run_test_sklearn_clone,
    run_test_sklearn_grid_search_cv,
)


class TestLinkedMLPRegression(ExtTestCase):
    @ignore_warnings(ConvergenceWarning)
    def test_regression_diff(self):
        X = numpy.array([[0.1], [0.2], [0.3], [0.4], [0.5]])
        Y = numpy.array([1.0, 1.1, 1.2, 10, 1.4])
        clr = MLPRegressor(hidden_layer_sizes=(3,))
        clr.fit(X, Y)
        clq = LinkedMLPRegressor(hidden_layer_sizes=(3,))
        clq.fit(X, Y)
        self.assertGreater(clr.n_iter_, 10)
        self.assertGreater(clq.n_iter_, 10)
        err1 = mean_absolute_error(Y, clr.predict(X))
        err2 = mean_absolute_error(Y, clq.predict(X))
        self.assertLesser(err1, 5)
        self.assertLesser(err2, 5)

    @ignore_warnings(ConvergenceWarning)
    def test_regression_linked_int(self):
        X = numpy.array(
            [[0.1, 0.11], [0.2, 0.21], [0.3, 0.31], [0.4, 0.41], [0.5, 0.51]]
        )
        Y = numpy.array([1.0, 1.1, 1.2, 10, 1.4])
        clr = MLPRegressor(hidden_layer_sizes=(3,))
        clr.fit(X, Y)
        clq = LinkedMLPRegressor(hidden_layer_sizes=(3,), linked=2)
        clq.fit(X, Y)
        self.assertGreater(clr.n_iter_, 10)
        self.assertGreater(clq.n_iter_, 10)
        err1 = mean_absolute_error(Y, clr.predict(X))
        err2 = mean_absolute_error(Y, clq.predict(X))
        self.assertLesser(err1, 5)
        self.assertLesser(err2, 5)

    @ignore_warnings(ConvergenceWarning)
    def test_regression_linked(self):
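        # Each pair below constrains two parameters of the network to share a
        # single value after fitting: based on the assertions further down
        # (e.g. coefs_[0][1, 2] == intercepts_[0][0]), a tuple
        # (layer, "c", row, col) addresses coefs_[layer][row, col] and
        # (layer, "i", index) addresses intercepts_[layer][index].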
        linked = [
            ((0, "c", 1, 2), (0, "i", 0)),
            ((1, "c", 0, 0), (1, "c", 2, 0)),
            ((0, "c", 1, 1), (0, "c", 0, 2)),
            ((0, "i", 2), (0, "c", 0, 0)),
            ((1, "i", 0), (1, "c", 1, 0)),
            ((0, "i", 1), (0, "c", 0, 1)),
        ]
        X = numpy.array(
            [[0.1, 0.11], [0.2, 0.21], [0.3, 0.31], [0.4, 0.41], [0.5, 0.51]]
        )
        Y = numpy.array([1.0, 1.1, 1.2, 10, 1.4])
        clr = MLPRegressor(hidden_layer_sizes=(3,))
        clr.fit(X, Y)
        clq = LinkedMLPRegressor(hidden_layer_sizes=(3,), linked=linked)
        clq.fit(X, Y)
        self.assertEqual(clq.linked_, linked)
        self.assertEqual(clq.coefs_[0][1, 2], clq.intercepts_[0][0])
        self.assertEqual(clq.coefs_[1][0, 0], clq.coefs_[1][2, 0])
        self.assertGreater(clr.n_iter_, 10)
        self.assertGreater(clq.n_iter_, 10)
        err1 = mean_absolute_error(Y, clr.predict(X))
        err2 = mean_absolute_error(Y, clq.predict(X))
        self.assertLesser(err1, 5)
        self.assertLesser(err2, 5)

    @ignore_warnings(ConvergenceWarning)
    def test_regression_pickle(self):
        X = random(100)
        eps1 = (random(90) - 0.5) * 0.1
        eps2 = random(10) * 2
        eps = numpy.hstack([eps1, eps2])
        X = X.reshape((100, 1))  # pylint: disable=E1101
        Y = X.ravel() * 3.4 + 5.6 + eps
        run_test_sklearn_pickle(lambda: MLPRegressor(hidden_layer_sizes=(3,)), X, Y)
        run_test_sklearn_pickle(
            lambda: LinkedMLPRegressor(hidden_layer_sizes=(3,)), X, Y
        )

    @ignore_warnings(ConvergenceWarning)
    def test_regression_clone(self):
        run_test_sklearn_clone(lambda: LinkedMLPRegressor())

    @ignore_warnings(ConvergenceWarning)
    def test_regression_grid_search(self):
        X = random(100)
        eps1 = (random(90) - 0.5) * 0.1
        eps2 = random(10) * 2
        eps = numpy.hstack([eps1, eps2])
        X = X.reshape((100, 1))  # pylint: disable=E1101
        Y = X.ravel() * 3.4 + 5.6 + eps
        self.assertRaise(
            lambda: run_test_sklearn_grid_search_cv(
                lambda: LinkedMLPRegressor(hidden_layer_sizes=(3,)), X, Y
            ),
            ValueError,
        )
        res = run_test_sklearn_grid_search_cv(
            lambda: LinkedMLPRegressor(hidden_layer_sizes=(3,)),
            X,
            Y,
            learning_rate_init=[0.001, 0.0001],
        )
        self.assertIn("model", res)
        self.assertIn("score", res)
        self.assertGreater(res["score"], -1)
        self.assertLesser(res["score"], 11)


if __name__ == "__main__":
    # TestLinkedMLPRegression().test_regression_linked()
    unittest.main()
34 changes: 34 additions & 0 deletions appveyor.yml
@@ -0,0 +1,34 @@
image:
  - Visual Studio 2019
environment:
  matrix:
    - PYTHON: "C:\\Python310-x64"
      PYTHON_VERSION: "3.10.x"
      PYTHON_ARCH: "64"
      SKL: '>=1.0'
init:
  - "ECHO %PYTHON% %PYTHON_VERSION% %PYTHON_ARCH%"

install:
  - "%PYTHON%\\python -m pip install --upgrade pip"
  # for many packages
  - "%PYTHON%\\Scripts\\pip install llvmlite numba"
  # install precompiled versions not available on pypi
  - "%PYTHON%\\Scripts\\pip install torch torchvision torchaudio"
  # other dependencies
  - "%PYTHON%\\Scripts\\pip install -r requirements.txt"
  - "%PYTHON%\\Scripts\\pip install scikit-learn%SKL%"
build: off

before_test:
  - "%PYTHON%\\python -u setup.py build_ext --inplace --verbose"

test_script:
  - "%PYTHON%\\python -u setup.py unittests"

after_test:
  - "%PYTHON%\\python -u setup.py bdist_wheel"

artifacts:
  - path: dist
    name: mlinsights
1 change: 1 addition & 0 deletions mlinsights/mlmodel/__init__.py
@@ -4,6 +4,7 @@
from .decision_tree_logreg import DecisionTreeLogisticRegression
from .extended_features import ExtendedFeatures
from .interval_regressor import IntervalRegressor
from .kmeans_constraint import ConstraintKMeans
from .kmeans_l1 import KMeansL1L2
from .linked_mlpregressor import LinkedMLPRegressor
from .ml_featurizer import model_featurizer