Weber-Fechner Example
This example shows simple curve fitting on synthetic Weber-Fechner data using the DARTSRegressor and some other scikit-learn estimators.
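The synthetic experiment used below generates data according to the Weber-Fechner law, under which the perceived intensity $p$ of a stimulus grows with the logarithm of its physical intensity $S$ relative to a reference level $S_0$:

$$p = k \, \ln\frac{S}{S_0}$$

For the two stimuli $S_1$ and $S_2$ used here, the detected difference therefore depends on the ratio of the intensities rather than on their absolute difference (up to the particular constants chosen by the synthetic experiment runner).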
In [ ]:
# Uncomment the following line when running on Google Colab
# !pip install "autora[theorist-darts]"
In [ ]:
from functools import partial
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import GridSearchCV
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import PolynomialFeatures
from autora.theorist.darts import DARTSRegressor
from autora.experiment_runner.synthetic.psychophysics.weber_fechner_law import weber_fechner_law
In [ ]:
# %% Define some helper functions
def show_results_complete(
    data_: pd.DataFrame,
    estimator=None,
    show_results=True,
    projection="2d",
    label=None,
):
    """
    Plot the data (S1, S2, difference_detected) and, if given, an estimator's
    predictions over the same domain.
    """
    if projection == "2d":
        plt.figure()
        data_.plot.scatter(
            "S1", "S2", c="difference_detected", cmap="viridis", zorder=10
        )
    elif projection == "3d":
        fig = plt.figure()
        ax = fig.add_subplot(projection="3d")
        ax.scatter(data_["S1"], data_["S2"], data_["difference_detected"])
        if estimator is not None:
            xs, ys = np.mgrid[0:5:0.2, 0:5:0.2]  # type: ignore
            zs = estimator.predict(np.column_stack((xs.ravel(), ys.ravel())))
            ax.plot_surface(xs, ys, zs.reshape(xs.shape), alpha=0.5)
    if label is not None:
        plt.title(label)
    if show_results:
        plt.show()
    return
In [ ]:
# %% Load the data
s = weber_fechner_law(resolution=20)
# Get independent and dependent variables
ivs = [iv.name for iv in s.variables.independent_variables]
dvs = [dv.name for dv in s.variables.dependent_variables]
X = s.domain()
experiment_data = s.run(X, random_state=42)
y = experiment_data[dvs]
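To see what the synthetic runner produced, it can help to inspect the variables and observations directly. The sketch below assumes, as the plotting helper above does, that experiment_data is a pandas DataFrame with columns S1, S2 and difference_detected:
In [ ]:
print(f"Independent variables: {ivs}")
print(f"Dependent variables: {dvs}")
print(experiment_data.head())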
In [ ]:
show_results = partial(show_results_complete, data_=experiment_data, projection="3d")
show_results(label="input data")
In [ ]:
# %% Fit first using a super-simple linear regression
first_order_linear_estimator = LinearRegression()
first_order_linear_estimator.fit(X, y)
show_results(estimator=first_order_linear_estimator, label="1st order linear")
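A purely first-order linear model cannot represent the logarithmic ratio structure of the data. As a rough check (a sketch using the training data rather than a held-out set), the coefficient of determination can be computed with the standard scikit-learn score method:
In [ ]:
print(f"R^2 of the first-order linear fit (training data): {first_order_linear_estimator.score(X, y):.3f}")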
In [ ]:
# %% Fit using polynomials of degree 0-3, selecting the best fit via cross-validation
polynomial_estimator = GridSearchCV(
    make_pipeline(PolynomialFeatures(), LinearRegression(fit_intercept=False)),
    param_grid=dict(polynomialfeatures__degree=range(4)),
)
polynomial_estimator.fit(X, y)
show_results(estimator=polynomial_estimator, label="[0th-3rd]-order linear")
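After the grid search, the selected polynomial degree and its cross-validated score can be read off the fitted GridSearchCV object; a minimal sketch:
In [ ]:
print("Selected degree:", polynomial_estimator.best_params_["polynomialfeatures__degree"])
print(f"Best cross-validated R^2: {polynomial_estimator.best_score_:.3f}")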
In [ ]:
# %% Fit using the DARTS regressor, restricted to a small set of primitives
darts_estimator_tuned = DARTSRegressor(
    batch_size=64,
    arch_updates_per_epoch=100,
    param_updates_per_epoch=100,
    max_epochs=1500,
    num_graph_nodes=5,
    primitives=[
        "none",
        "linear",
        "logistic",
    ],
)
darts_estimator_tuned.fit(X, y)
show_results(estimator=darts_estimator_tuned, label="DARTSRegressor")
darts_estimator_tuned.visualize_model()
In [ ]:
# %% Re-sample an architecture from the trained DARTS model and re-fit its
# parameters, without any further architecture updates
darts_estimator_tuned.set_params(
    arch_updates_per_epoch=0,
    param_updates_per_epoch=1000,
    sampling_strategy="sample",
    max_epochs=1,
)
darts_estimator_tuned.fit(X, y)
show_results(estimator=darts_estimator_tuned, label="resampled DARTSRegressor")
darts_estimator_tuned.visualize_model()
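If sampling_strategy="sample" re-samples an architecture on each fit (as its name suggests), repeating the step above can yield different candidate models; a sketch, assuming repeated calls behave like the single call above:
In [ ]:
for i in range(3):
    darts_estimator_tuned.set_params(
        arch_updates_per_epoch=0,
        param_updates_per_epoch=1000,
        sampling_strategy="sample",
        max_epochs=1,
    )
    darts_estimator_tuned.fit(X, y)
    show_results(estimator=darts_estimator_tuned, label=f"sampled DARTSRegressor #{i + 1}")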
In [ ]:
# %% Recover the highest-weighted ("max") architecture without further training
darts_estimator_tuned.set_params(
    arch_updates_per_epoch=0,
    param_updates_per_epoch=1000,
    sampling_strategy="max",
    max_epochs=0,
)
darts_estimator_tuned.fit(X, y)
show_results(estimator=darts_estimator_tuned, label="max-sampled DARTSRegressor")
darts_estimator_tuned.visualize_model()
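Finally, the three estimators can be compared on the training data. This is only an in-sample sanity check, not a proper evaluation on held-out conditions; it uses nothing beyond the predict methods already exercised by the plotting helper above:
In [ ]:
from sklearn.metrics import mean_squared_error

for name, est in [
    ("1st-order linear", first_order_linear_estimator),
    ("[0th-3rd]-order linear", polynomial_estimator),
    ("DARTS (max-sampled)", darts_estimator_tuned),
]:
    mse = mean_squared_error(y, est.predict(X))
    print(f"{name}: training MSE = {mse:.4f}")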