Note
Click here to download the full example code or to run this example in your browser via Binder
Easy REST API Model Serving with Neuraxle
This demonstrates an easy way to deploy your Neuraxle model or pipeline to a REST API.
Out:
/home/gui/Documents/GIT/www.neuraxle.org-builder/venv/lib/python3.8/site-packages/sklearn/utils/deprecation.py:87: FutureWarning: Function load_boston is deprecated; `load_boston` is deprecated in 1.0 and will be removed in 1.2.
The Boston housing prices dataset has an ethical problem. You can refer to
the documentation of this function for further details.
The scikit-learn maintainers therefore strongly discourage the use of this
dataset unless the purpose of the code is to study and educate about
ethical issues in data science and machine learning.
In this special case, you can fetch the dataset from the original
source::
import pandas as pd
import numpy as np
data_url = "http://lib.stat.cmu.edu/datasets/boston"
raw_df = pd.read_csv(data_url, sep="\s+", skiprows=22, header=None)
data = np.hstack([raw_df.values[::2, :], raw_df.values[1::2, :2]])
target = raw_df.values[1::2, 2]
Alternative datasets include the California housing dataset (i.e.
:func:`~sklearn.datasets.fetch_california_housing`) and the Ames housing
dataset. You can load the datasets as follows::
from sklearn.datasets import fetch_california_housing
housing = fetch_california_housing()
for the California housing dataset and::
from sklearn.datasets import fetch_openml
housing = fetch_openml(name="house_prices", as_frame=True)
for the Ames housing dataset.
warnings.warn(msg, category=FutureWarning)
Fitting on train:
/home/gui/Documents/GIT/www.neuraxle.org-builder/venv/lib/python3.8/site-packages/sklearn/decomposition/_fastica.py:488: FutureWarning: From version 1.3 whiten='unit-variance' will be used by default.
warnings.warn(
Transforming train and test:
Evaluating transformed train:
R2 regression score: 0.9800752378066783
Evaluating transformed test:
R2 regression score: 0.9228829976622316
Deploying the application by routing data to the transform method:
Finally, run the app by uncommenting this next line of code:
You can now call your pipeline over HTTP with a (JSON) REST API.
import numpy as np
from flask import Flask
from sklearn.cluster import KMeans
from sklearn.datasets import load_boston
from sklearn.decomposition import PCA, FastICA
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.metrics import r2_score
from sklearn.model_selection import train_test_split
from sklearn.utils import shuffle
from neuraxle.rest.flask import FlaskRestApiWrapper, JSONDataBodyDecoder, JSONDataResponseEncoder
from neuraxle.pipeline import Pipeline
from neuraxle.steps.sklearn import RidgeModelStacking
from neuraxle.union import AddFeatures
def main():
    """
    Fit a Neuraxle pipeline on the Boston housing data, evaluate it, and wrap
    it as a Flask REST API that routes incoming JSON to the pipeline's
    ``transform`` method.

    :return: the Flask app wrapping the fitted pipeline (not yet running).
    """
    # NOTE(review): load_boston is deprecated in scikit-learn 1.0 and removed
    # in 1.2 (see the FutureWarning in this example's captured output); kept
    # here because the example is written against this dataset.
    boston = load_boston()
    X, y = shuffle(boston.data, boston.target, random_state=13)
    X = X.astype(np.float32)
    # shuffle=False: the data was already shuffled above with a fixed seed.
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, shuffle=False)

    # Augment the raw features with 2 PCA and 2 ICA components, then stack a
    # boosted-tree regressor and a KMeans clusterer under a Ridge meta-model.
    pipeline = Pipeline([
        AddFeatures([
            PCA(n_components=2),
            FastICA(n_components=2),
        ]),
        RidgeModelStacking([
            GradientBoostingRegressor(),
            KMeans(),
        ]),
    ])

    print("Fitting on train:")
    pipeline = pipeline.fit(X_train, y_train)
    print("")
    print("Transforming train and test:")
    y_train_predicted = pipeline.transform(X_train)
    y_test_predicted = pipeline.transform(X_test)
    print("")
    print("Evaluating transformed train:")
    # Bug fix: r2_score's signature is (y_true, y_pred). The original call
    # passed the arguments reversed; R^2 is not symmetric in its arguments,
    # so the reversed order reports a wrong score.
    score = r2_score(y_train, y_train_predicted)
    print('R2 regression score:', score)
    print("")
    print("Evaluating transformed test:")
    score = r2_score(y_test, y_test_predicted)
    print('R2 regression score:', score)

    print("Deploying the application by routing data to the transform method:")

    class CustomJSONDecoderFor2DArray(JSONDataBodyDecoder):
        """This is a custom JSON decoder class that precedes the pipeline's transformation."""

        def decode(self, data_inputs):
            """
            Transform a JSON list object into an np.array object.

            :param data_inputs: json object
            :return: np array for data inputs
            """
            return np.array(data_inputs)

    class CustomJSONEncoderOfOutputs(JSONDataResponseEncoder):
        """This is a custom JSON response encoder class for converting the pipeline's transformation outputs."""

        def encode(self, data_inputs) -> dict:
            """
            Convert predictions to a dict for creating a JSON Response object.

            :param data_inputs: the pipeline's transformation outputs
            :return: a JSON-serializable dict of predictions
            """
            return {
                'predictions': list(data_inputs)
            }

    app = FlaskRestApiWrapper(
        json_decoder=CustomJSONDecoderFor2DArray(),
        wrapped=pipeline,
        json_encoder=CustomJSONEncoderOfOutputs()
    ).get_app()

    print("Finally, run the app by uncommenting this next line of code:")

    # app.run(debug=False, port=5000)

    print("You can now call your pipeline over HTTP with a (JSON) REST API.")

    # test_predictions = requests.post(
    #     url='http://127.0.0.1:5000/',
    #     json=X_test.tolist()
    # )
    # print(test_predictions)
    # print(test_predictions.content)

    assert isinstance(app, Flask)

    return app
# Run the example only when executed as a script (not when imported).
if __name__ == "__main__":
    main()
Total running time of the script: ( 0 minutes 0.246 seconds)