normalize data python
>>> from sklearn import preprocessing
>>>
>>> data = [100, 10, 2, 32, 31, 949]
>>>
>>> preprocessing.normalize([data])
array([[0.10467389, 0.01046739, 0.00209348, 0.03349564, 0.03244891,
        0.99335519]])
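preprocessing.normalize scales each sample (row) to unit L2 norm by default. A minimal sketch of the same computation in plain numpy, reusing the data list above, to show where the numbers come from:
>>> import numpy as np
>>>
>>> data = [100, 10, 2, 32, 31, 949]
>>> np.array(data) / np.linalg.norm(data)  # divide each value by the L2 norm
array([0.10467389, 0.01046739, 0.00209348, 0.03349564, 0.03244891,
       0.99335519])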
feature scaling in python
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler
from sklearn.linear_model import Ridge

# Split first, then fit the scaler on the training split only
X_train, X_test, y_train, y_test = train_test_split(X_data, y_data,
                                                    random_state=0)

scaler = MinMaxScaler()
X_train_scaled = scaler.fit_transform(X_train)
X_test_scaled = scaler.transform(X_test)
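The Ridge import above is only used once a model is fit on the scaled data. A minimal sketch of that next step, assuming the X_train_scaled, X_test_scaled, y_train and y_test variables produced above:
# Fit and evaluate a ridge regression on the scaled splits; the scaler was
# fit on the training split only, so there is no leakage from the test set.
ridge = Ridge(alpha=1.0)
ridge.fit(X_train_scaled, y_train)
print(ridge.score(X_test_scaled, y_test))  # R^2 on the held-out test split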
data normalization python
from sklearn import preprocessing

# Normalizer rescales each sample (row) to unit norm; fitting is a no-op
# because it learns nothing from the data.
normalizer = preprocessing.Normalizer().fit(X_train)
X_train = normalizer.transform(X_train)
X_test = normalizer.transform(X_test)
data wrangling python
# Signature of pandas.merge showing the main join parameters
pd.merge(left, right, how='inner', on=None, left_on=None, right_on=None,
         left_index=False, right_index=False, sort=True)
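A small self-contained inner merge on a shared key column; the frames and column names here are made up for illustration:
import pandas as pd

left = pd.DataFrame({'key': ['a', 'b', 'c'], 'x': [1, 2, 3]})
right = pd.DataFrame({'key': ['b', 'c', 'd'], 'y': [4, 5, 6]})

# An inner join keeps only keys present in both frames ('b' and 'c')
merged = pd.merge(left, right, how='inner', on='key')
print(merged)
#   key  x  y
# 0   b  2  4
# 1   c  3  5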
Scaling features to a range
# Scaling features to a range using MaxAbsScaler
import numpy as np
from sklearn import preprocessing

X_train = np.array([[ 1., -1.,  2.],
                    [ 2.,  0.,  0.],
                    [ 0.,  1., -1.]])

max_abs_scaler = preprocessing.MaxAbsScaler()
X_train_maxabs = max_abs_scaler.fit_transform(X_train)
X_train_maxabs
# array([[ 0.5, -1. ,  1. ],
#        [ 1. ,  0. ,  0. ],
#        [ 0. ,  1. , -0.5]])

X_test = np.array([[-3., -1.,  4.]])
X_test_maxabs = max_abs_scaler.transform(X_test)
X_test_maxabs
# array([[-1.5, -1. ,  2. ]])

max_abs_scaler.scale_
# array([2., 1., 2.])
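MaxAbsScaler divides each column by its maximum absolute value seen during fit, so the same result can be reproduced in plain numpy with the arrays defined above:
# Per-column maximum absolute value learned from the training data
scale = np.max(np.abs(X_train), axis=0)  # array([2., 1., 2.])

X_train_manual = X_train / scale         # matches X_train_maxabs
X_test_manual = X_test / scale           # matches X_test_maxabs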