Answers for "multinomial logistic regression python"

9

logistic regression algorithm in python

# import the class
from sklearn.linear_model import LogisticRegression

# instantiate the model (using the default parameters)
logreg = LogisticRegression()

# fit the model with data
logreg.fit(X_train,y_train)

# make predictions on the test set
y_pred = logreg.predict(X_test)
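A quick sanity check of the fit can follow; this is a minimal sketch, not part of the original answer, and it assumes y_test is available from a prior train_test_split:

# evaluate the predictions (sketch; assumes y_test exists alongside X_test)
from sklearn.metrics import accuracy_score, confusion_matrix
print(accuracy_score(y_test, y_pred))
print(confusion_matrix(y_test, y_pred))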
Posted by: Guest on May-23-2020
5

multinomial regression scikit learn

# note: newer scikit-learn releases use penalty=None instead of the string 'none'
model1 = LogisticRegression(random_state=0, multi_class='multinomial', penalty='none', solver='newton-cg').fit(X_train, y_train)
preds = model1.predict(X_test)

# print the tunable parameters (they were not tuned in this example; everything was kept at its default)
params = model1.get_params()
print(params)

{'C': 1.0, 'class_weight': None, 'dual': False, 'fit_intercept': True, 'intercept_scaling': 1, 'l1_ratio': None, 'max_iter': 100, 'multi_class': 'multinomial', 'n_jobs': None, 'penalty': 'none', 'random_state': 0, 'solver': 'newton-cg', 'tol': 0.0001, 'verbose': 0, 'warm_start': False}
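For a multinomial model the per-class probabilities are often more informative than the hard predictions. A minimal sketch, assuming the same X_test as above:

# sketch: per-class probabilities from the fitted multinomial model
probs = model1.predict_proba(X_test)
print(model1.classes_)   # column order of probs
print(probs[:3])         # first three rows, one column per class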
Posted by: Guest on August-25-2020
0

multinomial logit python

In [1]: import statsmodels.api as st
 
In [2]: iris = st.datasets.get_rdataset('iris', 'datasets')
 
In [3]: ### get the y 
 
In [4]: y = iris.data.Species
 
In [5]: print(y.head(3))
0    setosa
1    setosa
2    setosa
Name: Species, dtype: object
 
In [6]: ### get the x 
 
In [7]: x = iris.data.iloc[:, 0]  # .ix has been removed from pandas; use .iloc
 
In [8]: x = st.add_constant(x, prepend = False)
 
In [9]: print(x.head(3))
   Sepal.Length  const
0           5.1      1
1           4.9      1
2           4.7      1
 
In [10]: ### specify the model
 
In [11]: mdl = st.MNLogit(y, x)
 
In [12]: mdl_fit = mdl.fit()
Optimization terminated successfully.
         Current function value: 0.606893
         Iterations 8
 
In [13]: ### print model summary ###
 
In [14]: print(mdl_fit.summary())
                          MNLogit Regression Results                          
==============================================================================
Dep. Variable:                Species   No. Observations:                  150
Model:                        MNLogit   Df Residuals:                      146
Method:                           MLE   Df Model:                            2
Date:                Fri, 23 Aug 2013   Pseudo R-squ.:                  0.4476
Time:                        22:22:58   Log-Likelihood:                -91.034
converged:                       True   LL-Null:                       -164.79
                                        LLR p-value:                 9.276e-33
=====================================================================================
Species=versicolor       coef    std err          z      P>|z|      [95.0% Conf. Int.]
--------------------------------------------------------------------------------------
Sepal.Length           4.8157      0.907      5.310      0.000         3.038     6.593
const                -26.0819      4.889     -5.335      0.000       -35.665   -16.499
--------------------------------------------------------------------------------------
Species=virginica       coef    std err          z      P>|z|      [95.0% Conf. Int.]
-------------------------------------------------------------------------------------
Sepal.Length          6.8464      1.022      6.698      0.000         4.843     8.850
const               -38.7590      5.691     -6.811      0.000       -49.913   -27.605
=====================================================================================
 
In [15]: ### marginal effects ###
 
In [16]: mdl_margeff = mdl_fit.get_margeff()
 
In [17]: print(mdl_margeff.summary())
       MNLogit Marginal Effects      
=====================================
Dep. Variable:                Species
Method:                          dydx
At:                           overall
=====================================================================================
    Species=setosa      dy/dx    std err          z      P>|z|      [95.0% Conf. Int.]
--------------------------------------------------------------------------------------
Sepal.Length          -0.3785      0.003   -116.793      0.000        -0.385    -0.372
--------------------------------------------------------------------------------------
Species=versicolor      dy/dx    std err          z      P>|z|      [95.0% Conf. Int.]
--------------------------------------------------------------------------------------
Sepal.Length           0.0611      0.022      2.778      0.005         0.018     0.104
--------------------------------------------------------------------------------------
Species=virginica      dy/dx    std err          z      P>|z|      [95.0% Conf. Int.]
-------------------------------------------------------------------------------------
Sepal.Length          0.3173      0.022     14.444      0.000         0.274     0.360
=====================================================================================
 
In [18]: ### aic and bic ###
 
In [19]: print(mdl_fit.aic)
190.06793279
 
In [20]: print(mdl_fit.bic)
202.110473966
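The fitted MNLogit result can also return per-class probabilities. A minimal sketch using the same x as above (column order follows the categories of y: setosa, versicolor, virginica):

# sketch: predicted class probabilities from the fitted model
probs = mdl_fit.predict(x)
print(probs[:3])   # first three rows, one column per species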
Posted by: Guest on June-07-2020
0

supports multinomial logistic (softmax) and binomial logistic regression

# supports multinomial logistic (softmax) and binomial logistic regression

from pyspark.sql import Row
from pyspark.ml.linalg import Vectors
from pyspark.ml.classification import LogisticRegression, LogisticRegressionModel
bdf = sc.parallelize([
  Row(label=1.0, weight=2.0, features=Vectors.dense(1.0)),
  Row(label=0.0, weight=2.0, features=Vectors.sparse(1, [], []))]).toDF()
blor = LogisticRegression(maxIter=5, regParam=0.01, weightCol="weight")
blorModel = blor.fit(bdf)
blorModel.coefficients
# DenseVector([5.5...])
blorModel.intercept
# -2.68...
mdf = sc.parallelize([
  Row(label=1.0, weight=2.0, features=Vectors.dense(1.0)),
  Row(label=0.0, weight=2.0, features=Vectors.sparse(1, [], [])),
  Row(label=2.0, weight=2.0, features=Vectors.dense(3.0))]).toDF()
mlor = LogisticRegression(maxIter=5, regParam=0.01, weightCol="weight",
                          family="multinomial")
mlorModel = mlor.fit(mdf)
print(mlorModel.coefficientMatrix)
# DenseMatrix([[-2.3...],
#              [ 0.2...],
#              [ 2.1... ]])
mlorModel.interceptVector
# DenseVector([2.0..., 0.8..., -2.8...])
test0 = sc.parallelize([Row(features=Vectors.dense(-1.0))]).toDF()
result = blorModel.transform(test0).head()
result.prediction
# 0.0
result.probability
# DenseVector([0.99..., 0.00...])
result.rawPrediction
# DenseVector of raw margins (unbounded scores, unlike result.probability)
test1 = sc.parallelize([Row(features=Vectors.sparse(1, [0], [1.0]))]).toDF()
blorModel.transform(test1).head().prediction
# 1.0
blor.setParams("vector")
# Traceback (most recent call last):
#     ...
# TypeError: Method setParams forces keyword arguments.
# temp_path is assumed to point at an existing writable directory
lr_path = temp_path + "/lr"
blor.save(lr_path)
lr2 = LogisticRegression.load(lr_path)
lr2.getMaxIter()
# 5
model_path = temp_path + "/lr_path"
blorModel.save(model_path)
model2 = LogisticRegressionModel.load(model_path)
blorModel.coefficients[0] == model2.coefficients[0]
# True
blorModel.intercept == model2.intercept
# True
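One way to score the multinomial model is MulticlassClassificationEvaluator; the sketch below evaluates accuracy on the training DataFrame mdf from above and is not part of the original answer:

# sketch: training-set accuracy of the multinomial model
from pyspark.ml.evaluation import MulticlassClassificationEvaluator
mpreds = mlorModel.transform(mdf)
evaluator = MulticlassClassificationEvaluator(labelCol="label",
                                              predictionCol="prediction",
                                              metricName="accuracy")
print(evaluator.evaluate(mpreds))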
Posted by: Guest on March-22-2020
