@devharsh, created December 15, 2021
Evaluating classification algorithms on a multi-class, single-feature problem
import pandas as pd
from sklearn.linear_model import LogisticRegression
from sklearn import svm
from sklearn.ensemble import RandomForestClassifier
from sklearn.neural_network import MLPClassifier
from sklearn.model_selection import train_test_split

# Build a labeled dataset: each number 0..121 gets a FizzBuzz-style class label.
# The number is stored as an int so the single feature stays numeric for scikit-learn.
df = pd.DataFrame(columns=['Number', 'Class'])
for i in range(122):
    flag3 = (i % 3 == 0)
    flag5 = (i % 5 == 0)
    if flag3 and flag5:
        df.loc[i] = [i, 'fizzbuzz']
    elif flag3:
        df.loc[i] = [i, 'fizz']
    elif flag5:
        df.loc[i] = [i, 'buzz']
    else:
        df.loc[i] = [i, 'blank']
# Single numeric feature and multi-class target; the column is object dtype
# after row-wise assignment, so cast it to int before splitting.
X = df[['Number']].astype(int)
y = df.Class
X_tr, X_test, y_tr, y_test = train_test_split(X, y, random_state=0)
# Multinomial logistic regression on the single numeric feature.
LR = LogisticRegression(random_state=0, solver='lbfgs',
                        multi_class='multinomial').fit(X_tr, y_tr)
LR.predict(X_test)
print(round(LR.score(X_test, y_test), 4))
# Support vector classifier with a one-vs-one multi-class decision function.
SVM = svm.SVC(decision_function_shape='ovo').fit(X_tr, y_tr)
SVM.predict(X_test)
print(round(SVM.score(X_test, y_test), 4))
# Random forest with 1000 trees, each capped at depth 10.
RF = RandomForestClassifier(n_estimators=1000, max_depth=10,
                            random_state=0).fit(X_tr, y_tr)
RF.predict(X_test)
print(round(RF.score(X_test, y_test), 4))
# Multi-layer perceptron with two hidden layers (150 and 10 units).
NN = MLPClassifier(solver='lbfgs', alpha=1e-5, hidden_layer_sizes=(150, 10),
                   random_state=1).fit(X_tr, y_tr)
NN.predict(X_test)
print(round(NN.score(X_test, y_test), 4))
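
The four accuracy values above each come from a single train/test split, which can vary noticeably from run to run on only 122 samples. A minimal sketch of a more stable comparison, assuming 5-fold cross-validation over the same single-feature data (the fold count and the dictionary of estimators are choices made here, not part of the original snippet):

from sklearn.model_selection import cross_val_score

# Re-score each configured estimator with 5-fold cross-validation.
# cross_val_score clones the estimator, so only its hyperparameters are
# reused here, not the fit from the earlier train/test split.
models = {'LogisticRegression': LR, 'SVC': SVM, 'RandomForest': RF, 'MLP': NN}
for name, model in models.items():
    scores = cross_val_score(model, X, y, cv=5)
    print(name, round(scores.mean(), 4), '+/-', round(scores.std(), 4))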