# Classification Metrics ([[Classification Systems]])
- [[Accuracy]] #flashcard
- [[Precision]] #flashcard
<!--ID: 1750622811410-->
- [[Recall]] #flashcard
- [[F1 Score]]
- [[ROC-AUC]]
- [[PR-AUC]]
- [[Confusion Matrix]]
- [[Log Loss]]
- [[Pearson Correlation Coefficient]]
- [[Natural Language Processing (NLP)]]
- [[GLUE Benchmark]]
# Ranking Metrics ([[Recommendation Systems]])
- [[Normalized Discounted Cumulative Gain (NDCG)]]
- [[Mean Average Precision (MAP)]]
- [[Mean Reciprocal Rank (MRR)]]
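A minimal sketch of NDCG and MRR on toy per-query scores (the relevance/score arrays are made up for illustration; `ndcg_score` comes from scikit-learn, MRR is computed by hand):
```python
import numpy as np
from sklearn.metrics import ndcg_score

# One row per query: graded relevance of each candidate and the model's scores
true_relevance = np.asarray([[3, 2, 0, 1], [1, 0, 2, 0]])
pred_scores = np.asarray([[0.9, 0.7, 0.1, 0.4], [0.2, 0.1, 0.8, 0.05]])
print('NDCG@4:', ndcg_score(true_relevance, pred_scores, k=4))

# MRR: mean over queries of 1 / (rank of the first relevant item)
def mean_reciprocal_rank(relevance, scores):
    rr = []
    for rel, sc in zip(relevance, scores):
        order = np.argsort(sc)[::-1]          # candidates sorted by descending score
        ranked_rel = np.asarray(rel)[order]
        hits = np.nonzero(ranked_rel > 0)[0]  # positions of relevant items
        rr.append(1.0 / (hits[0] + 1) if len(hits) else 0.0)
    return float(np.mean(rr))

print('MRR:', mean_reciprocal_rank(true_relevance, pred_scores))
```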
# GenAI Metrics ([[Generative AI Systems]])
## Image Generation Metrics
- [[Frechet Inception Distance (FID)]]
- [[CLIP Score]]
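FID compares the mean and covariance of Inception features extracted from real vs. generated images. A minimal sketch, assuming the two feature matrices have already been extracted (in practice a maintained implementation such as torchmetrics' `FrechetInceptionDistance` is preferable):
```python
import numpy as np
from scipy.linalg import sqrtm

def frechet_inception_distance(feats_real, feats_gen):
    # Fit a Gaussian to each feature set and compare the two Gaussians
    mu_r, mu_g = feats_real.mean(axis=0), feats_gen.mean(axis=0)
    cov_r = np.cov(feats_real, rowvar=False)
    cov_g = np.cov(feats_gen, rowvar=False)
    covmean = sqrtm(cov_r @ cov_g)
    if np.iscomplexobj(covmean):  # sqrtm can return small imaginary parts
        covmean = covmean.real
    return np.sum((mu_r - mu_g) ** 2) + np.trace(cov_r + cov_g - 2 * covmean)

# Toy example with random "features"; real use: Inception-v3 pooling activations
rng = np.random.default_rng(0)
print('FID:', frechet_inception_distance(rng.normal(size=(500, 64)),
                                          rng.normal(loc=0.1, size=(500, 64))))
```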
## Text Generation Metrics
- [[Perplexity Metric]]
- [[BLEU]]
- [[ROUGE]]
- [[BERTScore]]
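Two quick sketches: perplexity is the exponential of the mean per-token negative log-likelihood (the NLL values below are made up), and BLEU can be computed with NLTK's `sentence_bleu` (assumes `nltk` is installed):
```python
import math
from nltk.translate.bleu_score import sentence_bleu, SmoothingFunction

# Perplexity = exp(mean negative log-likelihood per token)
token_nlls = [2.1, 0.4, 1.3, 0.9]  # would come from your language model
perplexity = math.exp(sum(token_nlls) / len(token_nlls))
print('Perplexity:', perplexity)

# BLEU: n-gram overlap between a candidate and one or more references
references = [['the', 'cat', 'sat', 'on', 'the', 'mat']]
candidate = ['the', 'cat', 'is', 'on', 'the', 'mat']
bleu = sentence_bleu(references, candidate,
                     smoothing_function=SmoothingFunction().method1)
print('BLEU:', bleu)
```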
# Regression Metrics
- [[Mean Absolute Error (MAE)]]
- [[Mean Squared Error]]
- [[R-squared]]
<!--ID: 1750622811413-->
```python
"""
K-fold cross-validation
"""
from sklearn.datasets import load_breast_cancer
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import cross_val_score, train_test_split

# Example binary-classification data so the snippets below are runnable
X, y = load_breast_cancer(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

model = RandomForestClassifier(random_state=42)
# 10-fold cross-validation; returns one accuracy score per fold
scores = cross_val_score(model, X_train, y_train, cv=10)
print(scores.mean())
"""
Accuracy
"""
from sklearn.metrics import accuracy_score

# Fit on the training split (cross_val_score above does not fit `model` in place)
model.fit(X_train, y_train)
y_pred = model.predict(X_test)
accuracy = accuracy_score(y_test, y_pred)
print('Accuracy:', accuracy)
"""
Precision
"""
from sklearn.metrics import precision_score
precision = precision_score(y_test, y_pred, average='binary')
print('Precision:', precision)
"""
Recall
"""
from sklearn.metrics import recall_score
recall = recall_score(y_test, y_pred, average='binary')
print('Recall:', recall)
"""
F1 Score
"""
from sklearn.metrics import f1_score
f1 = f1_score(y_test, y_pred, average='binary')
print('F1 Score:', f1)
"""
ROC-AUC
"""
from sklearn.metrics import roc_auc_score
roc_auc = roc_auc_score(y_test, model.predict_proba(X_test)[:, 1])
print('ROC-AUC:', roc_auc)
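"""
PR-AUC (Average Precision)
"""
from sklearn.metrics import average_precision_score
# average_precision_score summarizes the precision-recall curve;
# more informative than ROC-AUC on heavily imbalanced classes
pr_auc = average_precision_score(y_test, model.predict_proba(X_test)[:, 1])
print('PR-AUC:', pr_auc)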
"""
Confusion Matrix
"""
from sklearn.metrics import confusion_matrix
conf_matrix = confusion_matrix(y_test, y_pred)
print('Confusion Matrix:\n', conf_matrix)
"""
Mean Absolute Error (MAE)
"""
from sklearn.metrics import mean_absolute_error
mae = mean_absolute_error(y_test, y_pred)
print('Mean Absolute Error:', mae)
"""
Mean Squared Error (MSE)
"""
from sklearn.metrics import mean_squared_error
mse = mean_squared_error(y_test, y_pred)
print('Mean Squared Error:', mse)
"""
R-squared
"""
from sklearn.metrics import r2_score
r2 = r2_score(y_test, y_pred)
print('R-squared:', r2)
"""
Log Loss
"""
from sklearn.metrics import log_loss
logloss = log_loss(y_test, model.predict_proba(X_test))
print('Log Loss:', logloss)
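"""
Pearson Correlation Coefficient
"""
from scipy.stats import pearsonr
# Typically reported for continuous predictions (e.g. GLUE STS-B);
# applied to the same arrays here just to show the call
pearson_r, _ = pearsonr(y_test, y_pred)
print('Pearson r:', pearson_r)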
```