# Module-level imports required by the methods in this excerpt.
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import sklearn.metrics

   @staticmethod
   def compute_confusion_matrix(y_test, _predicted, _accuracy, classifier):
       "compute confusion matrix"
       confusion_clf = sklearn.metrics.confusion_matrix(y_test, _predicted)
       df_clf = pd.DataFrame(confusion_clf,
                             index=list(range(0, 2)),
                             columns=list(range(0, 2)))
       plt.figure(figsize=(5.5, 4))
        ax_heatmap = sns.heatmap(df_clf, annot=True, vmin=0, vmax=11,
                                 cmap="Blues")
       plt.title(str(classifier) + ' \nAccuracy:{0:.3f}'.format(_accuracy))
       plt.ylabel('True label')
       plt.xlabel('Predicted label')
        return df_clf, ax_heatmap

    @staticmethod
   def plot_precision_recall(clf, X_test, y_test, classifier, _predicted):
       "plot precision recall curve"
       _precision = sklearn.metrics.precision_score(y_test, _predicted)
       _recall = sklearn.metrics.recall_score(y_test, _predicted)
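        # predict_proba returns one probability column per class; column 1
        # (the positive class) is what precision_recall_curve expects below.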
       y_score_clf = clf.predict_proba(X_test)
       y_score_df = pd.DataFrame(data=y_score_clf)
       precision, recall, thresholds = sklearn.metrics.precision_recall_curve(
           y_test, y_score_df[1])
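        # Index of the threshold closest to zero; with predict_proba scores
        # this is simply the smallest threshold on the curve, and it is
        # highlighted with a red marker further down.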
       closest_zero = np.argmin(np.abs(thresholds))
       closest_zero_p = precision[closest_zero]
       closest_zero_r = recall[closest_zero]
       plt.figure()
       plt.xlim([0.0, 1.01])
       plt.ylim([0.0, 1.01])
        result, = plt.plot(precision, recall)
       plt.title(
           str(classifier) +
           ' Precision-Recall Curve \nprecision :{:0.2f}'.format(_precision) +
           ' recall: {:0.2f}'.format(_recall))
       plt.plot(closest_zero_p,
                closest_zero_r,
                'o',
                markersize=12,
                fillstyle='none',
                c='r',
                mew=3)
       plt.xlabel('Precision', fontsize=16)
       plt.ylabel('Recall', fontsize=16)
       plt.show()
        return result

    @staticmethod
   def plot_roc(clf, X_test, y_test, classifier):
       "plot roc curve"
       y_score_clf = clf.predict_proba(X_test)
       y_score_df = pd.DataFrame(data=y_score_clf)
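        # As in plot_precision_recall, column 1 holds the positive-class
        # probability used to compute the ROC curve.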
       fpr_clf, tpr_clf, _ = sklearn.metrics.roc_curve(y_test, y_score_df[1])
       roc_auc_clf = sklearn.metrics.auc(fpr_clf, tpr_clf)
       plt.figure()
       plt.xlim([-0.01, 1.00])
       plt.ylim([-0.01, 1.01])
        result, = plt.plot(fpr_clf,
                           tpr_clf,
                           lw=3,
                           label=str(classifier) +
                           ' ROC curve (area = {:0.2f})'.format(roc_auc_clf))
       plt.xlabel('False Positive Rate', fontsize=16)
       plt.ylabel('True Positive Rate', fontsize=16)
       plt.title('ROC curve ' + str(classifier) +
                 ' \nAUC:{0:.3f}'.format(roc_auc_clf),
                 fontsize=16)
       plt.legend(loc='lower right', fontsize=13)
       plt.plot([0, 1], [0, 1], color='navy', lw=3, linestyle='--')
       plt.show()
        return roc_auc_clf, result
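

# Usage sketch (illustrative, not part of the original code): the enclosing
# class is not shown in this excerpt, so ``Evaluation`` below is a hypothetical
# placeholder for its name, and LogisticRegression is only one example of a
# fitted binary classifier that exposes predict_proba.
#
# from sklearn.datasets import make_classification
# from sklearn.linear_model import LogisticRegression
# from sklearn.model_selection import train_test_split
#
# X, y = make_classification(n_samples=200, random_state=0)
# X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
# clf = LogisticRegression().fit(X_train, y_train)
# predicted = clf.predict(X_test)
# accuracy = sklearn.metrics.accuracy_score(y_test, predicted)
#
# Evaluation.compute_confusion_matrix(y_test, predicted, accuracy, "LogReg")
# Evaluation.plot_precision_recall(clf, X_test, y_test, "LogReg", predicted)
# Evaluation.plot_roc(clf, X_test, y_test, "LogReg")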