.. DO NOT EDIT.
.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY.
.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE:
.. "auto_examples/classification/plot_lda.py"
.. LINE NUMBERS ARE GIVEN BELOW.

.. only:: html

    .. note::
        :class: sphx-glr-download-link-note

        Click :ref:`here <sphx_glr_download_auto_examples_classification_plot_lda.py>`
        to download the full example code or to run this example in your browser via Binder

.. rst-class:: sphx-glr-example-title

.. _sphx_glr_auto_examples_classification_plot_lda.py:


===========================================================================
Normal, Ledoit-Wolf and OAS Linear Discriminant Analysis for classification
===========================================================================

This example illustrates how the Ledoit-Wolf and Oracle Approximating
Shrinkage (OAS) estimators of covariance can improve classification.
Shrinkage regularizes the estimate of the covariance matrix, which helps
most when the number of training samples is small compared to the number
of features.

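The core of the comparison is how ``LinearDiscriminantAnalysis`` with the
``lsqr`` solver handles the covariance estimate. As a minimal sketch (the
full, runnable script follows below; variable names here are only
illustrative), the three variants compared in this example are constructed
as follows:

.. code-block:: default

    from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
    from sklearn.covariance import OAS

    # Ledoit-Wolf shrinkage, chosen in closed form via shrinkage="auto"
    lda_ledoit_wolf = LinearDiscriminantAnalysis(solver="lsqr", shrinkage="auto")

    # plain LDA with the empirical (unshrunk) covariance estimate
    lda_plain = LinearDiscriminantAnalysis(solver="lsqr", shrinkage=None)

    # OAS shrinkage, supplied through the covariance_estimator parameter
    lda_oas = LinearDiscriminantAnalysis(
        solver="lsqr", covariance_estimator=OAS(store_precision=False)
    )
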
.. GENERATED FROM PYTHON SOURCE LINES 10-105



.. image-sg:: /auto_examples/classification/images/sphx_glr_plot_lda_001.png
   :alt: Linear Discriminant Analysis vs. Shrinkage Linear Discriminant Analysis vs. OAS Linear Discriminant Analysis (1 discriminative feature)
   :srcset: /auto_examples/classification/images/sphx_glr_plot_lda_001.png
   :class: sphx-glr-single-img





.. code-block:: default


    import numpy as np
    import matplotlib.pyplot as plt

    from sklearn.datasets import make_blobs
    from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
    from sklearn.covariance import OAS


    n_train = 20  # samples for training
    n_test = 200  # samples for testing
    n_averages = 50  # how often to repeat classification
    n_features_max = 75  # maximum number of features
    step = 4  # step size of the feature-count sweep


    def generate_data(n_samples, n_features):
        """Generate random blob-ish data with noisy features.

        This returns an array of input data with shape `(n_samples, n_features)`
        and an array of `n_samples` target labels.

        Only one feature contains discriminative information; the other features
        contain only noise.
        """
        X, y = make_blobs(n_samples=n_samples, n_features=1, centers=[[-2], [2]])

        # add non-discriminative features
        if n_features > 1:
            X = np.hstack([X, np.random.randn(n_samples, n_features - 1)])
        return X, y


    acc_clf1, acc_clf2, acc_clf3 = [], [], []
    n_features_range = range(1, n_features_max + 1, step)
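    # sweep the number of features and average the test accuracy over random draws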
    for n_features in n_features_range:
        score_clf1, score_clf2, score_clf3 = 0, 0, 0
        for _ in range(n_averages):
            X, y = generate_data(n_train, n_features)

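            # LDA with Ledoit-Wolf shrinkage of the covariance (shrinkage="auto")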
            clf1 = LinearDiscriminantAnalysis(solver="lsqr", shrinkage="auto").fit(X, y)
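            # plain LDA with the empirical covariance (no shrinkage)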
            clf2 = LinearDiscriminantAnalysis(solver="lsqr", shrinkage=None).fit(X, y)
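            # LDA with the Oracle Approximating Shrinkage (OAS) covariance estimator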
            oa = OAS(store_precision=False, assume_centered=False)
            clf3 = LinearDiscriminantAnalysis(solver="lsqr", covariance_estimator=oa).fit(
                X, y
            )

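            # evaluate all three classifiers on freshly generated test data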
            X, y = generate_data(n_test, n_features)
            score_clf1 += clf1.score(X, y)
            score_clf2 += clf2.score(X, y)
            score_clf3 += clf3.score(X, y)

        acc_clf1.append(score_clf1 / n_averages)
        acc_clf2.append(score_clf2 / n_averages)
        acc_clf3.append(score_clf3 / n_averages)

    features_samples_ratio = np.array(n_features_range) / n_train

    plt.plot(
        features_samples_ratio,
        acc_clf1,
        linewidth=2,
        label="Linear Discriminant Analysis with Ledoit Wolf",
        color="navy",
        linestyle="dashed",
    )
    plt.plot(
        features_samples_ratio,
        acc_clf2,
        linewidth=2,
        label="Linear Discriminant Analysis",
        color="gold",
        linestyle="solid",
    )
    plt.plot(
        features_samples_ratio,
        acc_clf3,
        linewidth=2,
        label="Linear Discriminant Analysis with OAS",
        color="red",
        linestyle="dotted",
    )

    plt.xlabel("n_features / n_samples")
    plt.ylabel("Classification accuracy")

    plt.legend(loc="lower left", prop={"size": 12})
    plt.suptitle(
        "Linear Discriminant Analysis vs.\n"
        "Shrinkage Linear Discriminant Analysis vs.\n"
        "OAS Linear Discriminant Analysis (1 discriminative feature)"
    )
    plt.show()


.. rst-class:: sphx-glr-timing

   **Total running time of the script:** ( 0 minutes  4.476 seconds)


.. _sphx_glr_download_auto_examples_classification_plot_lda.py:

.. only:: html

  .. container:: sphx-glr-footer sphx-glr-footer-example


    .. container:: binder-badge

      .. image:: images/binder_badge_logo.svg
        :target: https://mybinder.org/v2/gh/scikit-learn/scikit-learn/1.1.X?urlpath=lab/tree/notebooks/auto_examples/classification/plot_lda.ipynb
        :alt: Launch binder
        :width: 150 px

    .. container:: sphx-glr-download sphx-glr-download-python

      :download:`Download Python source code: plot_lda.py <plot_lda.py>`

    .. container:: sphx-glr-download sphx-glr-download-jupyter

      :download:`Download Jupyter notebook: plot_lda.ipynb <plot_lda.ipynb>`


.. only:: html

 .. rst-class:: sphx-glr-signature

    `Gallery generated by Sphinx-Gallery <https://sphinx-gallery.github.io>`_