|
55 | 55 | # Plot a PCA representation |
56 | 56 | # ------------------------- |
57 | 57 | # Let's apply a Principal Component Analysis (PCA) to the iris dataset |
58 | | -# and then plot the irises across the first three PCA dimensions. |
| 58 | +# and then plot the irises across the first three principal components. |
59 | 59 | # This will allow us to better differentiate among the three types! |
60 | 60 |
|
61 | 61 | import matplotlib.pyplot as plt |
|
78 | 78 | ) |
79 | 79 |
|
80 | 80 | ax.set( |
81 | | - title="First three PCA dimensions", |
82 | | - xlabel="1st Eigenvector", |
83 | | - ylabel="2nd Eigenvector", |
84 | | - zlabel="3rd Eigenvector", |
| 81 | + title="First three principal components", |
| 82 | + xlabel="1st Principal Component", |
| 83 | + ylabel="2nd Principal Component", |
| 84 | + zlabel="3rd Principal Component", |
85 | 85 | ) |
86 | 86 | ax.xaxis.set_ticklabels([]) |
87 | 87 | ax.yaxis.set_ticklabels([]) |
|
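For orientation, here is a minimal self-contained sketch of the plotting step these hunks relabel. Only the `ax.set(...)` labels and the tick-label calls appear in the diff; the data loading, PCA projection, and figure setup below are assumptions about typical scikit-learn/matplotlib usage, not a reconstruction of the file's elided lines:

```python
import matplotlib.pyplot as plt

from sklearn.datasets import load_iris
from sklearn.decomposition import PCA

iris = load_iris()
# Project the 4 original features onto the first three principal components.
X_reduced = PCA(n_components=3).fit_transform(iris.data)

fig = plt.figure(figsize=(8, 6))
ax = fig.add_subplot(projection="3d")
ax.scatter(X_reduced[:, 0], X_reduced[:, 1], X_reduced[:, 2], c=iris.target, s=40)
ax.set(
    title="First three principal components",
    xlabel="1st Principal Component",
    ylabel="2nd Principal Component",
    zlabel="3rd Principal Component",
)
# The tick values are PCA coordinates with no physical units, so hide them.
ax.xaxis.set_ticklabels([])
ax.yaxis.set_ticklabels([])
ax.zaxis.set_ticklabels([])
plt.show()
```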
101 | 101 | # %% |
102 | 102 | # PCA will create 3 new features that are a linear combination of the 4 original |
103 | 103 | # features, chosen so each component captures the maximum remaining variance. With this |
104 | | -# transformation, we see that we can identify each species using only the first feature |
105 | | -# (i.e., first eigenvector). |
| 104 | +# transformation, we can identify each species using only the first principal |
| 105 | +# component. |
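The variance claim in the new wording is easy to check numerically. A short sketch, assuming only the standard scikit-learn `PCA` API (the printed values are approximate):

```python
from sklearn.datasets import load_iris
from sklearn.decomposition import PCA

iris = load_iris()
pca = PCA(n_components=3).fit(iris.data)

# Each row of components_ writes one principal component as a linear
# combination of the 4 original features.
print(pca.components_.shape)  # (3, 4)

# The first component carries the overwhelming share of the variance,
# which is why it alone separates the three species reasonably well.
print(pca.explained_variance_ratio_)  # roughly [0.92, 0.05, 0.02]
```

The first ratio dwarfs the other two, which is what makes the single-component separation described in the updated comment plausible.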