import numpy as np


class SoftmaxRegression:
    def __init__(self, n_classes, learning_rate=0.01, n_iters=1000):
        self.n_classes = n_classes
        self.lr = learning_rate
        self.n_iters = n_iters
        self.W = None  # weight matrix, shape (n_classes, n_features)
        self.b = None  # bias vector, shape (n_classes,)

    def _softmax(self, Z):
        # Subtract the row-wise max before exponentiating for numerical stability.
        exp_Z = np.exp(Z - np.max(Z, axis=1, keepdims=True))
        return exp_Z / exp_Z.sum(axis=1, keepdims=True)

    def _one_hot(self, y):
        # Convert integer labels into an (m, n_classes) one-hot matrix.
        m = y.shape[0]
        y_one_hot = np.zeros((m, self.n_classes))
        y_one_hot[np.arange(m), y] = 1
        return y_one_hot

    def fit(self, X, y):
        m, n = X.shape
        y_one_hot = self._one_hot(y)

        self.W = np.zeros((self.n_classes, n))
        self.b = np.zeros(self.n_classes)

        for _ in range(self.n_iters):
            # Forward pass: class scores, then probabilities.
            Z = np.dot(X, self.W.T) + self.b
            P = self._softmax(Z)

            # Gradient of cross-entropy loss w.r.t. the scores is P - Y.
            dZ = P - y_one_hot
            dW = (1 / m) * np.dot(dZ.T, X)
            db = (1 / m) * np.sum(dZ, axis=0)

            # Gradient descent update on weights and biases.
            self.W -= self.lr * dW
            self.b -= self.lr * db

    def predict(self, X):
        Z = np.dot(X, self.W.T) + self.b
        P = self._softmax(Z)
        return np.argmax(P, axis=1)
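

# A minimal usage sketch (an assumption, not part of the original listing):
# train on a tiny synthetic 3-class dataset and check accuracy on the
# training points. The blob centers and hyperparameters are illustrative.
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    # Three Gaussian blobs in 2D, 50 points each, labeled 0, 1, 2.
    centers = ([0, 0], [3, 0], [0, 3])
    X = np.vstack([rng.normal(loc=c, scale=0.5, size=(50, 2)) for c in centers])
    y = np.repeat(np.arange(3), 50)

    model = SoftmaxRegression(n_classes=3, learning_rate=0.1, n_iters=2000)
    model.fit(X, y)
    accuracy = np.mean(model.predict(X) == y)
    print(f"training accuracy: {accuracy:.2f}")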
 