When I run the code below, the following error is displayed:

AttributeError: 'LogisticRegression' object has no attribute 'w'

I haven't defined a method named w anywhere in the LogisticRegression class, so why does this error occur?
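To make sure I understand the message itself, I also tried the tiny standalone check below (the class and attribute names there are only for illustration and have nothing to do with my actual code):

class Demo(object):
    def __init__(self):
        self.a = 1      # only 'a' is ever assigned here

d = Demo()
print(d.a)              # prints 1
print(d.w)              # AttributeError: 'Demo' object has no attribute 'w'

That produces the same kind of message, so I suspect the error is about an attribute rather than a method, but I still don't see where my code goes wrong. The full script that actually raises the error is below: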

import numpy as np
import scipy.optimize as op
import matplotlib.pyplot as plt
import seaborn as sns

sns.set_style("whitegrid")


class LogisticRegression(object):
    def __init__(self, X, y, alg=0, eta=0.001, w=None):
        self.X = X
        self.y = y
        self.alg = alg
        self.eta = eta
        self.m, self.n = np.shape(X)
        if not (w is None):
            self.w = np.random.randn(self.n, 1)

    def sigFunc(self, z):
        # logistic (sigmoid) function
        return 1.0 / (1.0 + np.exp(-z))

    def decide(self, x):
        # threshold the predicted probability at 0.5
        return np.where(x >= 0.5, 1, 0)

    def costfunc(self, w):
        # cross-entropy cost and its gradient for the given weights
        z = np.dot(self.X, w)
        phi = self.sigFunc(z)
        part1 = np.multiply(self.y, np.log(phi))
        part2 = np.multiply((1 - self.y), np.log(1 - phi))
        J = (-part1 - part2).sum() / self.m
        grad = np.dot(self.X.T, (phi - self.y)) / self.m
        return J, grad

    def graddescent(self, maxiter):
        # batch gradient descent, recording the cost at every epoch
        self.J = []
        self.epoch = []
        for i in range(maxiter):
            J, grad = self.costfunc(self.w)
            self.J.append(J)
            self.epoch.append(i)
            self.w = self.w - self.eta * grad
        return self.w

    def fit(self):
        if self.alg == 0:
            # plain gradient descent
            maxiter = 1000
            self.w = self.graddescent(maxiter)
        else:
            # scipy's TNC optimizer; costfunc already reads X and y from self,
            # so no extra args are passed
            Result = op.minimize(fun=self.costfunc,
                                 x0=self.w,
                                 method='TNC',
                                 jac=True)
            self.w = Result.x
            self.w = self.w.T

        # training accuracy on the fitted data
        z = np.dot(self.X, self.w)
        phi = self.sigFunc(z)
        correctAnswer = np.where(self.y == self.decide(phi), 1, 0)
        self.accuracy = float(np.sum(correctAnswer)) / len(correctAnswer)

    def plot(self):
        # indices of each class
        ind_1 = np.where(self.y == 1)
        ind_0 = np.where(self.y == 0)

        # range of the first feature
        x1_1 = self.X[:, [1]].min()
        x1_2 = self.X[:, [1]].max()

        # decision boundary: w0 + w1*x1 + w2*x2 = 0  ->  x2 = -(w0 + w1*x1) / w2
        x2_1 = -(self.w[0, 0] + self.w[1, 0] * x1_1) / self.w[2, 0]
        x2_2 = -(self.w[0, 0] + self.w[1, 0] * x1_2) / self.w[2, 0]

        # scatter the two classes, then draw the boundary line
        plt.plot(self.X[ind_1, [1]], self.X[ind_1, [2]], "bo", markersize=3)
        plt.plot(self.X[ind_0, [1]], self.X[ind_0, [2]], "ro", markersize=3)

        plt.plot([x1_1, x1_2], [x2_1, x2_2], "g-")
        plt.show()

    def lossplot(self):
        plt.plot(self.epoch, self.J)
        plt.xlabel("Epoch")
        plt.ylabel("cost")
        plt.show()

# two Gaussian clusters of 500 points each
x1 = np.random.normal([2, 2], 0.5, (500, 2))
x2 = np.random.normal([-2, -2], 0.5, (500, 2))
y1 = np.zeros(500)
y2 = np.ones(500)
Y = np.concatenate((y1, y2), axis=0)
x = np.concatenate((x1, x2), axis=0)
# prepend a column of ones for the bias term
X = np.concatenate((np.ones(1000).reshape(-1, 1), x), axis=1)

lr = LogisticRegression(X, Y, alg=0, eta=0.001)

lr.fit()

Also, is there a way to make the plot part of the code simpler? Thank you in advance.
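For example, would something along the lines of the rough, untested sketch below be a reasonable direction? plot_boundary is just a name I made up; it assumes X keeps the bias column in column 0 and the two features in columns 1 and 2, y holds the 0/1 labels, and w holds three weights.

import numpy as np
import matplotlib.pyplot as plt

def plot_boundary(X, y, w):
    # colour the points by their 0/1 label instead of plotting each class separately
    plt.scatter(X[:, 1], X[:, 2], c=y, cmap="bwr", s=9)

    # decision boundary: w0 + w1*x1 + w2*x2 = 0  ->  x2 = -(w0 + w1*x1) / w2
    w = np.ravel(w)
    x1 = np.array([X[:, 1].min(), X[:, 1].max()])
    x2 = -(w[0] + w[1] * x1) / w[2]
    plt.plot(x1, x2, "g-")
    plt.show()

I imagine it could be called as plot_boundary(lr.X, lr.y, lr.w) or moved back into the class as a method, but I'm not sure this is the cleanest way.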