Deep learning in C
This is based on DBN.c from C/C++によるDeep Learningの実装 (Deep Belief Nets, Stacked Denoising Autoencoders 編) - Yusuke Sugomori's Blog.
I made what I thought were small changes to DBN.c so that double-valued training data can be fed in, but it does not work. Where is the mistake?
The input is the freely available iris data set, rescaled so that every feature falls between 0 and 1.
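(For reference, the rescaling is ordinary per-feature min-max normalization, x' = (x - min) / (max - min). A minimal sketch of what I mean, assuming the 4 iris features; scaleMinMax is a hypothetical helper and not part of the code below:)

/* Hypothetical helper: rescale each of the 4 iris features to [0,1] in place. */
void scaleMinMax(double data[][4], int rows) {
    int i, j;
    for (j = 0; j < 4; j++) {
        double lo = data[0][j], hi = data[0][j];
        for (i = 1; i < rows; i++) {
            if (data[i][j] < lo) lo = data[i][j];
            if (data[i][j] > hi) hi = data[i][j];
        }
        for (i = 0; i < rows; i++)
            data[i][j] = (data[i][j] - lo) / (hi - lo); /* assumes hi > lo */
    }
}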
Expected result
For each test sample, the output for one class should clearly dominate, giving a high classification rate.
I expected output resembling the training labels posted further below, e.g.:
0.900 0.050 0.050
0.900 0.040 0.060
:
0.040 0.900 0.060
0.100 0.850 0.050
:
0.050 0.100 0.850
Actual result
The outputs for the three classes are nearly identical, and the classification rate is about 33%, i.e. chance level.
The same values are returned for every test sample:
0.32467 0.33520 0.34013
0.32467 0.33520 0.34013
(middle rows omitted because of the character limit)
0.32467 0.33520 0.34014
Code
DBNTrainingDouble.c
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
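/* Read rows of 4 comma-separated doubles from fileName into array, one CSV line per row. */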
void readFileData(const char *fileName,double array[][4]){
FILE *fp ;
int row;
if((fp=fopen(fileName,"r"))!=NULL){
row=0;
while(4==fscanf(fp,"%lf,%lf,%lf,%lf",&array[row][0],&array[row][1],&array[row][2],&array[row][3])){
row++;
}
fclose(fp);
}else{
fprintf(stderr,"Open error: %s\n",fileName);
}
}
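/* Read rows of 3 comma-separated integers (one-hot labels) from fileName into array. */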
void readFileLabel(const char *fileName,int array[][3]){
FILE *fp ;
int row;
if((fp=fopen(fileName,"r"))!=NULL){
row=0;
while(3==fscanf(fp,"%d,%d,%d",&array[row][0],&array[row][1],&array[row][2])){
row++;
}
fclose(fp);
}else{
fprintf(stderr,"Open error: %s\n",fileName);
}
}
/* HiddenLayer.h */
typedef struct {
int N;
int n_in;
int n_out;
double **W;
double *b;
} HiddenLayer;
void HiddenLayer__construct(HiddenLayer*, int, int, int, double**, double*);
void HiddenLayer__destruct(HiddenLayer*);
double HiddenLayer_output(HiddenLayer*, double*, double*, double);
void HiddenLayer_sample_h_given_v(HiddenLayer*, double*, double*);
/* RBM.h */
typedef struct {
int N;
int n_visible;
int n_hidden;
double **W;
double *hbias;
double *vbias;
} RBM;
void RBM__construct(RBM*, int, int, int, double**, double*, double*);
void RBM__destruct(RBM*);
void RBM_contrastive_divergence(RBM*, double*, double, int);
void RBM_sample_h_given_v(RBM*, double*, double*, double*);
void RBM_sample_v_given_h(RBM*, double*, double*, double*);
double RBM_propup(RBM*, double*, double*, double);
double RBM_propdown(RBM*, double*, int, double);
void RBM_gibbs_hvh(RBM*, double*, double*, double*, double*, double*);
void RBM_reconstruct(RBM*, double*, double*);
/* LogisticRegression.h */
typedef struct {
int N;
int n_in;
int n_out;
double **W;
double *b;
} LogisticRegression;
void LogisticRegression__construct(LogisticRegression*, int, int, int);
void LogisticRegression__destruct(LogisticRegression*);
void LogisticRegression_train(LogisticRegression*, double*, int*, double);
void LogisticRegression_softmax(LogisticRegression*, double*);
void LogisticRegression_predict(LogisticRegression*, double*, double*);
/* DBN.h */
typedef struct {
int N;
int n_ins;
int *hidden_layer_sizes;
int n_outs;
int n_layers;
HiddenLayer *sigmoid_layers;
RBM *rbm_layers;
LogisticRegression log_layer;
} DBN;
void DBN__construct(DBN*, int, int, int*, int, int);
void DBN__destruct(DBN*);
void DBN_pretrain(DBN*, double*, double, int, int);
void DBN_finetune(DBN*, double*, int*, double, int);
void DBN_predict(DBN*, double*, double*);
/* utils.h */
double uniform(double, double);
int binomial(int, double);
double sigmoid(double);
void test_dbn(void);
double uniform(double min, double max) {
return rand() / (RAND_MAX + 1.0) * (max - min) + min;
}
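/* Draw from Binomial(n, p) by counting successes over n Bernoulli(p) trials. */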
int binomial(int n, double p) {
if(p < 0 || p > 1) return 0;
int i;
int c = 0;
double r;
for(i=0; i<n; i++) {
r = rand() / (RAND_MAX + 1.0);
if (r < p) c++;
}
return c;
}
double sigmoid(double x) {
return 1.0 / (1.0 + exp(-x));
}
// DBN
void DBN__construct(DBN* this, int N, \
int n_ins, int *hidden_layer_sizes, int n_outs, int n_layers) {
int i, input_size;
this->N = N;
this->n_ins = n_ins;
this->hidden_layer_sizes = hidden_layer_sizes;
this->n_outs = n_outs;
this->n_layers = n_layers;
this->sigmoid_layers = (HiddenLayer *)malloc(sizeof(HiddenLayer) * n_layers);
this->rbm_layers = (RBM *)malloc(sizeof(RBM) * n_layers);
// construct multi-layer
for(i=0; i<n_layers; i++) {
if(i == 0) {
input_size = n_ins;
} else {
input_size = hidden_layer_sizes[i - 1];
}
// construct sigmoid_layer
HiddenLayer__construct(&(this->sigmoid_layers[i]), \
N, input_size, hidden_layer_sizes[i], NULL, NULL);
// construct rbm_layer
RBM__construct(&(this->rbm_layers[i]), N, input_size, hidden_layer_sizes[i], \
this->sigmoid_layers[i].W, this->sigmoid_layers[i].b, NULL);
}
// layer for output using LogisticRegression
LogisticRegression__construct(&(this->log_layer), \
N, hidden_layer_sizes[n_layers-1], n_outs);
}
void DBN__destruct(DBN* this) {
int i;
for(i=0; i<this->n_layers; i++) {
HiddenLayer__destruct(&(this->sigmoid_layers[i]));
RBM__destruct(&(this->rbm_layers[i]));
}
free(this->sigmoid_layers);
free(this->rbm_layers);
}
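/* Greedy layer-wise pretraining: each sample is pushed through the already-trained layers below layer i (by sampling), and CD-k is run on the resulting layer input. */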
void DBN_pretrain(DBN* this, double *input, double lr, int k, int epochs) {
int i, j, l, m, n, epoch;
double *layer_input;
int prev_layer_input_size;
double *prev_layer_input;
double *train_X = (double *)malloc(sizeof(double) * this->n_ins);
for(i=0; i<this->n_layers; i++) { // layer-wise
for(epoch=0; epoch<epochs; epoch++) { // training epochs
for(n=0; n<this->N; n++) { // input x1...xN
// initial input
for(m=0; m<this->n_ins; m++) train_X[m] = input[n * this->n_ins + m];
// layer input
for(l=0; l<=i; l++) {
if(l == 0) {
layer_input = (double *)malloc(sizeof(double) * this->n_ins);
for(j=0; j<this->n_ins; j++) layer_input[j] = train_X[j];
} else {
if(l == 1) prev_layer_input_size = this->n_ins;
else prev_layer_input_size = this->hidden_layer_sizes[l-2];
prev_layer_input = (double *)malloc(sizeof(double) * prev_layer_input_size);
for(j=0; j<prev_layer_input_size; j++) prev_layer_input[j] = layer_input[j];
free(layer_input);
layer_input = (double *)malloc(sizeof(double) * this->hidden_layer_sizes[l-1]);
HiddenLayer_sample_h_given_v(&(this->sigmoid_layers[l-1]), \
prev_layer_input, layer_input);
free(prev_layer_input);
}
}
RBM_contrastive_divergence(&(this->rbm_layers[i]), layer_input, lr, k);
}
}
}
free(train_X);
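/* only the last sample's layer_input buffer is freed here; buffers allocated for earlier samples leak */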
free(layer_input);
}
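/* Fine-tuning here updates only the output LogisticRegression layer; the hidden layers just produce (sampled) features and their weights are not touched. */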
void DBN_finetune(DBN* this, double *input, int *label, double lr, int epochs) {
int i, j, m, n, epoch;
double *layer_input;
int prev_layer_input_size;
double *prev_layer_input;
double *train_X = (double *)malloc(sizeof(double) * this->n_ins);
int *train_Y = (int *)malloc(sizeof(int) * this->n_outs);
for(epoch=0; epoch<epochs; epoch++) {
for(n=0; n<this->N; n++) { // input x1...xN
// initial input
for(m=0; m<this->n_ins; m++) train_X[m] = input[n * this->n_ins + m];
for(m=0; m<this->n_outs; m++) train_Y[m] = label[n * this->n_outs + m];
// layer input
for(i=0; i<this->n_layers; i++) {
if(i == 0) {
prev_layer_input = (double *)malloc(sizeof(double) * this->n_ins);
for(j=0; j<this->n_ins; j++) prev_layer_input[j] = train_X[j];
} else {
prev_layer_input = (double *)malloc(sizeof(double) * this->hidden_layer_sizes[i-1]);
for(j=0; j<this->hidden_layer_sizes[i-1]; j++) prev_layer_input[j] = layer_input[j];
free(layer_input);
}
layer_input = (double*)malloc(sizeof(double) * this->hidden_layer_sizes[i]);
HiddenLayer_sample_h_given_v(&(this->sigmoid_layers[i]), \
prev_layer_input, layer_input);
free(prev_layer_input);
}
LogisticRegression_train(&(this->log_layer), layer_input, train_Y, lr);
}
// lr *= 0.95;
}
free(layer_input);
free(train_X);
free(train_Y);
}
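/* Prediction is a deterministic forward pass: sigmoid means are propagated instead of the binary samples drawn during training. */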
void DBN_predict(DBN* this, double *x, double *y) {
int i, j, k;
double *layer_input;
int prev_layer_input_size;
double *prev_layer_input;
double linear_output;
prev_layer_input = (double *)malloc(sizeof(double) * this->n_ins);
for(j=0; j<this->n_ins; j++) prev_layer_input[j] = x[j];
// layer activation
for(i=0; i<this->n_layers; i++) {
layer_input = (double *)malloc(sizeof(double) * this->sigmoid_layers[i].n_out);
for(k=0; k<this->sigmoid_layers[i].n_out; k++) {
linear_output = 0.0;
for(j=0; j<this->sigmoid_layers[i].n_in; j++) {
linear_output += this->sigmoid_layers[i].W[k][j] * prev_layer_input[j];
}
linear_output += this->sigmoid_layers[i].b[k];
layer_input[k] = sigmoid(linear_output);
}
free(prev_layer_input);
if(i < this->n_layers-1) {
prev_layer_input = (double *)malloc(sizeof(double) * this->sigmoid_layers[i].n_out);
for(j=0; j<this->sigmoid_layers[i].n_out; j++) prev_layer_input[j] = layer_input[j];
free(layer_input);
}
}
for(i=0; i<this->log_layer.n_out; i++) {
y[i] = 0;
for(j=0; j<this->log_layer.n_in; j++) {
y[i] += this->log_layer.W[i][j] * layer_input[j];
}
y[i] += this->log_layer.b[i];
}
LogisticRegression_softmax(&(this->log_layer), y);
free(layer_input);
}
// HiddenLayer
void HiddenLayer__construct(HiddenLayer* this, int N, int n_in, int n_out, \
double **W, double *b) {
int i, j;
double a = 1.0 / n_in;
this->N = N;
this->n_in = n_in;
this->n_out = n_out;
if(W == NULL) {
this->W = (double **)malloc(sizeof(double*) * n_out);
this->W[0] = (double *)malloc(sizeof(double) * n_in * n_out);
for(i=0; i<n_out; i++) this->W[i] = this->W[0] + i * n_in;
for(i=0; i<n_out; i++) {
for(j=0; j<n_in; j++) {
this->W[i][j] = uniform(-a, a);
}
}
} else {
this->W = W;
}
if(b == NULL) {
this->b = (double *)malloc(sizeof(double) * n_out);
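/* note: b is malloc'd but never zero-initialized here; DBN__construct later shares this same buffer with the RBM as hbias */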
} else {
this->b = b;
}
}
void HiddenLayer__destruct(HiddenLayer* this) {
free(this->W[0]);
free(this->W);
free(this->b);
}
double HiddenLayer_output(HiddenLayer* this, double *input, double *w, double b) {
int j;
double linear_output = 0.0;
for(j=0; j<this->n_in; j++) {
linear_output += w[j] * input[j];
}
linear_output += b;
return sigmoid(linear_output);
}
void HiddenLayer_sample_h_given_v(HiddenLayer* this, double *input, double *sample) {
int i;
for(i=0; i<this->n_out; i++) {
sample[i] = binomial(1, HiddenLayer_output(this, input, this->W[i], this->b[i]));
}
}
// RBM
void RBM__construct(RBM* this, int N, int n_visible, int n_hidden, \
double **W, double *hbias, double *vbias) {
int i, j;
double a = 1.0 / n_visible;
this->N = N;
this->n_visible = n_visible;
this->n_hidden = n_hidden;
if(W == NULL) {
this->W = (double **)malloc(sizeof(double*) * n_hidden);
this->W[0] = (double *)malloc(sizeof(double) * n_visible * n_hidden);
for(i=0; i<n_hidden; i++) this->W[i] = this->W[0] + i * n_visible;
for(i=0; i<n_hidden; i++) {
for(j=0; j<n_visible; j++) {
this->W[i][j] = uniform(-a, a);
}
}
} else {
this->W = W;
}
if(hbias == NULL) {
this->hbias = (double *)malloc(sizeof(double) * n_hidden);
for(i=0; i<n_hidden; i++) this->hbias[i] = 0;
} else {
this->hbias = hbias;
}
if(vbias == NULL) {
this->vbias = (double *)malloc(sizeof(double) * n_visible);
for(i=0; i<n_visible; i++) this->vbias[i] = 0;
} else {
this->vbias = vbias;
}
}
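/* W and hbias are shared with the corresponding HiddenLayer (see DBN__construct), so only vbias is freed here. */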
void RBM__destruct(RBM* this) {
// free(this->W[0]);
// free(this->W);
// free(this->hbias);
free(this->vbias);
}
void RBM_contrastive_divergence(RBM* this, double *input, double lr, int k) {
int i, j, step;
double *ph_mean = (double *)malloc(sizeof(double) * this->n_hidden);
double *ph_sample = (double *)malloc(sizeof(double) * this->n_hidden);
double *nv_means = (double *)malloc(sizeof(double) * this->n_visible);
double *nv_samples = (double *)malloc(sizeof(double) * this->n_visible);
double *nh_means = (double *)malloc(sizeof(double) * this->n_hidden);
double *nh_samples = (double *)malloc(sizeof(double) * this->n_hidden);
/* CD-k */
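/* positive phase: p(h|v0) and a binary sample of it; negative phase: k alternating Gibbs steps;
   the updates below take the difference between the two phases' statistics */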
RBM_sample_h_given_v(this, input, ph_mean, ph_sample);
for(step=0; step<k; step++) {
if(step == 0) {
RBM_gibbs_hvh(this, ph_sample, nv_means, nv_samples, nh_means, nh_samples);
} else {
RBM_gibbs_hvh(this, nh_samples, nv_means, nv_samples, nh_means, nh_samples);
}
}
for(i=0; i<this->n_hidden; i++) {
for(j=0; j<this->n_visible; j++) {
// this->W[i][j] += lr * (ph_sample[i] * input[j] - nh_means[i] * nv_samples[j]) / this->N;
this->W[i][j] += lr * (ph_mean[i] * input[j] - nh_means[i] * nv_samples[j]) / this->N;
}
this->hbias[i] += lr * (ph_sample[i] - nh_means[i]) / this->N;
}
for(i=0; i<this->n_visible; i++) {
this->vbias[i] += lr * (input[i] - nv_samples[i]) / this->N;
}
free(ph_mean);
free(ph_sample);
free(nv_means);
free(nv_samples);
free(nh_means);
free(nh_samples);
}
void RBM_sample_h_given_v(RBM* this, double *v0_sample, double *mean, double *sample) {
int i;
for(i=0; i<this->n_hidden; i++) {
mean[i] = RBM_propup(this, v0_sample, this->W[i], this->hbias[i]);
sample[i] = binomial(1, mean[i]);
}
}
void RBM_sample_v_given_h(RBM* this, double *h0_sample, double *mean, double *sample) {
int i;
for(i=0; i<this->n_visible; i++) {
mean[i] = RBM_propdown(this, h0_sample, i, this->vbias[i]);
sample[i] = binomial(1, mean[i]);
}
}
double RBM_propup(RBM* this, double *v, double *w, double b) {
int j;
double pre_sigmoid_activation = 0.0;
for(j=0; j<this->n_visible; j++) {
pre_sigmoid_activation += w[j] * v[j];
}
pre_sigmoid_activation += b;
return sigmoid(pre_sigmoid_activation);
}
double RBM_propdown(RBM* this, double *h, int i, double b) {
int j;
double pre_sigmoid_activation = 0.0;
for(j=0; j<this->n_hidden; j++) {
pre_sigmoid_activation += this->W[j][i] * h[j];
}
pre_sigmoid_activation += b;
return sigmoid(pre_sigmoid_activation);
}
void RBM_gibbs_hvh(RBM* this, double *h0_sample, double *nv_means, double *nv_samples, \
double *nh_means, double *nh_samples) {
RBM_sample_v_given_h(this, h0_sample, nv_means, nv_samples);
RBM_sample_h_given_v(this, nv_samples, nh_means, nh_samples);
}
void RBM_reconstruct(RBM* this, double *v, double *reconstructed_v) {
int i, j;
double *h = (double *)malloc(sizeof(double) * this->n_hidden);
double pre_sigmoid_activation;
for(i=0; i<this->n_hidden; i++) {
h[i] = RBM_propup(this, v, this->W[i], this->hbias[i]);
}
for(i=0; i<this->n_visible; i++) {
pre_sigmoid_activation = 0.0;
for(j=0; j<this->n_hidden; j++) {
pre_sigmoid_activation += this->W[j][i] * h[j];
}
pre_sigmoid_activation += this->vbias[i];
reconstructed_v[i] = sigmoid(pre_sigmoid_activation);
}
free(h);
}
// LogisticRegression
void LogisticRegression__construct(LogisticRegression *this, int N, int n_in, int n_out) {
int i, j;
this->N = N;
this->n_in = n_in;
this->n_out = n_out;
this->W = (double **)malloc(sizeof(double*) * n_out);
this->W[0] = (double *)malloc(sizeof(double) * n_in * n_out);
for(i=0; i<n_out; i++) this->W[i] = this->W[0] + i * n_in;
this->b = (double *)malloc(sizeof(double) * n_out);
for(i=0; i<n_out; i++) {
for(j=0; j<n_in; j++) {
this->W[i][j] = 0;
}
this->b[i] = 0;
}
}
void LogisticRegression__destruct(LogisticRegression *this) {
free(this->W[0]);
free(this->W);
free(this->b);
}
void LogisticRegression_train(LogisticRegression *this, double *x, int *y, double lr) {
int i,j;
double *p_y_given_x = (double *)malloc(sizeof(double) * this->n_out);
double *dy = (double *)malloc(sizeof(double) * this->n_out);
for(i=0; i<this->n_out; i++) {
p_y_given_x[i] = 0;
for(j=0; j<this->n_in; j++) {
p_y_given_x[i] += this->W[i][j] * x[j];
}
p_y_given_x[i] += this->b[i];
}
LogisticRegression_softmax(this, p_y_given_x);
for(i=0; i<this->n_out; i++) {
dy[i] = y[i] - p_y_given_x[i];
for(j=0; j<this->n_in; j++) {
this->W[i][j] += lr * dy[i] * x[j] / this->N;
}
this->b[i] += lr * dy[i] / this->N;
}
free(p_y_given_x);
free(dy);
}
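/* Softmax with the largest activation subtracted before exp() to avoid overflow;
   since max starts at 0.0, the inputs are only shifted when some activation is positive. */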
void LogisticRegression_softmax(LogisticRegression *this, double *x) {
int i;
double max = 0.0;
double sum = 0.0;
for(i=0; i<this->n_out; i++) if(max < x[i]) max = x[i];
for(i=0; i<this->n_out; i++) {
x[i] = exp(x[i] - max);
sum += x[i];
}
for(i=0; i<this->n_out; i++) x[i] /= sum;
}
void LogisticRegression_predict(LogisticRegression *this, double *x, double *y) {
int i,j;
for(i=0; i<this->n_out; i++) {
y[i] = 0;
for(j=0; j<this->n_in; j++) {
y[i] += this->W[i][j] * x[j];
}
y[i] += this->b[i];
}
LogisticRegression_softmax(this, y);
}
void test_dbn(void) {
srand(0);
int i, j;
double pretrain_lr = 0.1;
int pretraining_epochs = 1000;
int k = 1;
double finetune_lr = 0.1;
int finetune_epochs = 500;
int train_N = 75;
int test_N = 75;
int n_ins = 4;
int n_outs = 3;
int hidden_layer_sizes[] = {3, 3, 3};
int n_layers = sizeof(hidden_layer_sizes) / sizeof(hidden_layer_sizes[0]);
double train_X[75][4];
int train_Y[75][3];
double test_X[75][4];
double test_Y[75][3];
readFileData("trainingData.csv",train_X);
readFileLabel("trainingLabel.csv",train_Y);
readFileData("testData.csv",test_X);
// construct DBN
DBN dbn;
DBN__construct(&dbn, train_N, n_ins, hidden_layer_sizes, n_outs, n_layers);
// pretrain
DBN_pretrain(&dbn, *train_X, pretrain_lr, k, pretraining_epochs);
// finetune
DBN_finetune(&dbn, *train_X, *train_Y, finetune_lr, finetune_epochs);
// test
for(i=0; i<test_N; i++) {
DBN_predict(&dbn, test_X[i], test_Y[i]);
for(j=0; j<n_outs; j++) {
printf("%.5f ", test_Y[i][j]);
}
printf("\n");
}
// destruct DBN
DBN__destruct(&dbn);
}
int main(void) {
test_dbn();
return 0;
}
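For completeness, I build and run it with something along the lines of the following (gcc assumed, with the three CSV files in the working directory):
gcc -o DBNTrainingDouble DBNTrainingDouble.c -lm
./DBNTrainingDouble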
Training data (odd-numbered rows of iris)
trainingData.csv
0.222222222,0.625,0.06779661,0.041666667
0.111111111,0.5,0.050847458,0.041666667
0.194444444,0.666666667,0.06779661,0.041666667
0.083333333,0.583333333,0.06779661,0.083333333
0.027777778,0.375,0.06779661,0.041666667
0.305555556,0.708333333,0.084745763,0.041666667
0.138888889,0.416666667,0.06779661,0
0.416666667,0.833333333,0.033898305,0.041666667
0.305555556,0.791666667,0.050847458,0.125
0.388888889,0.75,0.118644068,0.083333333
0.305555556,0.583333333,0.118644068,0.041666667
0.083333333,0.666666667,0,0.041666667
0.138888889,0.583333333,0.152542373,0.041666667
0.194444444,0.583333333,0.101694915,0.125
0.25,0.583333333,0.06779661,0.041666667
0.138888889,0.458333333,0.101694915,0.041666667
0.25,0.875,0.084745763,0
0.166666667,0.458333333,0.084745763,0.041666667
0.333333333,0.625,0.050847458,0.041666667
0.027777778,0.416666667,0.050847458,0.041666667
0.194444444,0.625,0.050847458,0.083333333
0.027777778,0.5,0.050847458,0.041666667
0.222222222,0.75,0.152542373,0.125
0.222222222,0.75,0.101694915,0.041666667
0.277777778,0.708333333,0.084745763,0.041666667
0.75,0.5,0.627118644,0.541666667
0.722222222,0.458333333,0.661016949,0.583333333
0.611111111,0.333333333,0.610169492,0.583333333
0.555555556,0.541666667,0.627118644,0.625
0.638888889,0.375,0.610169492,0.5
0.194444444,0,0.423728814,0.375
0.472222222,0.083333333,0.508474576,0.375
0.361111111,0.375,0.440677966,0.5
0.361111111,0.416666667,0.593220339,0.583333333
0.527777778,0.083333333,0.593220339,0.583333333
0.444444444,0.5,0.644067797,0.708333333
0.555555556,0.208333333,0.661016949,0.583333333
0.583333333,0.375,0.559322034,0.5
0.694444444,0.333333333,0.644067797,0.541666667
0.472222222,0.375,0.593220339,0.583333333
0.333333333,0.166666667,0.474576271,0.416666667
0.416666667,0.291666667,0.491525424,0.458333333
0.305555556,0.416666667,0.593220339,0.583333333
0.666666667,0.458333333,0.627118644,0.583333333
0.361111111,0.416666667,0.525423729,0.5
0.333333333,0.25,0.576271186,0.458333333
0.416666667,0.25,0.508474576,0.458333333
0.361111111,0.291666667,0.542372881,0.5
0.388888889,0.375,0.542372881,0.5
0.222222222,0.208333333,0.338983051,0.416666667
0.555555556,0.541666667,0.847457627,1
0.777777778,0.416666667,0.830508475,0.833333333
0.611111111,0.416666667,0.813559322,0.875
0.166666667,0.208333333,0.593220339,0.666666667
0.666666667,0.208333333,0.813559322,0.708333333
0.611111111,0.5,0.694915254,0.791666667
0.694444444,0.416666667,0.762711864,0.833333333
0.416666667,0.333333333,0.694915254,0.958333333
0.611111111,0.416666667,0.762711864,0.708333333
0.944444444,0.25,1,0.916666667
0.722222222,0.5,0.796610169,0.916666667
0.944444444,0.333333333,0.966101695,0.791666667
0.666666667,0.541666667,0.796610169,0.833333333
0.527777778,0.333333333,0.644067797,0.708333333
0.583333333,0.333333333,0.779661017,0.833333333
0.861111111,0.333333333,0.86440678,0.75
0.583333333,0.333333333,0.779661017,0.875
0.5,0.25,0.779661017,0.541666667
0.555555556,0.583333333,0.779661017,0.958333333
0.472222222,0.416666667,0.644067797,0.708333333
0.666666667,0.458333333,0.779661017,0.958333333
0.416666667,0.291666667,0.694915254,0.75
0.666666667,0.541666667,0.796610169,1
0.555555556,0.208333333,0.677966102,0.75
0.527777778,0.583333333,0.745762712,0.916666667
Training labels
trainingLabel.csv
1,0,0
1,0,0
1,0,0
1,0,0
1,0,0
1,0,0
1,0,0
1,0,0
1,0,0
1,0,0
1,0,0
1,0,0
1,0,0
1,0,0
1,0,0
1,0,0
1,0,0
1,0,0
1,0,0
1,0,0
1,0,0
1,0,0
1,0,0
1,0,0
1,0,0
0,1,0
0,1,0
0,1,0
0,1,0
0,1,0
0,1,0
0,1,0
0,1,0
0,1,0
0,1,0
0,1,0
0,1,0
0,1,0
0,1,0
0,1,0
0,1,0
0,1,0
0,1,0
0,1,0
0,1,0
0,1,0
0,1,0
0,1,0
0,1,0
0,1,0
0,0,1
0,0,1
0,0,1
0,0,1
0,0,1
0,0,1
0,0,1
0,0,1
0,0,1
0,0,1
0,0,1
0,0,1
0,0,1
0,0,1
0,0,1
0,0,1
0,0,1
0,0,1
0,0,1
0,0,1
0,0,1
0,0,1
0,0,1
0,0,1
0,0,1
Test data (even-numbered rows of iris)
testData.csv
0.166666667,0.416666667,0.06779661,0.041666667
0.083333333,0.458333333,0.084745763,0.041666667
0.305555556,0.791666667,0.118644068,0.125
0.194444444,0.583333333,0.084745763,0.041666667
0.166666667,0.458333333,0.084745763,0
0.138888889,0.583333333,0.101694915,0.041666667
0,0.416666667,0.016949153,0
0.388888889,1,0.084745763,0.125
0.222222222,0.625,0.06779661,0.083333333
0.222222222,0.75,0.084745763,0.083333333
0.222222222,0.708333333,0.084745763,0.125
0.222222222,0.541666667,0.118644068,0.166666667
0.194444444,0.416666667,0.101694915,0.041666667
0.25,0.625,0.084745763,0.041666667
0.111111111,0.5,0.101694915,0.041666667
0.305555556,0.583333333,0.084745763,0.125
0.333333333,0.916666667,0.06779661,0.041666667
0.194444444,0.5,0.033898305,0.041666667
0.166666667,0.666666667,0.06779661,0
0.222222222,0.583333333,0.084745763,0.041666667
0.055555556,0.125,0.050847458,0.083333333
0.194444444,0.625,0.101694915,0.208333333
0.138888889,0.416666667,0.06779661,0.083333333
0.083333333,0.5,0.06779661,0.041666667
0.194444444,0.541666667,0.06779661,0.041666667
0.583333333,0.5,0.593220339,0.583333333
0.333333333,0.125,0.508474576,0.5
0.388888889,0.333333333,0.593220339,0.5
0.166666667,0.166666667,0.389830508,0.375
0.25,0.291666667,0.491525424,0.541666667
0.444444444,0.416666667,0.542372881,0.583333333
0.5,0.375,0.627118644,0.541666667
0.666666667,0.458333333,0.576271186,0.541666667
0.416666667,0.291666667,0.525423729,0.375
0.361111111,0.208333333,0.491525424,0.416666667
0.5,0.333333333,0.508474576,0.5
0.5,0.333333333,0.627118644,0.458333333
0.638888889,0.416666667,0.576271186,0.541666667
0.666666667,0.416666667,0.677966102,0.666666667
0.388888889,0.25,0.423728814,0.375
0.333333333,0.166666667,0.457627119,0.375
0.472222222,0.291666667,0.694915254,0.625
0.472222222,0.583333333,0.593220339,0.625
0.555555556,0.125,0.576271186,0.5
0.333333333,0.208333333,0.508474576,0.5
0.5,0.416666667,0.610169492,0.541666667
0.194444444,0.125,0.389830508,0.375
0.388888889,0.416666667,0.542372881,0.458333333
0.527777778,0.375,0.559322034,0.5
0.388888889,0.333333333,0.525423729,0.5
0.416666667,0.291666667,0.694915254,0.75
0.555555556,0.375,0.779661017,0.708333333
0.916666667,0.416666667,0.949152542,0.833333333
0.833333333,0.375,0.898305085,0.708333333
0.805555556,0.666666667,0.86440678,1
0.583333333,0.291666667,0.728813559,0.75
0.388888889,0.208333333,0.677966102,0.791666667
0.583333333,0.5,0.728813559,0.916666667
0.944444444,0.75,0.966101695,0.875
0.472222222,0.083333333,0.677966102,0.583333333
0.361111111,0.333333333,0.661016949,0.791666667
0.555555556,0.291666667,0.661016949,0.708333333
0.805555556,0.5,0.847457627,0.708333333
0.5,0.416666667,0.661016949,0.708333333
0.805555556,0.416666667,0.813559322,0.625
1,0.75,0.915254237,0.791666667
0.555555556,0.333333333,0.694915254,0.583333333
0.944444444,0.416666667,0.86440678,0.916666667
0.583333333,0.458333333,0.762711864,0.708333333
0.722222222,0.458333333,0.745762712,0.833333333
0.722222222,0.458333333,0.694915254,0.916666667
0.694444444,0.5,0.830508475,0.916666667
0.666666667,0.416666667,0.711864407,0.916666667
0.611111111,0.416666667,0.711864407,0.791666667
0.444444444,0.416666667,0.694915254,0.708333333