
Logistic Regression with TensorFlow.js

Demo

Logistic regression implemented with TensorFlow.js.

If you are not sure what logistic regression is, see the previous article.

The prediction model uses softmax.
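For a single point, softmax turns the per-class scores into probabilities that sum to 1, and the class with the highest probability is the prediction. A minimal sketch of what tf.softmax does (the score values here are made-up examples, not from the demo):

const scores = tf.tensor1d([2.0, 1.0, 0.5, 0.1]); // one raw score per class
tf.softmax(scores).print(); // prints probabilities summing to 1, roughly [0.57, 0.21, 0.13, 0.09]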

  1. Click New Category Data to create a new category
  2. Click on the chart to add a few training data points for that category
  3. Repeat steps 1 and 2 to create training points for several categories (a rough sketch of how the clicks could map to the training arrays follows this list)
  4. Click train to start training
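The demo's UI code is not shown in this post; the following is only a hypothetical sketch of how the button and chart clicks could be collected into the arrays the training code expects (the element ids and variable names are illustrative, not from the original demo):

let currentLabel = -1; // incremented each time "New Category Data" is clicked
const train_data = [], train_label = [];

document.getElementById('new-category').addEventListener('click', () => currentLabel++);
document.getElementById('chart').addEventListener('click', (e) => {
  train_data.push([e.offsetX, e.offsetY]); // one 2-D point per click
  train_label.push(currentLabel);          // tagged with the current category index
});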


JavaScript Code

const train_data = [[25,84],[22,72],[59,76],[43,89],[35,64],[57,61],[81,75],[85,56],[41,23],[21,34],[23,43],[61,37],[53,13],[41,9],[45,57],[38,43],[58,43],[48,36],[52,50],[84,49],[79,19],[81,28],[69,27],[73,9],[93,12],[96,37]],
train_label = [0,0,0,0,0,0,0,0,1,1,1,1,1,1,2,2,2,2,2,2,3,3,3,3,3,3];

function logistic_regression(train_data, train_label) {
  const numIterations = 100;
  const learningRate = 0.1;
  const optimizer = tf.train.adam(learningRate);

  // Count how many distinct categories (classes) the labels contain
  const number_of_labels = Array.from(new Set(train_label)).length;
  const number_of_data = train_label.length;

  // Weights: one column per class (2 input features); bias: one entry per class
  const w = tf.variable(tf.zeros([2, number_of_labels]));
  const b = tf.variable(tf.zeros([number_of_labels]));

  const train_x = tf.tensor2d(train_data, [train_data.length, 2]);
  const train_y = tf.tensor1d(train_label, 'int32');

  // Model: softmax(xW + b) turns linear scores into class probabilities
  function predict(x) {
    return tf.softmax(tf.add(tf.matMul(x, w), b));
  }

  // Cross-entropy loss: -mean(sum(y * log(p))) over the one-hot encoded labels
  function loss(predictions, labels) {
    const y = tf.oneHot(labels, number_of_labels);
    const entropy = tf.neg(tf.mean(tf.sum(tf.mul(y, tf.log(predictions)), 1)));
    return entropy;
  }

  for (let iter = 0; iter < numIterations; iter++) {
    optimizer.minimize(() => {
      const loss_var = loss(predict(train_x), train_y);
      loss_var.print(); // log the loss so training progress is visible
      return loss_var;
    });
  }
}

logistic_regression(train_data, train_label)
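The function above trains the weights but does not return anything, so the trained model cannot be queried from outside. A minimal sketch of how predictions could be read out, assuming logistic_regression were modified to end with return predict; (the point [30, 80] is an arbitrary example):

const predict = logistic_regression(train_data, train_label); // assumes it now returns predict
const probs = predict(tf.tensor2d([[30, 80]], [1, 2]));        // class probabilities for one point
probs.argMax(1).print();                                       // index of the most likely category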


This work is licensed under a Creative Commons Attribution 4.0 International License.
