
TFJS model only predicts the same value for a binary classification task

How to fix a TFJS model that only predicts the same value for a binary classification task

I have a dataset of size [299,13] (containing both the data and the labels), and the model keeps outputting/predicting the same value. This is a binary classification task. How do I get my model's predictions to not always be the same value?

Here is the code (with some dummy data):

//X is the data and y is the label

    
   var Dataset = tf.tensor([[1,0.491821360184978,9,314,0.504585169147173,542,1231,3213,1,0.267304071302649,3,0.615917680092409,0],[0,0.72959029133292,758,0.402582737085955,400,1788,4599,0.532702887951197,4,0.18630897965037,1],[1,0.198764110760428,5,787,0.65507860022684,887,192,4831,0.739456077544426,0.100068056951143,0.583574833590476,596,0.933996451580092,631,331,811,0.258445986493932,7,0.811276729811182,0.701499878184206,8,854,0.0326334179806069,845,470,4930,0.825469683527519,0.448086959665654,0.954482878414911,2,468,0.736300149681564,557,3110,739,0.325783042694677,0.43488580142501,0.384845877769,662,0.265402742189238,649,384,1158,0.484884260891815,0.915444292219105,0.379266474923531,551,0.275982850450116,1022,3329,1413,0.237295089390298,0.817104709627837,0.691365367558705,549,0.479627221800976,796,3381,495,0.37129382411555,0.332832739155564,0.433042848178662,529,0.545178403950882,842,4768,506,0.386370525896832,0.189942077251933,0.611272282663452,823,0.737901576655264,839,2724,1787,0.365032317656007,6,0.884073622694046,0.0084315409129881,352,0.76858549557176,476,685,4796,0.302944943656102,0.849655932794213,0.977380232874908,701,0.588833228576897,999,2897,3325,0.418024491281536,0.631872118440871,0.419601058571829,10,0.0157052616592944,1009,4438,113,0.909015627566542,0.0297684897733232,0.739471449044276,836,0.0430176780439737,1030,1456,3932,0.331426481315121,0.734008754824423,0.00209807072438295,0.499622407429238,418,1912,4452,0.727130871883893,0.157427964683612,0.956533819923862,681,0.196708599930969,829,4562,1718,0.233193195569506,0.60582783922237,0.504637155233183,809,0.608861975627751,717,130,4194,0.134197560919101,0.375188428842507,0.747363884375055,522,0.868234577182028,849,3529,1192,0.0322641640468155,0.185973206518818,0.244142898027225,402,0.0280582030746698,315,3576,3882,0.724916254371562,0.062229775169706,0.858414851618448,459,0.367325906336267,616,930,3892,0.177388425930446,0.859824526007041,0.921555604905976,863,0.821166873626313,528,1624,1289,0.366243396916411,0.453840754701258,0.171321120311715,524,0.177251413832862,1608,3123,0.192861821442111,0.122983286410146,0.539946042901786,692,0.817780349862711,392,1053,4891,0.409578972921785,0.0453862502541893,0.996848843212564,0.877740438211017,762,3046,843,0.888578696082088,0.877971306478434,0.218116987741582,655,0.240496962520226,407,1001,1474,0.976212355833712,0.936396547703282,1]])
    function onBatchEnd(batch,logs) {
        console.log('Accuracy',logs.acc);
    }
    
    var x = Dataset.slice([0,0],[-1,12])
    const y = Dataset.slice([0,12],[-1,1])
    
    const model = tf.sequential({
        layers: [
            tf.layers.dense({ inputShape: [12], units: 12, activation: "sigmoid" }),
            tf.layers.dense({ units: 8, activation: "relu" }),
            tf.layers.dense({ units: 4, activation: "tanh" }),
            tf.layers.dense({ units: 1, activation: "sigmoid" })
        ]
    })
    
    model.compile({
        optimizer: tf.train.adam(0.001),
        loss: "binaryCrossentropy",
        metrics: ["accuracy"]
    })
    
    model.fit(x,y,{
        shuffle: true,
        epochs: 100,
        //validationSplit: 0.1,
        callbacks: { onBatchEnd }
    }).then(info => {
        var predictions = model.predict(x)
        console.log('Final accuracy',info.history.acc);
        console.log("Predictions: ")
        console.log(predictions.dataSync());
    })

Solution

   tf.layers.dense({ units: 1,activation: "sigmoid" })

You only have a single class prediction, but for binary classification you need two target classes and a softmax activation. See the cs231n notes on Linear Classification.
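A minimal sketch of what that change could look like (illustrative code, not part of the original answer: it reuses the question's hidden layers but swaps the output for two softmax units and a categorical cross-entropy loss, which expects one-hot labels):

// Sketch only: two-class softmax head with categorical cross-entropy.
// Hidden layer sizes are simply copied from the question, not tuned.
const model = tf.sequential({
    layers: [
        tf.layers.dense({ inputShape: [12], units: 12, activation: "sigmoid" }),
        tf.layers.dense({ units: 8, activation: "relu" }),
        tf.layers.dense({ units: 2, activation: "softmax" })  // two target classes
    ]
})

model.compile({
    optimizer: tf.train.adam(0.001),
    loss: "categoricalCrossentropy",  // expects one-hot labels of shape [N, 2]
    metrics: ["accuracy"]
})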


The model uses a sigmoid activation for binary classification, so the last layer should have 2 units:

tf.layers.dense({ units: 2,activation: "sigmoid" })

The innermost dimension of the label tensor y is 1, with values of 0 or 1. That tensor should be one-hot encoded.

const x = Dataset.slice([0,0],[-1,12])
const y = Dataset.slice([0,12],[-1,1])

const z = y.cast('int32').reshape([-1]).oneHot(2)
z.print()
console.log(z.shape) // [26,2]
// now use z instead of y
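Putting the pieces together, training on the one-hot labels would then look roughly like this (again only a sketch, assuming the output layer was changed to 2 units as described above; x and z are the tensors defined in the snippets above):

// Sketch only: fit on the one-hot labels z instead of y.
model.fit(x, z, {
    shuffle: true,
    epochs: 100
}).then(info => {
    const predictions = model.predict(x)   // shape [N, 2], one probability per class
    const classes = predictions.argMax(1)  // index of the most probable class per row
    console.log('Final accuracy', info.history.acc)
    classes.print()
})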
