
Custom loss functions and evaluation metrics in MXNet

Using MakeLoss to define a custom loss function in MXNet

mxnet.symbol.MakeLoss(data=None, grad_scale=_Null, valid_thresh=_Null, normalization=_Null, name=None, attr=None, out=None, **kwargs)

MakeLoss turns any symbolic expression into a loss: the expression's value becomes the network output, and its gradient is what gets backpropagated. grad_scale scales that gradient, and normalization controls whether the loss is divided by the batch size ('batch'), by the number of valid elements as determined by valid_thresh ('valid'), or left unnormalized ('null').

For example, binary cross entropy (note the leading minus sign, since MakeLoss minimizes whatever it is given):

cross_entropy = -(label * log(out) + (1 - label) * log(1 - out))
loss = MakeLoss(cross_entropy)
A complete, runnable example:

# -*- coding=utf-8 -*-
import mxnet as mx
import numpy as np
import logging

logging.basicConfig(level=logging.INFO)

# a one-unit network with a sigmoid output
x = mx.sym.Variable('data')
y = mx.sym.FullyConnected(data=x, num_hidden=1)
label = mx.sym.Variable('label')
out = mx.sym.Activation(data=y, act_type='sigmoid')

# binary cross entropy wrapped in MakeLoss
cross_entropy = -(label * mx.sym.log(out) + (1 - label) * mx.sym.log(1 - out))
loss = mx.sym.MakeLoss(cross_entropy)

# group the loss with a gradient-blocked copy of the prediction
pred_loss = mx.sym.Group([mx.sym.BlockGrad(out), loss])
ex = pred_loss.simple_bind(mx.cpu(), data=(32, 2))

# test on random inputs
test_data = mx.nd.array(np.random.random(size=(32, 2)))
test_label = mx.nd.array(np.random.random(size=(32, 1)))
ex.forward(is_train=True, data=test_data, label=test_label)
ex.backward()

print(ex.arg_dict)
fc_w = ex.arg_dict['fullyconnected0_weight'].asnumpy()
fc_w_grad = ex.grad_arrays[1].asnumpy()
fc_bias = ex.arg_dict['fullyconnected0_bias'].asnumpy()
fc_bias_grad = ex.grad_arrays[2].asnumpy()
logging.info('fc_weight:{}, fc_weights_grad:{}'.format(fc_w, fc_w_grad))
logging.info('fc_bias:{}, fc_bias_grad:{}'.format(fc_bias, fc_bias_grad))
MakeLoss also makes it easy to weight each example's contribution to the loss: multiply the per-example cross entropy by a weights variable before wrapping it.

label = mx.sym.Variable('label')
out = mx.sym.Activation(data=final, act_type='sigmoid')  # 'final' is the last layer of your network
ce = -(label * mx.sym.log(out) + (1 - label) * mx.sym.log(1 - out))
weights = mx.sym.Variable('weights')
loss = mx.sym.MakeLoss(weights * ce, normalization='batch')

Then you feed your weight vector into the weights variable along with your normal input data and labels, as sketched below.
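Concretely, the weights can be exposed as an extra data input; a minimal sketch continuing from the snippet above (the array contents, batch size, and iterator/module names here are illustrative assumptions, not part of the original post):

import mxnet as mx
import numpy as np

X = np.random.random((32, 2)).astype('float32')          # inputs
Y = np.random.randint(0, 2, (32, 1)).astype('float32')   # binary labels
W = np.random.random((32, 1)).astype('float32')          # per-example weights

# 'weights' rides along as a second data input next to 'data'
train_iter = mx.io.NDArrayIter(data={'data': X, 'weights': W},
                               label={'label': Y}, batch_size=8)

mod = mx.mod.Module(symbol=loss,
                    data_names=('data', 'weights'),
                    label_names=('label',))
mod.fit(train_iter, eval_metric=mx.metric.Loss(), num_epoch=1)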

As an added tip, an MXNet network with a custom loss via MakeLoss outputs the loss, not the prediction. In practice you'll probably want both, in which case it's useful to group the loss with a gradient-blocked version of the prediction so that you can get both. You'd do that like this:
pred_loss = mx.sym.Group([mx.sym.BlockGrad(out), loss])
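Once the grouped symbol is bound, the outputs come back in the same order; for example, with a Module bound to pred_loss (a sketch, assuming a data batch named batch):

mod.forward(batch, is_train=False)
pred = mod.get_outputs()[0]        # the gradient-blocked prediction
loss_value = mod.get_outputs()[1]  # the MakeLoss output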

Creating your own evaluation metric with mxnet.metric.create(metric, *args, **kwargs)

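mx.metric.create accepts a built-in metric name, an EvalMetric instance, or a plain function of the form feval(label, pred), which it wraps in a CustomMetric. A minimal sketch (the binary_acc function here is an illustrative assumption):

import mxnet as mx

def binary_acc(label, pred):
    # feval receives numpy arrays; threshold sigmoid outputs at 0.5
    pred_label = (pred.ravel() >= 0.5)
    return (pred_label == label.ravel()).mean()

eval_metric = mx.metric.create(binary_acc)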

or

Adding your own loss and evaluation functions by subclassing mx.metric.EvalMetric

class Siamese_metric(mx.metric.EvalMetric):

    def __init__(self, name='siamese_acc'):
        super(Siamese_metric, self).__init__(name=name)

    def update(self, labels, preds):
        # preds[0] holds the predicted distances, labels[0] the ground truth
        pred = preds[0].asnumpy().ravel()
        label = labels[0].asnumpy().ravel()
        # a distance below 0.5 is predicted as the positive class
        pred_label = (pred < 0.5)
        self.sum_metric += (pred_label == label).sum()
        self.num_inst += len(label)


class Contrastive_loss(mx.metric.EvalMetric):

    def __init__(self, name='contrastive_loss'):
        super(Contrastive_loss, self).__init__(name=name)

    def update(self, labels, preds):
        # preds[1] is the MakeLoss output: one loss value per example
        loss = preds[1].asnumpy()
        self.sum_metric += loss.sum()
        self.num_inst += len(loss)
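To use both metrics during training, they can be combined with mx.metric.CompositeEvalMetric and passed to fit (a sketch, assuming a bound Module mod and a training iterator train_iter):

metric = mx.metric.CompositeEvalMetric()
metric.add(Siamese_metric())
metric.add(Contrastive_loss())
mod.fit(train_iter, eval_metric=metric, num_epoch=10)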