
Plotting Caffe loss curves in MATLAB

Method 1: Generate the curves from the training log file alone

Command

sudo ../caffe/build/tools/caffe train -solver solver_fully.prototxt -weights ../NJUDS2000.caffemodel -gpu 0 2>&1 | tee ./log/train.log

Code

function plot_log()
% Parse a Caffe training log and write iteration/value pairs for the train
% loss, test loss and learning rate to text files, then plot the curves.
logName = './train.log';
dir_log = '';
fid = fopen(logName, 'r');
test_loss  = fopen([dir_log 'test_loss.txt'], 'w');
train_loss = fopen([dir_log 'train_loss.txt'], 'w');
train_lr   = fopen([dir_log 'train_lr.txt'], 'w');
tline = fgetl(fid);
while ischar(tline)
    k = strfind(tline, 'Iteration');
    % only lines that report an iteration number are of interest
    if ~isempty(k)
        k = k(1);
        iter_start = k + 10;                      % first digit after 'Iteration '
        iter_end = strfind(tline(k:end), ',');    % comma that terminates the number
        iter = tline(iter_start : iter_end(1)+k-2);
        % store learning rate ("..., lr = 0.01")
        lr_k = strfind(tline, 'lr');
        if ~isempty(lr_k)
            lr_start = lr_k(1) + 5;               % skip 'lr = '
            lr = tline(lr_start : end);
            fprintf(train_lr, '%s\t%s\n', iter, lr);
        end
        % store train loss ("..., loss = 0.605")
        train_k = strfind(tline, 'loss');
        if ~isempty(train_k)
            train_start = train_k(1) + 7;         % skip 'loss = '
            loss_train = tline(train_start : end);
            fprintf(train_loss, '%s\t%s\n', iter, loss_train);
        end
        % store test loss: after a 'Testing' line, scan ahead for ': loss ='
        test_k = strfind(tline, 'Testing');
        if ~isempty(test_k)
            tline = fgetl(fid);
            flag = 1;
            while ischar(tline) && flag
                test_k = strfind(tline, ': loss =');
                if ~isempty(test_k)
                    flag = 0;
                    test_start = test_k(1) + 7;   % position of '='
                    test_end = strfind(tline(test_start:end), '(');
                    loss_test = tline(test_start+1 : test_start+test_end(1)-3);
                    fprintf(test_loss, '%s\t%s\n', iter, loss_test);
                else
                    % only advance while the loss line has not been found,
                    % so the training-loss line that follows is not skipped
                    tline = fgetl(fid);
                end
            end
        end
    end
    tline = fgetl(fid);
end
fclose(fid);
fclose(test_loss);
fclose(train_loss);
fclose(train_lr);
% plot: each text file holds two columns, iteration and value

train_loss = importdata([dir_log 'train_loss.txt']);
if ~isempty(train_loss)
    figure(1)
    plot(train_loss(:,1), train_loss(:,2));
    xlabel('iteration'); ylabel('loss');
    title('train-loss vs. Iterations')
end

test_loss = importdata([dir_log 'test_loss.txt']);
if ~isempty(test_loss)
    figure(2)
    plot(test_loss(:,1), test_loss(:,2));
    xlabel('iteration'); ylabel('loss');
    title('test-loss vs. Iterations')
end

% train_lr = importdata([dir_log 'train_lr.txt']);
% if ~isempty(train_lr)
%     figure(3)
%     plot(train_lr(:,1), train_lr(:,2));
%     title('train-lr vs. Iterations')
% end
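
The raw train loss written above is usually quite noisy, so it can help to smooth it before comparing it with the test loss. The snippet below is an optional addition, not part of the original script: it assumes train_loss.txt and test_loss.txt generated above are in the current directory, and the window size of 20 is an arbitrary choice.

% Optional: overlay both curves and smooth the noisy train loss.
tr = importdata('train_loss.txt');   % columns: [iteration, loss]
te = importdata('test_loss.txt');
win = 20;                            % moving-average window (arbitrary choice)
tr_smooth = filter(ones(win,1)/win, 1, tr(:,2));
figure;
plot(tr(:,1), tr(:,2), 'Color', [0.8 0.8 1]); hold on;   % raw train loss
plot(tr(:,1), tr_smooth, 'b', 'LineWidth', 1);           % smoothed train loss
plot(te(:,1), te(:,2), 'r', 'LineWidth', 1); hold off;   % test loss
xlabel('iteration'); ylabel('loss');
legend('train (raw)', 'train (smoothed)', 'test');
grid on;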

Method 2: Use Caffe's built-in tools

This method uses the three files extract_seconds.py, plot_training_log.py.example, and parse_log.sh from the caffe-master/tools/extra directory.

Parse the log file (this writes train.log.train and train.log.test into ./log/, the same intermediate files that Method 3 below reads):
python parse_log.py ./log/train.log ./log/

Plot the curves:
./plot_training_log.py.example 0 ./log/test_acc.png ./log/train.log

where the first argument selects the chart type (a MATLAB sketch that drives this script for all eight types follows the list):
0: test accuracy vs. iterations
1: test accuracy vs. seconds
2: test loss vs. iterations
3: test loss vs. seconds
4: train learning rate vs. iterations
5: train learning rate vs. seconds
6: train loss vs. iterations
7: train loss vs. seconds
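
If you prefer to drive the script from MATLAB instead of the shell, the sketch below loops over all eight chart types. It is not part of the Caffe tools; the tools directory is a placeholder you need to adjust, and the script is invoked through python as in the commands above.

% Convenience loop; tools_dir and the output paths are placeholders to adjust.
tools_dir = '/path/to/caffe/tools/extra/';   % directory holding plot_training_log.py.example
log_file  = './log/train.log';
for chart_type = 0:7
    out_png = sprintf('./log/chart_%d.png', chart_type);
    cmd = sprintf('python %splot_training_log.py.example %d %s %s', ...
                  tools_dir, chart_type, out_png, log_file);
    status = system(cmd);
    if status ~= 0
        warning('chart type %d failed', chart_type);
    end
end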

Method 3: Use the intermediate files produced by Caffe's tools

Python

import pandas as pd
import matplotlib.pyplot as plt

# train.log.train / train.log.test are produced by tools/extra/parse_log.py
train_log = pd.read_csv("train.log.train")
test_log = pd.read_csv("train.log.test")

_, ax1 = plt.subplots()
ax1.set_title("train loss and test loss")
ax1.plot(train_log["NumIters"], train_log["loss"], alpha=0.5, label="train loss")
ax1.plot(test_log["NumIters"], test_log["loss"], 'g', label="test loss")
ax1.set_xlabel('iteration')
ax1.set_ylabel('train loss')
ax1.legend(loc='upper left')

# accuracy on a second y-axis (column names depend on the network's output blobs)
ax2 = ax1.twinx()
ax2.plot(test_log["NumIters"], test_log["acc/top-1"], 'r', label="top-1 accuracy")
ax2.plot(test_log["NumIters"], test_log["acc/top-5"], 'm', label="top-5 accuracy")
ax2.set_ylabel('test accuracy')
ax2.legend(loc='upper right')

plt.show()
print('Done.')

MATLAB

clear all; clc; close all;

% paths to the training log, the Caffe parse_log.py helper, and the output directory
dir_log  = '/usr/home/matlab_Program/rgbd-saliency-master_6_input/training/log/';
dir_py   = './extra/parse_log.py';
dir_save = '/usr/home/matlab_Program/rgbd-saliency-master_6_input/training/log/';

% run parse_log.py to split train.log into train.log.train / train.log.test
command = ['python ' dir_py ' ' dir_log 'train.log ' dir_save];
[status, cmdout] = system(command);
if status ~= 0
    disp('error....');
    disp(cmdout);
end

train_data = importdata([dir_log 'train.log.train']);
test_data  = importdata([dir_log 'train.log.test']);

% column 1 is NumIters; column 8 holds the loss in this particular parsed log
% (the index depends on how many outputs your network logs)
train_x = train_data.data(:,1);
train_y = train_data.data(:,8);

test_x = test_data.data(:,1);
test_y = test_data.data(:,8);

figure;
plot(train_x, train_y, '-r', 'LineWidth', 0.5); hold on;
plot(test_x,  test_y,  '-g', 'LineWidth', 0.5); hold off;
xlabel('iter'); ylabel('loss');
% axis([0 train_x(end) 0 max(train_y)+0.01]);
legend('train-loss', 'test-loss');
grid on; title('Train-Test-loss');
disp('ok');
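
Hard-coding column 8 is brittle, because parse_log.py writes one column per logged network output and the order depends on the model. As an optional addition (assuming importdata exposes the header row through its colheaders field, and that your loss column is actually named 'loss'), the index can be looked up by name instead:

% Look up the loss column by header name instead of hard-coding index 8.
hdr = strtrim(train_data.colheaders);        % header row parsed by importdata (assumption)
loss_col = find(strcmp(hdr, 'loss'), 1);     % replace 'loss' with your loss output's name
if isempty(loss_col)
    error('no column named ''loss'' in train.log.train');
end
train_y = train_data.data(:, loss_col);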