Demo
Library imports and settings
%reload_ext autoreload
%autoreload 2
%matplotlib inline
import lightgbm as lgb
from matplotlib import pyplot as plt
from matplotlib import rcParams
import numpy as np
from pathlib import Path
import pandas as pd
from sklearn.metrics import accuracy_score
from sklearn.model_selection import StratifiedKFold
import seaborn as sns
import warnings
rcParams['figure.figsize'] = (16, 8)
plt.style.use('fivethirtyeight')
pd.set_option('display.max_columns', 100)
pd.set_option("display.precision", 4)
warnings.simplefilter('ignore')
Loading the training data
Uses the feature.csv feature file generated in 03-pandas-eda.ipynb.
data_dir = Path('../data/dacon-dku')
feature_dir = Path('../build/feature')
val_dir = Path('../build/val')
tst_dir = Path('../build/tst')
sub_dir = Path('../build/sub')
trn_file = data_dir / 'train.csv'
tst_file = data_dir / 'test.csv'
sample_file = data_dir / 'sample_submission.csv'
target_col = 'class'
n_fold = 5
n_class = 3
seed = 42
algo_name = 'lgbcv'
feature_name = 'feature'
model_name = f'{algo_name}_{feature_name}'
feature_file = feature_dir / f'{feature_name}.csv'
p_val_file = val_dir / f'{model_name}.val.csv'
p_tst_file = tst_dir / f'{model_name}.tst.csv'
sub_file = sub_dir / f'{model_name}.csv'
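The prediction and submission files are written under ../build, which may not exist on a fresh checkout. A minimal setup sketch (not part of the original notebook) that creates the output directories before anything is written to them:
# Hypothetical setup step: create the output directories if they are missing,
# so the savetxt/to_csv calls later in the notebook do not fail.
for d in (feature_dir, val_dir, tst_dir, sub_dir):
    d.mkdir(parents=True, exist_ok=True)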
df = pd.read_csv(feature_file, index_col=0)
print(df.shape)
df.head()
(400000, 20)
id | z | redshift | dered_u | dered_g | dered_r | dered_i | dered_z | nObserve | airmass_u | class | d_dered_u | d_dered_g | d_dered_r | d_dered_i | d_dered_z | d_dered_ig | d_dered_zg | d_dered_rz | d_dered_iz | d_obs_det
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---
0 | 16.9396 | -8.1086e-05 | 23.1243 | 20.2578 | 18.9551 | 17.6321 | 16.9089 | 2.9444 | 1.1898 | 0.0 | -0.1397 | -0.0790 | -0.0544 | -0.0403 | -0.0307 | -2.6257 | -3.3488 | 2.0462 | 0.7232 | -15.0556 |
1 | 13.1689 | 4.5061e-03 | 14.9664 | 14.0045 | 13.4114 | 13.2363 | 13.1347 | 0.6931 | 1.2533 | 1.0 | -0.0857 | -0.0574 | -0.0410 | -0.0322 | -0.0343 | -0.7683 | -0.8698 | 0.2767 | 0.1016 | -0.3069 |
2 | 15.3500 | 4.7198e-04 | 16.6076 | 15.6866 | 15.4400 | 15.3217 | 15.2961 | 1.0986 | 1.0225 | 0.0 | -0.1787 | -0.1388 | -0.0963 | -0.0718 | -0.0540 | -0.3649 | -0.3905 | 0.1440 | 0.0257 | -0.9014 |
3 | 19.6346 | 5.8143e-06 | 25.3536 | 20.9947 | 20.0873 | 19.7947 | 19.5552 | 1.6094 | 1.2054 | 0.0 | -0.3070 | -0.1941 | -0.1339 | -0.1003 | -0.0795 | -1.2000 | -1.4395 | 0.5321 | 0.2395 | -1.3906 |
4 | 17.9826 | -3.3247e-05 | 23.7714 | 20.4338 | 18.8630 | 18.1903 | 17.8759 | 2.6391 | 1.1939 | 0.0 | -0.6820 | -0.2653 | -0.1794 | -0.1339 | -0.1067 | -2.2436 | -2.5579 | 0.9871 | 0.3144 | -9.3609 |
y = df[target_col].values[:320000]     # labels are available only for the first 320,000 rows (training set)
df.drop(target_col, axis=1, inplace=True)
trn = df.iloc[:320000].values          # training features
tst = df.iloc[320000:].values          # test features (remaining 80,000 rows)
feature_names = df.columns.tolist()    # the 19 feature column names
print(y.shape, trn.shape, tst.shape)
(320000,) (320000, 19) (80000, 19)
Stratified K-Fold Cross Validation
cv = StratifiedKFold(n_splits=n_fold, shuffle=True, random_state=seed)
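As a quick sanity check, a sketch (added here, not in the original notebook) that prints the class ratio of each validation fold; StratifiedKFold should keep these close to the overall class distribution:
# Each validation fold should contain roughly the same proportion of the three classes.
for i, (i_trn, i_val) in enumerate(cv.split(trn, y), 1):
    ratios = np.bincount(y[i_val].astype(int), minlength=n_class) / len(i_val)
    print(f'fold {i} class ratios: {np.round(ratios, 4)}')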
Training the LightGBM model
p_val = np.zeros((trn.shape[0], n_class))   # out-of-fold predicted probabilities
p_tst = np.zeros((tst.shape[0], n_class))   # test probabilities averaged over the folds
for i, (i_trn, i_val) in enumerate(cv.split(trn, y), 1):
    print(f'training model for CV #{i}')
    clf = lgb.LGBMClassifier(objective='multiclass',
                             n_estimators=1000,
                             num_leaves=64,
                             learning_rate=0.1,
                             min_child_samples=10,
                             subsample=.5,
                             subsample_freq=1,
                             colsample_bytree=.8,
                             random_state=seed,
                             n_jobs=-1)
    # Fit on the in-fold rows, monitor multi-class log loss on the held-out fold,
    # and stop once the validation score has not improved for 10 rounds.
    clf.fit(trn[i_trn], y[i_trn],
            eval_set=[(trn[i_val], y[i_val])],
            eval_metric='multiclass',
            early_stopping_rounds=10)
    p_val[i_val, :] = clf.predict_proba(trn[i_val])
    p_tst += clf.predict_proba(tst) / n_fold
training model for CV #1
[1] valid_0's multi_logloss: 0.888783
Training until validation scores don't improve for 10 rounds
[2] valid_0's multi_logloss: 0.808303
[3] valid_0's multi_logloss: 0.735768
[4] valid_0's multi_logloss: 0.674393
[5] valid_0's multi_logloss: 0.621266
[6] valid_0's multi_logloss: 0.579717
[7] valid_0's multi_logloss: 0.540529
[8] valid_0's multi_logloss: 0.508314
[9] valid_0's multi_logloss: 0.481377
[10] valid_0's multi_logloss: 0.451845
[11] valid_0's multi_logloss: 0.426169
[12] valid_0's multi_logloss: 0.402967
[13] valid_0's multi_logloss: 0.382223
[14] valid_0's multi_logloss: 0.363352
[15] valid_0's multi_logloss: 0.346798
[16] valid_0's multi_logloss: 0.332813
[17] valid_0's multi_logloss: 0.319653
[18] valid_0's multi_logloss: 0.307032
[19] valid_0's multi_logloss: 0.295718
[20] valid_0's multi_logloss: 0.285881
[21] valid_0's multi_logloss: 0.276321
[22] valid_0's multi_logloss: 0.267506
[23] valid_0's multi_logloss: 0.259526
[24] valid_0's multi_logloss: 0.252791
[25] valid_0's multi_logloss: 0.246756
[26] valid_0's multi_logloss: 0.242056
[27] valid_0's multi_logloss: 0.236363
[28] valid_0's multi_logloss: 0.230898
[29] valid_0's multi_logloss: 0.226091
[30] valid_0's multi_logloss: 0.221748
[31] valid_0's multi_logloss: 0.218616
[32] valid_0's multi_logloss: 0.215371
[33] valid_0's multi_logloss: 0.211716
[34] valid_0's multi_logloss: 0.208485
[35] valid_0's multi_logloss: 0.205515
[36] valid_0's multi_logloss: 0.20296
[37] valid_0's multi_logloss: 0.200538
[38] valid_0's multi_logloss: 0.198057
[39] valid_0's multi_logloss: 0.195934
[40] valid_0's multi_logloss: 0.193947
[41] valid_0's multi_logloss: 0.192
[42] valid_0's multi_logloss: 0.190347
[43] valid_0's multi_logloss: 0.188778
[44] valid_0's multi_logloss: 0.187407
[45] valid_0's multi_logloss: 0.186234
[46] valid_0's multi_logloss: 0.184883
[47] valid_0's multi_logloss: 0.183621
[48] valid_0's multi_logloss: 0.182644
[49] valid_0's multi_logloss: 0.181481
[50] valid_0's multi_logloss: 0.180526
[51] valid_0's multi_logloss: 0.179553
[52] valid_0's multi_logloss: 0.178721
[53] valid_0's multi_logloss: 0.177925
[54] valid_0's multi_logloss: 0.177253
[55] valid_0's multi_logloss: 0.176668
[56] valid_0's multi_logloss: 0.17604
[57] valid_0's multi_logloss: 0.175452
[58] valid_0's multi_logloss: 0.174856
[59] valid_0's multi_logloss: 0.174347
[60] valid_0's multi_logloss: 0.173863
[61] valid_0's multi_logloss: 0.173395
[62] valid_0's multi_logloss: 0.173009
[63] valid_0's multi_logloss: 0.172583
[64] valid_0's multi_logloss: 0.172102
[65] valid_0's multi_logloss: 0.171626
[66] valid_0's multi_logloss: 0.171315
[67] valid_0's multi_logloss: 0.170991
[68] valid_0's multi_logloss: 0.170588
[69] valid_0's multi_logloss: 0.170238
[70] valid_0's multi_logloss: 0.169865
[71] valid_0's multi_logloss: 0.169607
[72] valid_0's multi_logloss: 0.169319
[73] valid_0's multi_logloss: 0.169108
[74] valid_0's multi_logloss: 0.168895
[75] valid_0's multi_logloss: 0.168608
[76] valid_0's multi_logloss: 0.168286
[77] valid_0's multi_logloss: 0.168004
[78] valid_0's multi_logloss: 0.167797
[79] valid_0's multi_logloss: 0.167615
[80] valid_0's multi_logloss: 0.168315
[81] valid_0's multi_logloss: 0.16751
[82] valid_0's multi_logloss: 0.167307
[83] valid_0's multi_logloss: 0.167124
[84] valid_0's multi_logloss: 0.16698
[85] valid_0's multi_logloss: 0.16677
[86] valid_0's multi_logloss: 0.166628
[87] valid_0's multi_logloss: 0.166428
[88] valid_0's multi_logloss: 0.167046
[89] valid_0's multi_logloss: 0.166202
[90] valid_0's multi_logloss: 0.166032
[91] valid_0's multi_logloss: 0.166864
[92] valid_0's multi_logloss: 0.167182
[93] valid_0's multi_logloss: 0.16707
[94] valid_0's multi_logloss: 0.166343
[95] valid_0's multi_logloss: 0.166279
[96] valid_0's multi_logloss: 0.166104
[97] valid_0's multi_logloss: 0.167086
[98] valid_0's multi_logloss: 0.165807
[99] valid_0's multi_logloss: 0.166655
[100] valid_0's multi_logloss: 0.166165
[101] valid_0's multi_logloss: 0.169185
[102] valid_0's multi_logloss: 0.170863
[103] valid_0's multi_logloss: 0.168526
[104] valid_0's multi_logloss: 0.167332
[105] valid_0's multi_logloss: 0.170462
[106] valid_0's multi_logloss: 0.169335
[107] valid_0's multi_logloss: 0.170213
[108] valid_0's multi_logloss: 0.172375
Early stopping, best iteration is:
[98] valid_0's multi_logloss: 0.165807
training model for CV #2
[1] valid_0's multi_logloss: 0.889196
Training until validation scores don't improve for 10 rounds
[2] valid_0's multi_logloss: 0.808799
[3] valid_0's multi_logloss: 0.736544
[4] valid_0's multi_logloss: 0.674999
[5] valid_0's multi_logloss: 0.622015
[6] valid_0's multi_logloss: 0.580525
[7] valid_0's multi_logloss: 0.541204
[8] valid_0's multi_logloss: 0.509103
[9] valid_0's multi_logloss: 0.481765
[10] valid_0's multi_logloss: 0.452333
[11] valid_0's multi_logloss: 0.426822
[12] valid_0's multi_logloss: 0.403522
[13] valid_0's multi_logloss: 0.383115
[14] valid_0's multi_logloss: 0.364452
[15] valid_0's multi_logloss: 0.347696
[16] valid_0's multi_logloss: 0.333724
[17] valid_0's multi_logloss: 0.320589
[18] valid_0's multi_logloss: 0.308423
[19] valid_0's multi_logloss: 0.297079
[20] valid_0's multi_logloss: 0.287164
[21] valid_0's multi_logloss: 0.277648
[22] valid_0's multi_logloss: 0.269049
[23] valid_0's multi_logloss: 0.261231
[24] valid_0's multi_logloss: 0.254586
[25] valid_0's multi_logloss: 0.248492
[26] valid_0's multi_logloss: 0.24376
[27] valid_0's multi_logloss: 0.238
[28] valid_0's multi_logloss: 0.232758
[29] valid_0's multi_logloss: 0.228041
[30] valid_0's multi_logloss: 0.223499
[31] valid_0's multi_logloss: 0.220296
[32] valid_0's multi_logloss: 0.217039
[33] valid_0's multi_logloss: 0.213456
[34] valid_0's multi_logloss: 0.21002
[35] valid_0's multi_logloss: 0.207116
[36] valid_0's multi_logloss: 0.204514
[37] valid_0's multi_logloss: 0.201951
[38] valid_0's multi_logloss: 0.199577
[39] valid_0's multi_logloss: 0.197492
[40] valid_0's multi_logloss: 0.195487
[41] valid_0's multi_logloss: 0.193514
[42] valid_0's multi_logloss: 0.191853
[43] valid_0's multi_logloss: 0.190222
[44] valid_0's multi_logloss: 0.188677
[45] valid_0's multi_logloss: 0.187531
[46] valid_0's multi_logloss: 0.186253
[47] valid_0's multi_logloss: 0.185096
[48] valid_0's multi_logloss: 0.184136
[49] valid_0's multi_logloss: 0.183068
[50] valid_0's multi_logloss: 0.181948
[51] valid_0's multi_logloss: 0.181008
[52] valid_0's multi_logloss: 0.180142
[53] valid_0's multi_logloss: 0.179345
[54] valid_0's multi_logloss: 0.178623
[55] valid_0's multi_logloss: 0.177935
[56] valid_0's multi_logloss: 0.177315
[57] valid_0's multi_logloss: 0.176741
[58] valid_0's multi_logloss: 0.176108
[59] valid_0's multi_logloss: 0.175577
[60] valid_0's multi_logloss: 0.17505
[61] valid_0's multi_logloss: 0.174551
[62] valid_0's multi_logloss: 0.173974
[63] valid_0's multi_logloss: 0.173549
[64] valid_0's multi_logloss: 0.173064
[65] valid_0's multi_logloss: 0.172609
[66] valid_0's multi_logloss: 0.172186
[67] valid_0's multi_logloss: 0.171723
[68] valid_0's multi_logloss: 0.171192
[69] valid_0's multi_logloss: 0.170821
[70] valid_0's multi_logloss: 0.170537
[71] valid_0's multi_logloss: 0.17032
[72] valid_0's multi_logloss: 0.170013
[73] valid_0's multi_logloss: 0.169805
[74] valid_0's multi_logloss: 0.169566
[75] valid_0's multi_logloss: 0.169248
[76] valid_0's multi_logloss: 0.168999
[77] valid_0's multi_logloss: 0.168772
[78] valid_0's multi_logloss: 0.168648
[79] valid_0's multi_logloss: 0.16842
[80] valid_0's multi_logloss: 0.168233
[81] valid_0's multi_logloss: 0.168195
[82] valid_0's multi_logloss: 0.167973
[83] valid_0's multi_logloss: 0.167777
[84] valid_0's multi_logloss: 0.1679
[85] valid_0's multi_logloss: 0.16766
[86] valid_0's multi_logloss: 0.167503
[87] valid_0's multi_logloss: 0.167306
[88] valid_0's multi_logloss: 0.167131
[89] valid_0's multi_logloss: 0.166976
[90] valid_0's multi_logloss: 0.166824
[91] valid_0's multi_logloss: 0.166619
[92] valid_0's multi_logloss: 0.166511
[93] valid_0's multi_logloss: 0.166377
[94] valid_0's multi_logloss: 0.166228
[95] valid_0's multi_logloss: 0.166155
[96] valid_0's multi_logloss: 0.166021
[97] valid_0's multi_logloss: 0.165876
[98] valid_0's multi_logloss: 0.165769
[99] valid_0's multi_logloss: 0.165733
[100] valid_0's multi_logloss: 0.165622
[101] valid_0's multi_logloss: 0.165559
[102] valid_0's multi_logloss: 0.165496
[103] valid_0's multi_logloss: 0.165434
[104] valid_0's multi_logloss: 0.165403
[105] valid_0's multi_logloss: 0.165334
[106] valid_0's multi_logloss: 0.165257
[107] valid_0's multi_logloss: 0.165189
[108] valid_0's multi_logloss: 0.16508
[109] valid_0's multi_logloss: 0.164988
[110] valid_0's multi_logloss: 0.164952
[111] valid_0's multi_logloss: 0.164895
[112] valid_0's multi_logloss: 0.164917
[113] valid_0's multi_logloss: 0.16486
[114] valid_0's multi_logloss: 0.164812
[115] valid_0's multi_logloss: 0.164754
[116] valid_0's multi_logloss: 0.164707
[117] valid_0's multi_logloss: 0.164655
[118] valid_0's multi_logloss: 0.164612
[119] valid_0's multi_logloss: 0.164579
[120] valid_0's multi_logloss: 0.1645
[121] valid_0's multi_logloss: 0.164443
[122] valid_0's multi_logloss: 0.164383
[123] valid_0's multi_logloss: 0.164306
[124] valid_0's multi_logloss: 0.164247
[125] valid_0's multi_logloss: 0.164163
[126] valid_0's multi_logloss: 0.164096
[127] valid_0's multi_logloss: 0.164114
[128] valid_0's multi_logloss: 0.164024
[129] valid_0's multi_logloss: 0.163967
[130] valid_0's multi_logloss: 0.163937
[131] valid_0's multi_logloss: 0.163912
[132] valid_0's multi_logloss: 0.163861
[133] valid_0's multi_logloss: 0.163826
[134] valid_0's multi_logloss: 0.16374
[135] valid_0's multi_logloss: 0.163647
[136] valid_0's multi_logloss: 0.163582
[137] valid_0's multi_logloss: 0.163554
[138] valid_0's multi_logloss: 0.16351
[139] valid_0's multi_logloss: 0.163475
[140] valid_0's multi_logloss: 0.163484
[141] valid_0's multi_logloss: 0.163467
[142] valid_0's multi_logloss: 0.163407
[143] valid_0's multi_logloss: 0.163359
[144] valid_0's multi_logloss: 0.163332
[145] valid_0's multi_logloss: 0.163309
[146] valid_0's multi_logloss: 0.163314
[147] valid_0's multi_logloss: 0.163334
[148] valid_0's multi_logloss: 0.163351
[149] valid_0's multi_logloss: 0.163353
[150] valid_0's multi_logloss: 0.163355
[151] valid_0's multi_logloss: 0.163362
[152] valid_0's multi_logloss: 0.163349
[153] valid_0's multi_logloss: 0.163287
[154] valid_0's multi_logloss: 0.163232
[155] valid_0's multi_logloss: 0.163155
[156] valid_0's multi_logloss: 0.163137
[157] valid_0's multi_logloss: 0.163132
[158] valid_0's multi_logloss: 0.16313
[159] valid_0's multi_logloss: 0.16309
[160] valid_0's multi_logloss: 0.163092
[161] valid_0's multi_logloss: 0.163018
[162] valid_0's multi_logloss: 0.162994
[163] valid_0's multi_logloss: 0.162964
[164] valid_0's multi_logloss: 0.162962
[165] valid_0's multi_logloss: 0.162929
[166] valid_0's multi_logloss: 0.16294
[167] valid_0's multi_logloss: 0.162894
[168] valid_0's multi_logloss: 0.16286
[169] valid_0's multi_logloss: 0.162843
[170] valid_0's multi_logloss: 0.162827
[171] valid_0's multi_logloss: 0.162802
[172] valid_0's multi_logloss: 0.162782
[173] valid_0's multi_logloss: 0.162789
[174] valid_0's multi_logloss: 0.162755
[175] valid_0's multi_logloss: 0.162724
[176] valid_0's multi_logloss: 0.162724
[177] valid_0's multi_logloss: 0.162715
[178] valid_0's multi_logloss: 0.162672
[179] valid_0's multi_logloss: 0.162673
[180] valid_0's multi_logloss: 0.162613
[181] valid_0's multi_logloss: 0.162627
[182] valid_0's multi_logloss: 0.162585
[183] valid_0's multi_logloss: 0.162541
[184] valid_0's multi_logloss: 0.1625
[185] valid_0's multi_logloss: 0.162509
[186] valid_0's multi_logloss: 0.162474
[187] valid_0's multi_logloss: 0.162466
[188] valid_0's multi_logloss: 0.162457
[189] valid_0's multi_logloss: 0.162467
[190] valid_0's multi_logloss: 0.162478
[191] valid_0's multi_logloss: 0.162466
[192] valid_0's multi_logloss: 0.162458
[193] valid_0's multi_logloss: 0.162448
[194] valid_0's multi_logloss: 0.162471
[195] valid_0's multi_logloss: 0.162481
[196] valid_0's multi_logloss: 0.162467
[197] valid_0's multi_logloss: 0.162441
[198] valid_0's multi_logloss: 0.162424
[199] valid_0's multi_logloss: 0.162397
[200] valid_0's multi_logloss: 0.162407
[201] valid_0's multi_logloss: 0.162397
[202] valid_0's multi_logloss: 0.162426
[203] valid_0's multi_logloss: 0.162468
[204] valid_0's multi_logloss: 0.162469
[205] valid_0's multi_logloss: 0.162431
[206] valid_0's multi_logloss: 0.162439
[207] valid_0's multi_logloss: 0.162464
[208] valid_0's multi_logloss: 0.162486
[209] valid_0's multi_logloss: 0.162448
[210] valid_0's multi_logloss: 0.162418
[211] valid_0's multi_logloss: 0.162391
[212] valid_0's multi_logloss: 0.162373
[213] valid_0's multi_logloss: 0.162414
[214] valid_0's multi_logloss: 0.162408
[215] valid_0's multi_logloss: 0.162366
[216] valid_0's multi_logloss: 0.162336
[217] valid_0's multi_logloss: 0.162315
[218] valid_0's multi_logloss: 0.162364
[219] valid_0's multi_logloss: 0.162364
[220] valid_0's multi_logloss: 0.162351
[221] valid_0's multi_logloss: 0.162346
[222] valid_0's multi_logloss: 0.162334
[223] valid_0's multi_logloss: 0.162311
[224] valid_0's multi_logloss: 0.162338
[225] valid_0's multi_logloss: 0.162352
[226] valid_0's multi_logloss: 0.162333
[227] valid_0's multi_logloss: 0.162338
[228] valid_0's multi_logloss: 0.162326
[229] valid_0's multi_logloss: 0.162359
[230] valid_0's multi_logloss: 0.162324
[231] valid_0's multi_logloss: 0.162294
[232] valid_0's multi_logloss: 0.16229
[233] valid_0's multi_logloss: 0.162317
[234] valid_0's multi_logloss: 0.162341
[235] valid_0's multi_logloss: 0.162334
[236] valid_0's multi_logloss: 0.162322
[237] valid_0's multi_logloss: 0.162331
[238] valid_0's multi_logloss: 0.162318
[239] valid_0's multi_logloss: 0.162328
[240] valid_0's multi_logloss: 0.162268
[241] valid_0's multi_logloss: 0.162259
[242] valid_0's multi_logloss: 0.162256
[243] valid_0's multi_logloss: 0.162244
[244] valid_0's multi_logloss: 0.162193
[245] valid_0's multi_logloss: 0.162188
[246] valid_0's multi_logloss: 0.162186
[247] valid_0's multi_logloss: 0.162201
[248] valid_0's multi_logloss: 0.162203
[249] valid_0's multi_logloss: 0.162183
[250] valid_0's multi_logloss: 0.162169
[251] valid_0's multi_logloss: 0.162191
[252] valid_0's multi_logloss: 0.162205
[253] valid_0's multi_logloss: 0.162186
[254] valid_0's multi_logloss: 0.162181
[255] valid_0's multi_logloss: 0.162193
[256] valid_0's multi_logloss: 0.162175
[257] valid_0's multi_logloss: 0.162145
[258] valid_0's multi_logloss: 0.162128
[259] valid_0's multi_logloss: 0.162106
[260] valid_0's multi_logloss: 0.162127
[261] valid_0's multi_logloss: 0.16214
[262] valid_0's multi_logloss: 0.162142
[263] valid_0's multi_logloss: 0.162155
[264] valid_0's multi_logloss: 0.162185
[265] valid_0's multi_logloss: 0.162204
[266] valid_0's multi_logloss: 0.162169
[267] valid_0's multi_logloss: 0.162156
[268] valid_0's multi_logloss: 0.162113
[269] valid_0's multi_logloss: 0.16212
Early stopping, best iteration is:
[259] valid_0's multi_logloss: 0.162106
training model for CV #3
[1] valid_0's multi_logloss: 0.889249
Training until validation scores don't improve for 10 rounds
[2] valid_0's multi_logloss: 0.809122
[3] valid_0's multi_logloss: 0.736794
[4] valid_0's multi_logloss: 0.675361
[5] valid_0's multi_logloss: 0.62244
[6] valid_0's multi_logloss: 0.580969
[7] valid_0's multi_logloss: 0.54176
[8] valid_0's multi_logloss: 0.509455
[9] valid_0's multi_logloss: 0.482426
[10] valid_0's multi_logloss: 0.452944
[11] valid_0's multi_logloss: 0.427421
[12] valid_0's multi_logloss: 0.404237
[13] valid_0's multi_logloss: 0.383643
[14] valid_0's multi_logloss: 0.364981
[15] valid_0's multi_logloss: 0.348324
[16] valid_0's multi_logloss: 0.334311
[17] valid_0's multi_logloss: 0.321182
[18] valid_0's multi_logloss: 0.308879
[19] valid_0's multi_logloss: 0.297679
[20] valid_0's multi_logloss: 0.287845
[21] valid_0's multi_logloss: 0.278486
[22] valid_0's multi_logloss: 0.269943
[23] valid_0's multi_logloss: 0.26191
[24] valid_0's multi_logloss: 0.255075
[25] valid_0's multi_logloss: 0.249062
[26] valid_0's multi_logloss: 0.244481
[27] valid_0's multi_logloss: 0.238609
[28] valid_0's multi_logloss: 0.233285
[29] valid_0's multi_logloss: 0.228507
[30] valid_0's multi_logloss: 0.224082
[31] valid_0's multi_logloss: 0.220916
[32] valid_0's multi_logloss: 0.217435
[33] valid_0's multi_logloss: 0.213868
[34] valid_0's multi_logloss: 0.210625
[35] valid_0's multi_logloss: 0.207675
[36] valid_0's multi_logloss: 0.205147
[37] valid_0's multi_logloss: 0.20264
[38] valid_0's multi_logloss: 0.200329
[39] valid_0's multi_logloss: 0.198171
[40] valid_0's multi_logloss: 0.196188
[41] valid_0's multi_logloss: 0.194275
[42] valid_0's multi_logloss: 0.192641
[43] valid_0's multi_logloss: 0.191064
[44] valid_0's multi_logloss: 0.189526
[45] valid_0's multi_logloss: 0.188352
[46] valid_0's multi_logloss: 0.187087
[47] valid_0's multi_logloss: 0.18584
[48] valid_0's multi_logloss: 0.184776
[49] valid_0's multi_logloss: 0.183723
[50] valid_0's multi_logloss: 0.182657
[51] valid_0's multi_logloss: 0.181664
[52] valid_0's multi_logloss: 0.180843
[53] valid_0's multi_logloss: 0.180019
[54] valid_0's multi_logloss: 0.179386
[55] valid_0's multi_logloss: 0.178744
[56] valid_0's multi_logloss: 0.178092
[57] valid_0's multi_logloss: 0.177489
[58] valid_0's multi_logloss: 0.176858
[59] valid_0's multi_logloss: 0.176222
[60] valid_0's multi_logloss: 0.175636
[61] valid_0's multi_logloss: 0.175149
[62] valid_0's multi_logloss: 0.174722
[63] valid_0's multi_logloss: 0.174323
[64] valid_0's multi_logloss: 0.173928
[65] valid_0's multi_logloss: 0.17354
[66] valid_0's multi_logloss: 0.17317
[67] valid_0's multi_logloss: 0.172813
[68] valid_0's multi_logloss: 0.172394
[69] valid_0's multi_logloss: 0.172156
[70] valid_0's multi_logloss: 0.171763
[71] valid_0's multi_logloss: 0.17141
[72] valid_0's multi_logloss: 0.17114
[73] valid_0's multi_logloss: 0.17094
[74] valid_0's multi_logloss: 0.170764
[75] valid_0's multi_logloss: 0.170468
[76] valid_0's multi_logloss: 0.170239
[77] valid_0's multi_logloss: 0.169994
[78] valid_0's multi_logloss: 0.16984
[79] valid_0's multi_logloss: 0.169533
[80] valid_0's multi_logloss: 0.16949
[81] valid_0's multi_logloss: 0.169307
[82] valid_0's multi_logloss: 0.169063
[83] valid_0's multi_logloss: 0.168876
[84] valid_0's multi_logloss: 0.168713
[85] valid_0's multi_logloss: 0.168509
[86] valid_0's multi_logloss: 0.168341
[87] valid_0's multi_logloss: 0.168212
[88] valid_0's multi_logloss: 0.167988
[89] valid_0's multi_logloss: 0.167798
[90] valid_0's multi_logloss: 0.167651
[91] valid_0's multi_logloss: 0.16749
[92] valid_0's multi_logloss: 0.167412
[93] valid_0's multi_logloss: 0.167215
[94] valid_0's multi_logloss: 0.167101
[95] valid_0's multi_logloss: 0.170287
[96] valid_0's multi_logloss: 0.16803
[97] valid_0's multi_logloss: 0.167969
[98] valid_0's multi_logloss: 0.167877
[99] valid_0's multi_logloss: 0.167763
[100] valid_0's multi_logloss: 0.167683
[101] valid_0's multi_logloss: 0.167561
[102] valid_0's multi_logloss: 0.16756
[103] valid_0's multi_logloss: 0.167484
[104] valid_0's multi_logloss: 0.167416
Early stopping, best iteration is:
[94] valid_0's multi_logloss: 0.167101
training model for CV #4
[1] valid_0's multi_logloss: 0.888444
Training until validation scores don't improve for 10 rounds
[2] valid_0's multi_logloss: 0.808209
[3] valid_0's multi_logloss: 0.735937
[4] valid_0's multi_logloss: 0.674783
[5] valid_0's multi_logloss: 0.621897
[6] valid_0's multi_logloss: 0.580365
[7] valid_0's multi_logloss: 0.541209
[8] valid_0's multi_logloss: 0.509023
[9] valid_0's multi_logloss: 0.481935
[10] valid_0's multi_logloss: 0.452723
[11] valid_0's multi_logloss: 0.427167
[12] valid_0's multi_logloss: 0.404007
[13] valid_0's multi_logloss: 0.38353
[14] valid_0's multi_logloss: 0.36489
[15] valid_0's multi_logloss: 0.348212
[16] valid_0's multi_logloss: 0.334078
[17] valid_0's multi_logloss: 0.320865
[18] valid_0's multi_logloss: 0.308582
[19] valid_0's multi_logloss: 0.297223
[20] valid_0's multi_logloss: 0.287362
[21] valid_0's multi_logloss: 0.277935
[22] valid_0's multi_logloss: 0.269114
[23] valid_0's multi_logloss: 0.261328
[24] valid_0's multi_logloss: 0.254698
[25] valid_0's multi_logloss: 0.248603
[26] valid_0's multi_logloss: 0.243877
[27] valid_0's multi_logloss: 0.238129
[28] valid_0's multi_logloss: 0.232998
[29] valid_0's multi_logloss: 0.228377
[30] valid_0's multi_logloss: 0.2239
[31] valid_0's multi_logloss: 0.220838
[32] valid_0's multi_logloss: 0.217446
[33] valid_0's multi_logloss: 0.21386
[34] valid_0's multi_logloss: 0.210626
[35] valid_0's multi_logloss: 0.207634
[36] valid_0's multi_logloss: 0.205075
[37] valid_0's multi_logloss: 0.202535
[38] valid_0's multi_logloss: 0.200113
[39] valid_0's multi_logloss: 0.198067
[40] valid_0's multi_logloss: 0.196041
[41] valid_0's multi_logloss: 0.194079
[42] valid_0's multi_logloss: 0.192431
[43] valid_0's multi_logloss: 0.190869
[44] valid_0's multi_logloss: 0.189335
[45] valid_0's multi_logloss: 0.188161
[46] valid_0's multi_logloss: 0.186828
[47] valid_0's multi_logloss: 0.185722
[48] valid_0's multi_logloss: 0.184685
[49] valid_0's multi_logloss: 0.183571
[50] valid_0's multi_logloss: 0.182477
[51] valid_0's multi_logloss: 0.181491
[52] valid_0's multi_logloss: 0.180663
[53] valid_0's multi_logloss: 0.179869
[54] valid_0's multi_logloss: 0.179199
[55] valid_0's multi_logloss: 0.178568
[56] valid_0's multi_logloss: 0.177956
[57] valid_0's multi_logloss: 0.177412
[58] valid_0's multi_logloss: 0.176805
[59] valid_0's multi_logloss: 0.176151
[60] valid_0's multi_logloss: 0.175572
[61] valid_0's multi_logloss: 0.175012
[62] valid_0's multi_logloss: 0.174546
[63] valid_0's multi_logloss: 0.17412
[64] valid_0's multi_logloss: 0.173652
[65] valid_0's multi_logloss: 0.173179
[66] valid_0's multi_logloss: 0.172829
[67] valid_0's multi_logloss: 0.172439
[68] valid_0's multi_logloss: 0.172038
[69] valid_0's multi_logloss: 0.171713
[70] valid_0's multi_logloss: 0.17135
[71] valid_0's multi_logloss: 0.171109
[72] valid_0's multi_logloss: 0.170846
[73] valid_0's multi_logloss: 0.170631
[74] valid_0's multi_logloss: 0.170411
[75] valid_0's multi_logloss: 0.170113
[76] valid_0's multi_logloss: 0.169889
[77] valid_0's multi_logloss: 0.169646
[78] valid_0's multi_logloss: 0.169455
[79] valid_0's multi_logloss: 0.169212
[80] valid_0's multi_logloss: 0.169047
[81] valid_0's multi_logloss: 0.168871
[82] valid_0's multi_logloss: 0.168695
[83] valid_0's multi_logloss: 0.168491
[84] valid_0's multi_logloss: 0.168295
[85] valid_0's multi_logloss: 0.168075
[86] valid_0's multi_logloss: 0.167899
[87] valid_0's multi_logloss: 0.167795
[88] valid_0's multi_logloss: 0.167622
[89] valid_0's multi_logloss: 0.167486
[90] valid_0's multi_logloss: 0.167363
[91] valid_0's multi_logloss: 0.167272
[92] valid_0's multi_logloss: 0.167143
[93] valid_0's multi_logloss: 0.167038
[94] valid_0's multi_logloss: 0.166963
[95] valid_0's multi_logloss: 0.166828
[96] valid_0's multi_logloss: 0.166732
[97] valid_0's multi_logloss: 0.166643
[98] valid_0's multi_logloss: 0.166523
[99] valid_0's multi_logloss: 0.166445
[100] valid_0's multi_logloss: 0.166381
[101] valid_0's multi_logloss: 0.166311
[102] valid_0's multi_logloss: 0.166278
[103] valid_0's multi_logloss: 0.166201
[104] valid_0's multi_logloss: 0.166096
[105] valid_0's multi_logloss: 0.166066
[106] valid_0's multi_logloss: 0.165963
[107] valid_0's multi_logloss: 0.166188
[108] valid_0's multi_logloss: 0.166068
[109] valid_0's multi_logloss: 0.166041
[110] valid_0's multi_logloss: 0.165934
[111] valid_0's multi_logloss: 0.165894
[112] valid_0's multi_logloss: 0.165821
[113] valid_0's multi_logloss: 0.165826
[114] valid_0's multi_logloss: 0.165779
[115] valid_0's multi_logloss: 0.165732
[116] valid_0's multi_logloss: 0.165697
[117] valid_0's multi_logloss: 0.165644
[118] valid_0's multi_logloss: 0.165717
[119] valid_0's multi_logloss: 0.165651
[120] valid_0's multi_logloss: 0.165575
[121] valid_0's multi_logloss: 0.165519
[122] valid_0's multi_logloss: 0.165487
[123] valid_0's multi_logloss: 0.165481
[124] valid_0's multi_logloss: 0.165427
[125] valid_0's multi_logloss: 0.165354
[126] valid_0's multi_logloss: 0.16529
[127] valid_0's multi_logloss: 0.165262
[128] valid_0's multi_logloss: 0.165192
[129] valid_0's multi_logloss: 0.165145
[130] valid_0's multi_logloss: 0.165087
[131] valid_0's multi_logloss: 0.165057
[132] valid_0's multi_logloss: 0.165037
[133] valid_0's multi_logloss: 0.165019
[134] valid_0's multi_logloss: 0.164994
[135] valid_0's multi_logloss: 0.16499
[136] valid_0's multi_logloss: 0.164964
[137] valid_0's multi_logloss: 0.164927
[138] valid_0's multi_logloss: 0.164865
[139] valid_0's multi_logloss: 0.164844
[140] valid_0's multi_logloss: 0.164841
[141] valid_0's multi_logloss: 0.164825
[142] valid_0's multi_logloss: 0.164826
[143] valid_0's multi_logloss: 0.164768
[144] valid_0's multi_logloss: 0.164733
[145] valid_0's multi_logloss: 0.164709
[146] valid_0's multi_logloss: 0.164699
[147] valid_0's multi_logloss: 0.164654
[148] valid_0's multi_logloss: 0.164605
[149] valid_0's multi_logloss: 0.164593
[150] valid_0's multi_logloss: 0.164566
[151] valid_0's multi_logloss: 0.164526
[152] valid_0's multi_logloss: 0.164456
[153] valid_0's multi_logloss: 0.16446
[154] valid_0's multi_logloss: 0.164389
[155] valid_0's multi_logloss: 0.164354
[156] valid_0's multi_logloss: 0.164278
[157] valid_0's multi_logloss: 0.164207
[158] valid_0's multi_logloss: 0.164227
[159] valid_0's multi_logloss: 0.164233
[160] valid_0's multi_logloss: 0.164213
[161] valid_0's multi_logloss: 0.1642
[162] valid_0's multi_logloss: 0.164171
[163] valid_0's multi_logloss: 0.164187
[164] valid_0's multi_logloss: 0.1642
[165] valid_0's multi_logloss: 0.164176
[166] valid_0's multi_logloss: 0.164173
[167] valid_0's multi_logloss: 0.164186
[168] valid_0's multi_logloss: 0.164186
[169] valid_0's multi_logloss: 0.164177
[170] valid_0's multi_logloss: 0.164175
[171] valid_0's multi_logloss: 0.164156
[172] valid_0's multi_logloss: 0.164137
[173] valid_0's multi_logloss: 0.164095
[174] valid_0's multi_logloss: 0.164113
[175] valid_0's multi_logloss: 0.164103
[176] valid_0's multi_logloss: 0.164105
[177] valid_0's multi_logloss: 0.164108
[178] valid_0's multi_logloss: 0.16408
[179] valid_0's multi_logloss: 0.164036
[180] valid_0's multi_logloss: 0.163974
[181] valid_0's multi_logloss: 0.16396
[182] valid_0's multi_logloss: 0.163971
[183] valid_0's multi_logloss: 0.163926
[184] valid_0's multi_logloss: 0.163898
[185] valid_0's multi_logloss: 0.163904
[186] valid_0's multi_logloss: 0.163892
[187] valid_0's multi_logloss: 0.163876
[188] valid_0's multi_logloss: 0.163908
[189] valid_0's multi_logloss: 0.163908
[190] valid_0's multi_logloss: 0.163926
[191] valid_0's multi_logloss: 0.163874
[192] valid_0's multi_logloss: 0.163888
[193] valid_0's multi_logloss: 0.163868
[194] valid_0's multi_logloss: 0.16386
[195] valid_0's multi_logloss: 0.163868
[196] valid_0's multi_logloss: 0.163833
[197] valid_0's multi_logloss: 0.163797
[198] valid_0's multi_logloss: 0.163793
[199] valid_0's multi_logloss: 0.163779
[200] valid_0's multi_logloss: 0.163745
[201] valid_0's multi_logloss: 0.163764
[202] valid_0's multi_logloss: 0.163746
[203] valid_0's multi_logloss: 0.163752
[204] valid_0's multi_logloss: 0.163742
[205] valid_0's multi_logloss: 0.163741
[206] valid_0's multi_logloss: 0.163758
[207] valid_0's multi_logloss: 0.163756
[208] valid_0's multi_logloss: 0.163735
[209] valid_0's multi_logloss: 0.163746
[210] valid_0's multi_logloss: 0.163717
[211] valid_0's multi_logloss: 0.163694
[212] valid_0's multi_logloss: 0.163673
[213] valid_0's multi_logloss: 0.163678
[214] valid_0's multi_logloss: 0.163684
[215] valid_0's multi_logloss: 0.163705
[216] valid_0's multi_logloss: 0.163688
[217] valid_0's multi_logloss: 0.16368
[218] valid_0's multi_logloss: 0.16369
[219] valid_0's multi_logloss: 0.163681
[220] valid_0's multi_logloss: 0.163677
[221] valid_0's multi_logloss: 0.163664
[222] valid_0's multi_logloss: 0.163661
[223] valid_0's multi_logloss: 0.163628
[224] valid_0's multi_logloss: 0.163597
[225] valid_0's multi_logloss: 0.163598
[226] valid_0's multi_logloss: 0.163607
[227] valid_0's multi_logloss: 0.163606
[228] valid_0's multi_logloss: 0.163635
[229] valid_0's multi_logloss: 0.163651
[230] valid_0's multi_logloss: 0.163644
[231] valid_0's multi_logloss: 0.163639
[232] valid_0's multi_logloss: 0.163635
[233] valid_0's multi_logloss: 0.16366
[234] valid_0's multi_logloss: 0.163668
Early stopping, best iteration is:
[224] valid_0's multi_logloss: 0.163597
training model for CV #5
[1] valid_0's multi_logloss: 0.889301
Training until validation scores don't improve for 10 rounds
[2] valid_0's multi_logloss: 0.809186
[3] valid_0's multi_logloss: 0.736954
[4] valid_0's multi_logloss: 0.67563
[5] valid_0's multi_logloss: 0.622468
[6] valid_0's multi_logloss: 0.581094
[7] valid_0's multi_logloss: 0.541842
[8] valid_0's multi_logloss: 0.509582
[9] valid_0's multi_logloss: 0.482569
[10] valid_0's multi_logloss: 0.4531
[11] valid_0's multi_logloss: 0.427371
[12] valid_0's multi_logloss: 0.404198
[13] valid_0's multi_logloss: 0.383482
[14] valid_0's multi_logloss: 0.364686
[15] valid_0's multi_logloss: 0.348147
[16] valid_0's multi_logloss: 0.334011
[17] valid_0's multi_logloss: 0.320785
[18] valid_0's multi_logloss: 0.30852
[19] valid_0's multi_logloss: 0.297246
[20] valid_0's multi_logloss: 0.287348
[21] valid_0's multi_logloss: 0.277689
[22] valid_0's multi_logloss: 0.269092
[23] valid_0's multi_logloss: 0.26126
[24] valid_0's multi_logloss: 0.254451
[25] valid_0's multi_logloss: 0.248264
[26] valid_0's multi_logloss: 0.243645
[27] valid_0's multi_logloss: 0.23789
[28] valid_0's multi_logloss: 0.232534
[29] valid_0's multi_logloss: 0.227853
[30] valid_0's multi_logloss: 0.223361
[31] valid_0's multi_logloss: 0.22008
[32] valid_0's multi_logloss: 0.216581
[33] valid_0's multi_logloss: 0.213039
[34] valid_0's multi_logloss: 0.209877
[35] valid_0's multi_logloss: 0.206866
[36] valid_0's multi_logloss: 0.204435
[37] valid_0's multi_logloss: 0.201927
[38] valid_0's multi_logloss: 0.199452
[39] valid_0's multi_logloss: 0.197402
[40] valid_0's multi_logloss: 0.19534
[41] valid_0's multi_logloss: 0.19337
[42] valid_0's multi_logloss: 0.191749
[43] valid_0's multi_logloss: 0.190056
[44] valid_0's multi_logloss: 0.188624
[45] valid_0's multi_logloss: 0.187468
[46] valid_0's multi_logloss: 0.186104
[47] valid_0's multi_logloss: 0.184786
[48] valid_0's multi_logloss: 0.183777
[49] valid_0's multi_logloss: 0.182641
[50] valid_0's multi_logloss: 0.18162
[51] valid_0's multi_logloss: 0.180622
[52] valid_0's multi_logloss: 0.179904
[53] valid_0's multi_logloss: 0.179078
[54] valid_0's multi_logloss: 0.178425
[55] valid_0's multi_logloss: 0.177712
[56] valid_0's multi_logloss: 0.177134
[57] valid_0's multi_logloss: 0.176609
[58] valid_0's multi_logloss: 0.176058
[59] valid_0's multi_logloss: 0.175442
[60] valid_0's multi_logloss: 0.174972
[61] valid_0's multi_logloss: 0.174526
[62] valid_0's multi_logloss: 0.17405
[63] valid_0's multi_logloss: 0.173614
[64] valid_0's multi_logloss: 0.173127
[65] valid_0's multi_logloss: 0.172728
[66] valid_0's multi_logloss: 0.172312
[67] valid_0's multi_logloss: 0.17187
[68] valid_0's multi_logloss: 0.171479
[69] valid_0's multi_logloss: 0.171167
[70] valid_0's multi_logloss: 0.170794
[71] valid_0's multi_logloss: 0.170392
[72] valid_0's multi_logloss: 0.170082
[73] valid_0's multi_logloss: 0.169806
[74] valid_0's multi_logloss: 0.169614
[75] valid_0's multi_logloss: 0.169286
[76] valid_0's multi_logloss: 0.168994
[77] valid_0's multi_logloss: 0.168701
[78] valid_0's multi_logloss: 0.168551
[79] valid_0's multi_logloss: 0.16838
[80] valid_0's multi_logloss: 0.16818
[81] valid_0's multi_logloss: 0.167997
[82] valid_0's multi_logloss: 0.167787
[83] valid_0's multi_logloss: 0.167625
[84] valid_0's multi_logloss: 0.167457
[85] valid_0's multi_logloss: 0.167309
[86] valid_0's multi_logloss: 0.167155
[87] valid_0's multi_logloss: 0.166982
[88] valid_0's multi_logloss: 0.166867
[89] valid_0's multi_logloss: 0.167041
[90] valid_0's multi_logloss: 0.166743
[91] valid_0's multi_logloss: 0.166582
[92] valid_0's multi_logloss: 0.166462
[93] valid_0's multi_logloss: 0.166272
[94] valid_0's multi_logloss: 0.166177
[95] valid_0's multi_logloss: 0.16605
[96] valid_0's multi_logloss: 0.165935
[97] valid_0's multi_logloss: 0.165868
[98] valid_0's multi_logloss: 0.165753
[99] valid_0's multi_logloss: 0.165654
[100] valid_0's multi_logloss: 0.165534
[101] valid_0's multi_logloss: 0.165444
[102] valid_0's multi_logloss: 0.165319
[103] valid_0's multi_logloss: 0.16522
[104] valid_0's multi_logloss: 0.165149
[105] valid_0's multi_logloss: 0.165096
[106] valid_0's multi_logloss: 0.165002
[107] valid_0's multi_logloss: 0.164923
[108] valid_0's multi_logloss: 0.164866
[109] valid_0's multi_logloss: 0.16479
[110] valid_0's multi_logloss: 0.164734
[111] valid_0's multi_logloss: 0.16486
[112] valid_0's multi_logloss: 0.164721
[113] valid_0's multi_logloss: 0.164593
[114] valid_0's multi_logloss: 0.164935
[115] valid_0's multi_logloss: 0.164825
[116] valid_0's multi_logloss: 0.164531
[117] valid_0's multi_logloss: 0.164465
[118] valid_0's multi_logloss: 0.164425
[119] valid_0's multi_logloss: 0.164327
[120] valid_0's multi_logloss: 0.16428
[121] valid_0's multi_logloss: 0.164234
[122] valid_0's multi_logloss: 0.164206
[123] valid_0's multi_logloss: 0.164153
[124] valid_0's multi_logloss: 0.164138
[125] valid_0's multi_logloss: 0.16408
[126] valid_0's multi_logloss: 0.164009
[127] valid_0's multi_logloss: 0.164
[128] valid_0's multi_logloss: 0.163976
[129] valid_0's multi_logloss: 0.163996
[130] valid_0's multi_logloss: 0.163968
[131] valid_0's multi_logloss: 0.163916
[132] valid_0's multi_logloss: 0.163788
[133] valid_0's multi_logloss: 0.163761
[134] valid_0's multi_logloss: 0.163759
[135] valid_0's multi_logloss: 0.163752
[136] valid_0's multi_logloss: 0.163708
[137] valid_0's multi_logloss: 0.163663
[138] valid_0's multi_logloss: 0.16362
[139] valid_0's multi_logloss: 0.163606
[140] valid_0's multi_logloss: 0.163561
[141] valid_0's multi_logloss: 0.163539
[142] valid_0's multi_logloss: 0.163524
[143] valid_0's multi_logloss: 0.163465
[144] valid_0's multi_logloss: 0.163464
[145] valid_0's multi_logloss: 0.16346
[146] valid_0's multi_logloss: 0.1634
[147] valid_0's multi_logloss: 0.163362
[148] valid_0's multi_logloss: 0.163349
[149] valid_0's multi_logloss: 0.163254
[150] valid_0's multi_logloss: 0.163215
[151] valid_0's multi_logloss: 0.163189
[152] valid_0's multi_logloss: 0.163152
[153] valid_0's multi_logloss: 0.163151
[154] valid_0's multi_logloss: 0.163078
[155] valid_0's multi_logloss: 0.163018
[156] valid_0's multi_logloss: 0.163014
[157] valid_0's multi_logloss: 0.162995
[158] valid_0's multi_logloss: 0.162981
[159] valid_0's multi_logloss: 0.162968
[160] valid_0's multi_logloss: 0.16294
[161] valid_0's multi_logloss: 0.162916
[162] valid_0's multi_logloss: 0.162923
[163] valid_0's multi_logloss: 0.162934
[164] valid_0's multi_logloss: 0.162908
[165] valid_0's multi_logloss: 0.162915
[166] valid_0's multi_logloss: 0.162939
[167] valid_0's multi_logloss: 0.162942
[168] valid_0's multi_logloss: 0.162923
[169] valid_0's multi_logloss: 0.162937
[170] valid_0's multi_logloss: 0.162955
[171] valid_0's multi_logloss: 0.162952
[172] valid_0's multi_logloss: 0.162952
[173] valid_0's multi_logloss: 0.162936
[174] valid_0's multi_logloss: 0.162936
Early stopping, best iteration is:
[164] valid_0's multi_logloss: 0.162908
print(f'{accuracy_score(y, np.argmax(p_val, axis=1)) * 100:.4f}%')
93.1306%
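Since LightGBM optimized multi-class log loss during training, the same metric can be computed on the out-of-fold predictions as an additional check. A short sketch (its output is not part of the original run):
from sklearn.metrics import log_loss

# Out-of-fold multi-class log loss over all 320,000 training rows.
print(f'OOF log loss: {log_loss(y, p_val):.6f}')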
print(p_val.shape, p_tst.shape)
(320000, 3) (80000, 3)
np.savetxt(p_val_file, p_val, fmt='%.6f', delimiter=',')
np.savetxt(p_tst_file, p_tst, fmt='%.6f', delimiter=',')
Feature importance visualization
imp = pd.DataFrame({'feature': df.columns, 'importance': clf.feature_importances_})  # clf is the model from the last CV fold
imp = imp.sort_values('importance').set_index('feature')
imp.plot(kind='barh')
<matplotlib.axes._subplots.AxesSubplot at 0x7fdd61b17650>
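Note that the plot above uses clf.feature_importances_ from the last CV fold only. A sketch of a more stable alternative, assuming the five fitted models were collected into a hypothetical models list inside the training loop (e.g. models.append(clf)):
# `models` is assumed to hold the five fitted LGBMClassifier objects from the CV loop.
imp_mean = pd.DataFrame({'feature': df.columns,
                         'importance': np.mean([m.feature_importances_ for m in models], axis=0)})
imp_mean = imp_mean.sort_values('importance').set_index('feature')
imp_mean.plot(kind='barh')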
Creating the submission file
sub = pd.read_csv(sample_file, index_col=0)
print(sub.shape)
sub.head()
(80000, 1)
id | class
---|---
320000 | 0 |
320001 | 0 |
320002 | 0 |
320003 | 0 |
320004 | 0 |
sub[target_col] = np.argmax(p_tst, axis=1)
sub.head()
id | class
---|---
320000 | 2 |
320001 | 0 |
320002 | 2 |
320003 | 0 |
320004 | 2 |
sub[target_col].value_counts()
2 41170
0 29971
1 8859
Name: class, dtype: int64
sub.to_csv(sub_file)
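As a final sanity check before uploading (a sketch, not part of the original notebook), the saved submission can be read back and compared against the sample submission:
chk = pd.read_csv(sub_file, index_col=0)
sample = pd.read_csv(sample_file, index_col=0)
# The submission must have the same ids and the same single 'class' column as the sample.
assert chk.shape == sample.shape
assert (chk.index == sample.index).all()
assert list(chk.columns) == list(sample.columns)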