Compare commits
1048 commits: release_0. ... v1.2.0rc2
[Commit table: Author | SHA1 | Date. Only the abbreviated SHA1 column survived the export for the 1048 commits in this range, running from 0089a0e6bf at the top of the table to 1214081f99 at the bottom; the Author and Date columns are empty throughout.]
.clang-tidy (41)

@@ -1,22 +1,21 @@
-Checks: 'modernize-*,-modernize-make-*,-modernize-raw-string-literal,google-*,-google-default-arguments,-clang-diagnostic-#pragma-messages,readability-identifier-naming'
+Checks: 'modernize-*,-modernize-make-*,-modernize-use-auto,-modernize-raw-string-literal,-modernize-avoid-c-arrays,-modernize-use-trailing-return-type,google-*,-google-default-arguments,-clang-diagnostic-#pragma-messages,readability-identifier-naming'
 CheckOptions:
   - { key: readability-identifier-naming.ClassCase, value: CamelCase }
   - { key: readability-identifier-naming.StructCase, value: CamelCase }
   - { key: readability-identifier-naming.TypeAliasCase, value: CamelCase }
   - { key: readability-identifier-naming.TypedefCase, value: CamelCase }
   - { key: readability-identifier-naming.TypeTemplateParameterCase, value: CamelCase }
-  - { key: readability-identifier-naming.LocalVariableCase, value: lower_case }
   - { key: readability-identifier-naming.MemberCase, value: lower_case }
   - { key: readability-identifier-naming.PrivateMemberSuffix, value: '_' }
   - { key: readability-identifier-naming.ProtectedMemberSuffix, value: '_' }
   - { key: readability-identifier-naming.EnumCase, value: CamelCase }
   - { key: readability-identifier-naming.EnumConstant, value: CamelCase }
   - { key: readability-identifier-naming.EnumConstantPrefix, value: k }
   - { key: readability-identifier-naming.GlobalConstantCase, value: CamelCase }
   - { key: readability-identifier-naming.GlobalConstantPrefix, value: k }
   - { key: readability-identifier-naming.StaticConstantCase, value: CamelCase }
   - { key: readability-identifier-naming.StaticConstantPrefix, value: k }
   - { key: readability-identifier-naming.ConstexprVariableCase, value: CamelCase }
   - { key: readability-identifier-naming.ConstexprVariablePrefix, value: k }
   - { key: readability-identifier-naming.FunctionCase, value: CamelCase }
   - { key: readability-identifier-naming.NamespaceCase, value: lower_case }
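The naming rules above only take effect when clang-tidy is actually run over the sources. As a hedged illustration (a generic CMake sketch, not taken from the xgboost build files in this comparison; the project name tidy_demo, target demo, and file main.cc are placeholders, and clang-tidy is assumed to be installed and on PATH), a CMake project can have the compiler driver invoke clang-tidy so that a checked-in .clang-tidy configuration like this one applies to every translation unit:

# Generic sketch: wire clang-tidy into the build so the checks from the
# nearest .clang-tidy file run on each compiled source file.
cmake_minimum_required(VERSION 3.13)
project(tidy_demo LANGUAGES CXX)

find_program(CLANG_TIDY_EXE NAMES clang-tidy)
if (CLANG_TIDY_EXE)
  # Setting this variable before targets are created makes CMake pass every
  # C++ source through clang-tidy; the tool picks up .clang-tidy on its own.
  set(CMAKE_CXX_CLANG_TIDY "${CLANG_TIDY_EXE}")
endif ()

add_executable(demo main.cc)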
.editorconfig (11, new file)

@@ -0,0 +1,11 @@
root = true

[*]
charset=utf-8
indent_style = space
indent_size = 2
insert_final_newline = true

[*.py]
indent_style = space
indent_size = 4
.github/FUNDING.yml (1, new file, vendored)

@@ -0,0 +1 @@
open_collective: xgboost
.github/ISSUE_TEMPLATE.md (7, new file, vendored)

@@ -0,0 +1,7 @@
Thanks for participating in the XGBoost community! We use https://discuss.xgboost.ai for any general usage questions and discussions. The issue tracker is used for actionable items such as feature proposals discussion, roadmaps, and bug tracking. You are always welcomed to post on the forum first :)

Issues that are inactive for a period of time may get closed. We adopt this policy so that we won't lose track of actionable issues that may fall at the bottom of the pile. Feel free to reopen a new one if you feel there is an additional problem that needs attention when an old one gets closed.

For bug reports, to help the developer act on the issues, please include a description of your environment, preferably a minimum script to reproduce the problem.

For feature proposals, list clear, small actionable items so we can track the progress of the change.
.github/lock.yml (32, new file, vendored)

@@ -0,0 +1,32 @@
# Configuration for lock-threads - https://github.com/dessant/lock-threads

# Number of days of inactivity before a closed issue or pull request is locked
daysUntilLock: 90

# Issues and pull requests with these labels will not be locked. Set to `[]` to disable
exemptLabels:
  - feature-request

# Label to add before locking, such as `outdated`. Set to `false` to disable
lockLabel: false

# Comment to post before locking. Set to `false` to disable
lockComment: false

# Assign `resolved` as the reason for locking. Set to `false` to disable
setLockReason: true

# Limit to only `issues` or `pulls`
# only: issues

# Optionally, specify configuration settings just for `issues` or `pulls`
# issues:
#   exemptLabels:
#     - help-wanted
#   lockLabel: outdated

# pulls:
#   daysUntilLock: 30

# Repository to extend settings from
# _extends: repo
.github/workflows/main.yml (138, new file, vendored)

@@ -0,0 +1,138 @@
# This is a basic workflow to help you get started with Actions

name: XGBoost-CI

# Controls when the action will run. Triggers the workflow on push or pull request
# events but only for the master branch
on: [push, pull_request]

env:
  R_PACKAGES: c('XML', 'igraph', 'data.table', 'magrittr', 'stringi', 'ggplot2', 'DiagrammeR', 'Ckmeans.1d.dp', 'vcd', 'testthat', 'lintr', 'knitr', 'rmarkdown', 'e1071', 'cplm', 'devtools')

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  test-with-jvm:
    name: Test JVM on OS ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [windows-latest, windows-2016, ubuntu-latest]

    steps:
    - uses: actions/checkout@v2
      with:
        submodules: 'true'

    - uses: actions/setup-java@v1
      with:
        java-version: 1.8

    - name: Cache Maven packages
      uses: actions/cache@v2
      with:
        path: ~/.m2
        key: ${{ runner.os }}-m2-${{ hashFiles('./jvm-packages/pom.xml') }}
        restore-keys: ${{ runner.os }}-m2

    - name: Test JVM packages
      run: |
        cd jvm-packages
        mvn test -pl :xgboost4j_2.12


  lintr:
    runs-on: ${{ matrix.config.os }}

    name: Run R linters on OS ${{ matrix.config.os }}, R ${{ matrix.config.r }}, Compiler ${{ matrix.config.compiler }}, Build ${{ matrix.config.build }}

    strategy:
      matrix:
        config:
          - {os: windows-latest, r: 'release', compiler: 'mingw', build: 'autotools'}
    env:
      R_REMOTES_NO_ERRORS_FROM_WARNINGS: true
      RSPM: ${{ matrix.config.rspm }}

    steps:
    - uses: actions/checkout@v2
      with:
        submodules: 'true'

    - uses: r-lib/actions/setup-r@master
      with:
        r-version: ${{ matrix.config.r }}

    - name: Cache R packages
      uses: actions/cache@v2
      with:
        path: ${{ env.R_LIBS_USER }}
        key: ${{ runner.os }}-r-${{ matrix.config.r }}-1-${{ hashFiles('R-package/DESCRIPTION') }}
        restore-keys: ${{ runner.os }}-r-${{ matrix.config.r }}-2-

    - name: Install dependencies
      shell: Rscript {0}
      run: |
        install.packages(${{ env.R_PACKAGES }},
                         repos = 'http://cloud.r-project.org',
                         dependencies = c('Depends', 'Imports', 'LinkingTo'))

    - name: Run lintr
      run: |
        cd R-package
        R.exe CMD INSTALL .
        Rscript.exe tests/run_lint.R


  test-with-R:
    runs-on: ${{ matrix.config.os }}

    name: Test R on OS ${{ matrix.config.os }}, R ${{ matrix.config.r }}, Compiler ${{ matrix.config.compiler }}, Build ${{ matrix.config.build }}

    strategy:
      fail-fast: false
      matrix:
        config:
          - {os: windows-latest, r: 'release', compiler: 'msvc', build: 'autotools'}
          - {os: windows-2016, r: 'release', compiler: 'msvc', build: 'autotools'}
          - {os: windows-latest, r: 'release', compiler: 'msvc', build: 'cmake'}
          - {os: windows-2016, r: 'release', compiler: 'msvc', build: 'cmake'}
          - {os: windows-latest, r: 'release', compiler: 'mingw', build: 'autotools'}
          - {os: windows-2016, r: 'release', compiler: 'mingw', build: 'autotools'}
          - {os: windows-latest, r: 'release', compiler: 'mingw', build: 'cmake'}
          - {os: windows-2016, r: 'release', compiler: 'mingw', build: 'cmake'}
    env:
      R_REMOTES_NO_ERRORS_FROM_WARNINGS: true
      RSPM: ${{ matrix.config.rspm }}

    steps:
    - uses: actions/checkout@v2
      with:
        submodules: 'true'

    - uses: r-lib/actions/setup-r@master
      with:
        r-version: ${{ matrix.config.r }}

    - name: Cache R packages
      uses: actions/cache@v2
      with:
        path: ${{ env.R_LIBS_USER }}
        key: ${{ runner.os }}-r-${{ matrix.config.r }}-1-${{ hashFiles('R-package/DESCRIPTION') }}
        restore-keys: ${{ runner.os }}-r-${{ matrix.config.r }}-2-

    - name: Install dependencies
      shell: Rscript {0}
      run: |
        install.packages(${{ env.R_PACKAGES }},
                         repos = 'http://cloud.r-project.org',
                         dependencies = c('Depends', 'Imports', 'LinkingTo'))

    - uses: actions/setup-python@v2
      with:
        python-version: '3.6' # Version range or exact version of a Python version to use, using SemVer's version range syntax
        architecture: 'x64' # optional x64 or x86. Defaults to x64 if not specified

    - name: Test R
      run: |
        python tests/ci_build/test_r_package.py --compiler="${{ matrix.config.compiler }}" --build-tool="${{ matrix.config.build }}"
.gitignore (24, vendored)

@@ -17,7 +17,7 @@
 *.tar.gz
 *conf
 *buffer
-*model
+*.model
 *pyc
 *.train
 *.test
@@ -51,6 +51,7 @@ Debug
 #.Rbuildignore
 R-package.Rproj
 *.cache*
+.mypy_cache/
 # java
 java/xgboost4j/target
 java/xgboost4j/tmp
@@ -65,14 +66,11 @@ nb-configuration*
 .pydevproject
 .settings/
 build
-config.mk
 /xgboost
 *.data
 build_plugin
-.idea
 recommonmark/
 tags
-*.iml
 *.class
 target
 *.swp
@@ -90,4 +88,20 @@ lib/
 # spark
 metastore_db
 
-plugin/updater_gpu/test/cpp/data
+/include/xgboost/build_config.h
+
+# files from R-package source install
+**/config.status
+R-package/src/Makevars
+*.lib
+
+# Visual Studio Code
+/.vscode/
+
+# IntelliJ/CLion
+.idea
+*.iml
+/cmake-build-debug/
+
+# GDB
+.gdb_history
.gitmodules (3, vendored)

@@ -4,9 +4,6 @@
 [submodule "rabit"]
   path = rabit
   url = https://github.com/dmlc/rabit
-[submodule "nccl"]
-  path = nccl
-  url = https://github.com/dmlc/nccl
 [submodule "cub"]
   path = cub
   url = https://github.com/NVlabs/cub
.travis.yml (65)

@@ -1,67 +1,56 @@
 # disable sudo for container build.
 sudo: required
 
-# Enabling test on Linux and OS X
+# Enabling test OS X
 os:
   - linux
   - osx
 
-osx_image: xcode8
-group: deprecated-2017Q4
+osx_image: xcode10.1
+dist: bionic
 
 # Use Build Matrix to do lint and build seperately
 env:
   matrix:
-    # code lint
-    - TASK=lint
-    # r package test
-    - TASK=r_test
     # python package test
     - TASK=python_test
-    - TASK=python_lightweight_test
+    # test installation of Python source distribution
+    - TASK=python_sdist_test
     # java package test
     - TASK=java_test
    # cmake test
     - TASK=cmake_test
-    # c++ test
-    - TASK=cpp_test
+  global:
+    - secure: "PR16i9F8QtNwn99C5NDp8nptAS+97xwDtXEJJfEiEVhxPaaRkOp0MPWhogCaK0Eclxk1TqkgWbdXFknwGycX620AzZWa/A1K3gAs+GrpzqhnPMuoBJ0Z9qxXTbSJvCyvMbYwVrjaxc/zWqdMU8waWz8A7iqKGKs/SqbQ3rO6v7c="
+    - secure: "dAGAjBokqm/0nVoLMofQni/fWIBcYSmdq4XvCBX1ZAMDsWnuOfz/4XCY6h2lEI1rVHZQ+UdZkc9PioOHGPZh5BnvE49/xVVWr9c4/61lrDOlkD01ZjSAeoV0fAZq+93V/wPl4QV+MM+Sem9hNNzFSbN5VsQLAiWCSapWsLdKzqA="
 
 matrix:
   exclude:
-    - os: osx
-      env: TASK=lint
-    - os: osx
-      env: TASK=cmake_test
     - os: linux
-      env: TASK=r_test
-    - os: osx
-      env: TASK=python_lightweight_test
-    - os: osx
-      env: TASK=cpp_test
+      env: TASK=python_test
+    - os: linux
+      env: TASK=java_test
+    - os: linux
+      env: TASK=cmake_test
 
-# dependent apt packages
+# dependent brew packages
 addons:
-  apt:
-    sources:
-      - llvm-toolchain-trusty-5.0
-      - ubuntu-toolchain-r-test
-      - george-edison55-precise-backports
+  homebrew:
     packages:
-      - clang
-      - clang-tidy-5.0
-      - cmake-data
-      - doxygen
-      - wget
-      - libcurl4-openssl-dev
-      - unzip
+      - cmake
+      - libomp
       - graphviz
-      - gcc-4.8
-      - g++-4.8
+      - openssl
+      - libgit2
+      - lz4
+      - wget
+      - r
+    update: true
 
 before_install:
-  - source dmlc-core/scripts/travis/travis_setup_env.sh
-  - export PYTHONPATH=${PYTHONPATH}:${PWD}/python-package
+  - source tests/travis/travis_setup_env.sh
+  - if [ "${TASK}" != "python_sdist_test" ]; then export PYTHONPATH=${PYTHONPATH}:${PWD}/python-package; fi
   - echo "MAVEN_OPTS='-Xmx2g -XX:MaxPermSize=1024m -XX:ReservedCodeCacheSize=512m -Dorg.slf4j.simpleLogger.defaultLogLevel=error'" > ~/.mavenrc
 
 install:
@@ -76,7 +65,7 @@ cache:
   - ${HOME}/.cache/pip
 
 before_cache:
-  - dmlc-core/scripts/travis/travis_before_cache.sh
+  - tests/travis/travis_before_cache.sh
 
 after_failure:
   - tests/travis/travis_after_failure.sh
513
CMakeLists.txt
513
CMakeLists.txt
@@ -1,247 +1,332 @@
|
|||||||
cmake_minimum_required (VERSION 3.2)
|
cmake_minimum_required(VERSION 3.13)
|
||||||
project(xgboost)
|
project(xgboost LANGUAGES CXX C VERSION 1.2.0)
|
||||||
include(cmake/Utils.cmake)
|
include(cmake/Utils.cmake)
|
||||||
list(APPEND CMAKE_MODULE_PATH "${PROJECT_SOURCE_DIR}/cmake/modules")
|
list(APPEND CMAKE_MODULE_PATH "${xgboost_SOURCE_DIR}/cmake/modules")
|
||||||
find_package(OpenMP)
|
cmake_policy(SET CMP0022 NEW)
|
||||||
|
cmake_policy(SET CMP0079 NEW)
|
||||||
|
cmake_policy(SET CMP0063 NEW)
|
||||||
|
|
||||||
|
if ((${CMAKE_VERSION} VERSION_GREATER 3.13) OR (${CMAKE_VERSION} VERSION_EQUAL 3.13))
|
||||||
|
cmake_policy(SET CMP0077 NEW)
|
||||||
|
endif ((${CMAKE_VERSION} VERSION_GREATER 3.13) OR (${CMAKE_VERSION} VERSION_EQUAL 3.13))
|
||||||
|
|
||||||
|
message(STATUS "CMake version ${CMAKE_VERSION}")
|
||||||
|
|
||||||
|
if (CMAKE_COMPILER_IS_GNUCC AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS 5.0)
|
||||||
|
message(FATAL_ERROR "GCC version must be at least 5.0!")
|
||||||
|
endif()
|
||||||
|
|
||||||
|
include(${xgboost_SOURCE_DIR}/cmake/FindPrefetchIntrinsics.cmake)
|
||||||
|
find_prefetch_intrinsics()
|
||||||
|
include(${xgboost_SOURCE_DIR}/cmake/Version.cmake)
|
||||||
|
write_version()
|
||||||
set_default_configuration_release()
|
set_default_configuration_release()
|
||||||
msvc_use_static_runtime()
|
|
||||||
|
|
||||||
# Options
|
#-- Options
|
||||||
option(USE_CUDA "Build with GPU acceleration")
|
option(BUILD_C_DOC "Build documentation for C APIs using Doxygen." OFF)
|
||||||
option(USE_AVX "Build with AVX instructions. May not produce identical results due to approximate math." OFF)
|
option(USE_OPENMP "Build with OpenMP support." ON)
|
||||||
option(USE_NCCL "Build using NCCL for multi-GPU. Also requires USE_CUDA")
|
option(BUILD_STATIC_LIB "Build static library" OFF)
|
||||||
|
## Bindings
|
||||||
option(JVM_BINDINGS "Build JVM bindings" OFF)
|
option(JVM_BINDINGS "Build JVM bindings" OFF)
|
||||||
option(GOOGLE_TEST "Build google tests" OFF)
|
|
||||||
option(R_LIB "Build shared library for R package" OFF)
|
option(R_LIB "Build shared library for R package" OFF)
|
||||||
|
## Dev
|
||||||
|
option(USE_DEBUG_OUTPUT "Dump internal training results like gradients and predictions to stdout.
|
||||||
|
Should only be used for debugging." OFF)
|
||||||
|
option(FORCE_COLORED_OUTPUT "Force colored output from compilers, useful when ninja is used instead of make." OFF)
|
||||||
|
option(ENABLE_ALL_WARNINGS "Enable all compiler warnings. Only effective for GCC/Clang" OFF)
|
||||||
|
option(LOG_CAPI_INVOCATION "Log all C API invocations for debugging" OFF)
|
||||||
|
option(GOOGLE_TEST "Build google tests" OFF)
|
||||||
|
option(USE_DMLC_GTEST "Use google tests bundled with dmlc-core submodule" OFF)
|
||||||
|
option(USE_NVTX "Build with cuda profiling annotations. Developers only." OFF)
|
||||||
|
set(NVTX_HEADER_DIR "" CACHE PATH "Path to the stand-alone nvtx header")
|
||||||
|
option(RABIT_MOCK "Build rabit with mock" OFF)
|
||||||
|
option(HIDE_CXX_SYMBOLS "Build shared library and hide all C++ symbols" OFF)
|
||||||
|
## CUDA
|
||||||
|
option(USE_CUDA "Build with GPU acceleration" OFF)
|
||||||
|
option(USE_NCCL "Build with NCCL to enable distributed GPU support." OFF)
|
||||||
|
option(BUILD_WITH_SHARED_NCCL "Build with shared NCCL library." OFF)
|
||||||
set(GPU_COMPUTE_VER "" CACHE STRING
|
set(GPU_COMPUTE_VER "" CACHE STRING
|
||||||
"Space separated list of compute versions to be built against, e.g. '35 61'")
|
"Semicolon separated list of compute versions to be built against, e.g. '35;61'")
|
||||||
|
## Copied From dmlc
|
||||||
|
option(USE_HDFS "Build with HDFS support" OFF)
|
||||||
|
option(USE_AZURE "Build with AZURE support" OFF)
|
||||||
|
option(USE_S3 "Build with S3 support" OFF)
|
||||||
|
## Sanitizers
|
||||||
|
option(USE_SANITIZER "Use santizer flags" OFF)
|
||||||
|
option(SANITIZER_PATH "Path to sanitizes.")
|
||||||
|
set(ENABLED_SANITIZERS "address" "leak" CACHE STRING
|
||||||
|
"Semicolon separated list of sanitizer names. E.g 'address;leak'. Supported sanitizers are
|
||||||
|
address, leak, undefined and thread.")
|
||||||
|
## Plugins
|
||||||
|
option(PLUGIN_LZ4 "Build lz4 plugin" OFF)
|
||||||
|
option(PLUGIN_DENSE_PARSER "Build dense parser plugin" OFF)
|
||||||
|
option(ADD_PKGCONFIG "Add xgboost.pc into system." ON)
|
||||||
|
|
||||||
# Deprecation warning
|
#-- Checks for building XGBoost
|
||||||
if(PLUGIN_UPDATER_GPU)
|
if (USE_DEBUG_OUTPUT AND (NOT (CMAKE_BUILD_TYPE MATCHES Debug)))
|
||||||
set(USE_CUDA ON)
|
message(SEND_ERROR "Do not enable `USE_DEBUG_OUTPUT' with release build.")
|
||||||
message(WARNING "The option 'PLUGIN_UPDATER_GPU' is deprecated. Set 'USE_CUDA' instead.")
|
endif (USE_DEBUG_OUTPUT AND (NOT (CMAKE_BUILD_TYPE MATCHES Debug)))
|
||||||
endif()
|
if (USE_NCCL AND NOT (USE_CUDA))
|
||||||
|
message(SEND_ERROR "`USE_NCCL` must be enabled with `USE_CUDA` flag.")
|
||||||
|
endif (USE_NCCL AND NOT (USE_CUDA))
|
||||||
|
if (BUILD_WITH_SHARED_NCCL AND (NOT USE_NCCL))
|
||||||
|
message(SEND_ERROR "Build XGBoost with -DUSE_NCCL=ON to enable BUILD_WITH_SHARED_NCCL.")
|
||||||
|
endif (BUILD_WITH_SHARED_NCCL AND (NOT USE_NCCL))
|
||||||
|
if (JVM_BINDINGS AND R_LIB)
|
||||||
|
message(SEND_ERROR "`R_LIB' is not compatible with `JVM_BINDINGS' as they both have customized configurations.")
|
||||||
|
endif (JVM_BINDINGS AND R_LIB)
|
||||||
|
if (R_LIB AND GOOGLE_TEST)
|
||||||
|
message(WARNING "Some C++ unittests will fail with `R_LIB` enabled,
|
||||||
|
as R package redirects some functions to R runtime implementation.")
|
||||||
|
endif (R_LIB AND GOOGLE_TEST)
|
||||||
|
if (USE_AVX)
|
||||||
|
message(SEND_ERROR "The option 'USE_AVX' is deprecated as experimental AVX features have been removed from XGBoost.")
|
||||||
|
endif (USE_AVX)
|
||||||
|
if (ENABLE_ALL_WARNINGS)
|
||||||
|
if ((NOT CMAKE_CXX_COMPILER_ID MATCHES "Clang") AND (NOT CMAKE_CXX_COMPILER_ID STREQUAL "GNU"))
|
||||||
|
message(SEND_ERROR "ENABLE_ALL_WARNINGS is only available for Clang and GCC.")
|
||||||
|
endif ((NOT CMAKE_CXX_COMPILER_ID MATCHES "Clang") AND (NOT CMAKE_CXX_COMPILER_ID STREQUAL "GNU"))
|
||||||
|
endif (ENABLE_ALL_WARNINGS)
|
||||||
|
|
||||||
# Compiler flags
|
#-- Sanitizer
|
||||||
set(CMAKE_CXX_STANDARD 11)
|
if (USE_SANITIZER)
|
||||||
set(CMAKE_CXX_STANDARD_REQUIRED ON)
|
include(cmake/Sanitizer.cmake)
|
||||||
if(OpenMP_CXX_FOUND OR OPENMP_FOUND)
|
enable_sanitizers("${ENABLED_SANITIZERS}")
|
||||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OpenMP_CXX_FLAGS}")
|
endif (USE_SANITIZER)
|
||||||
endif()
|
|
||||||
set(CMAKE_POSITION_INDEPENDENT_CODE ON)
|
|
||||||
if(MSVC)
|
|
||||||
# Multithreaded compilation
|
|
||||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /MP")
|
|
||||||
else()
|
|
||||||
# Correct error for GCC 5 and cuda
|
|
||||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -D_MWAITXINTRIN_H_INCLUDED -D_FORCE_INLINES")
|
|
||||||
# Performance
|
|
||||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -funroll-loops")
|
|
||||||
endif()
|
|
||||||
|
|
||||||
# AVX
|
if (USE_CUDA)
|
||||||
if(USE_AVX)
|
SET(USE_OPENMP ON CACHE BOOL "CUDA requires OpenMP" FORCE)
|
||||||
if(MSVC)
|
# `export CXX=' is ignored by CMake CUDA.
|
||||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /arch:AVX")
|
set(CMAKE_CUDA_HOST_COMPILER ${CMAKE_CXX_COMPILER})
|
||||||
else()
|
message(STATUS "Configured CUDA host compiler: ${CMAKE_CUDA_HOST_COMPILER}")
|
||||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -mavx")
|
|
||||||
|
enable_language(CUDA)
|
||||||
|
if (${CMAKE_CUDA_COMPILER_VERSION} VERSION_LESS 10.0)
|
||||||
|
message(FATAL_ERROR "CUDA version must be at least 10.0!")
|
||||||
endif()
|
endif()
|
||||||
add_definitions(-DXGBOOST_USE_AVX)
|
set(GEN_CODE "")
|
||||||
|
format_gencode_flags("${GPU_COMPUTE_VER}" GEN_CODE)
|
||||||
|
message(STATUS "CUDA GEN_CODE: ${GEN_CODE}")
|
||||||
|
endif (USE_CUDA)
|
||||||
|
|
||||||
|
if (FORCE_COLORED_OUTPUT AND (CMAKE_GENERATOR STREQUAL "Ninja") AND
|
||||||
|
((CMAKE_CXX_COMPILER_ID STREQUAL "GNU") OR
|
||||||
|
(CMAKE_CXX_COMPILER_ID STREQUAL "Clang")))
|
||||||
|
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fdiagnostics-color=always")
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
|
find_package(Threads REQUIRED)
|
||||||
|
|
||||||
# compiled code customizations for R package
|
if (USE_OPENMP)
|
||||||
if(R_LIB)
|
if (APPLE)
|
||||||
add_definitions(
|
# Require CMake 3.16+ on Mac OSX, as previous versions of CMake had trouble locating
|
||||||
-DXGBOOST_STRICT_R_MODE=1
|
# OpenMP on Mac. See https://github.com/dmlc/xgboost/pull/5146#issuecomment-568312706
|
||||||
-DXGBOOST_CUSTOMIZE_GLOBAL_PRNG=1
|
cmake_minimum_required(VERSION 3.16)
|
||||||
-DDMLC_LOG_BEFORE_THROW=0
|
endif (APPLE)
|
||||||
-DDMLC_DISABLE_STDIN=1
|
find_package(OpenMP REQUIRED)
|
||||||
-DDMLC_LOG_CUSTOMIZE=1
|
endif (USE_OPENMP)
|
||||||
-DRABIT_CUSTOMIZE_MSG_
|
|
||||||
-DRABIT_STRICT_CXX98_
|
|
||||||
)
|
|
||||||
endif()
|
|
||||||
|
|
||||||
include_directories (
|
|
||||||
${PROJECT_SOURCE_DIR}/include
|
|
||||||
${PROJECT_SOURCE_DIR}/dmlc-core/include
|
|
||||||
${PROJECT_SOURCE_DIR}/rabit/include
|
|
||||||
)
|
|
||||||
|
|
||||||
file(GLOB_RECURSE SOURCES
|
|
||||||
src/*.cc
|
|
||||||
src/*.h
|
|
||||||
include/*.h
|
|
||||||
)
|
|
||||||
|
|
||||||
# Only add main function for executable target
|
|
||||||
list(REMOVE_ITEM SOURCES ${PROJECT_SOURCE_DIR}/src/cli_main.cc)
|
|
||||||
|
|
||||||
file(GLOB_RECURSE CUDA_SOURCES
|
|
||||||
src/*.cu
|
|
||||||
src/*.cuh
|
|
||||||
)
|
|
||||||
|
|
||||||
# rabit
|
|
||||||
# TODO: Create rabit cmakelists.txt
|
|
||||||
set(RABIT_SOURCES
|
|
||||||
rabit/src/allreduce_base.cc
|
|
||||||
rabit/src/allreduce_robust.cc
|
|
||||||
rabit/src/engine.cc
|
|
||||||
rabit/src/c_api.cc
|
|
||||||
)
|
|
||||||
set(RABIT_EMPTY_SOURCES
|
|
||||||
rabit/src/engine_empty.cc
|
|
||||||
rabit/src/c_api.cc
|
|
||||||
)
|
|
||||||
if(MINGW OR R_LIB)
|
|
||||||
# build a dummy rabit library
|
|
||||||
add_library(rabit STATIC ${RABIT_EMPTY_SOURCES})
|
|
||||||
else()
|
|
||||||
add_library(rabit STATIC ${RABIT_SOURCES})
|
|
||||||
endif()
|
|
||||||
|
|
||||||
|
# core xgboost
|
||||||
|
add_subdirectory(${xgboost_SOURCE_DIR}/src)
|
||||||
|
|
||||||
# dmlc-core
|
# dmlc-core
|
||||||
add_subdirectory(dmlc-core)
|
msvc_use_static_runtime()
|
||||||
set(LINK_LIBRARIES dmlc rabit)
|
add_subdirectory(${xgboost_SOURCE_DIR}/dmlc-core)
|
||||||
|
set_target_properties(dmlc PROPERTIES
|
||||||
|
CXX_STANDARD 14
|
||||||
|
CXX_STANDARD_REQUIRED ON
|
||||||
|
POSITION_INDEPENDENT_CODE ON)
|
||||||
|
if (MSVC)
|
||||||
|
target_compile_options(dmlc PRIVATE
|
||||||
|
-D_CRT_SECURE_NO_WARNINGS -D_CRT_SECURE_NO_DEPRECATE)
|
||||||
|
if (TARGET dmlc_unit_tests)
|
||||||
|
target_compile_options(dmlc_unit_tests PRIVATE
|
||||||
|
-D_CRT_SECURE_NO_WARNINGS -D_CRT_SECURE_NO_DEPRECATE)
|
||||||
|
endif (TARGET dmlc_unit_tests)
|
||||||
|
endif (MSVC)
|
||||||
|
if (ENABLE_ALL_WARNINGS)
|
||||||
|
target_compile_options(dmlc PRIVATE -Wall -Wextra)
|
||||||
|
endif (ENABLE_ALL_WARNINGS)
|
||||||
|
target_link_libraries(objxgboost PUBLIC dmlc)
|
||||||
|
|
||||||
|
# rabit
|
||||||
|
set(RABIT_BUILD_DMLC OFF)
|
||||||
|
set(DMLC_ROOT ${xgboost_SOURCE_DIR}/dmlc-core)
|
||||||
|
set(RABIT_WITH_R_LIB ${R_LIB})
|
||||||
|
add_subdirectory(rabit)
|
||||||
|
|
||||||
if(USE_CUDA)
|
if (RABIT_MOCK)
|
||||||
find_package(CUDA 8.0 REQUIRED)
|
target_link_libraries(objxgboost PUBLIC rabit_mock_static)
|
||||||
cmake_minimum_required(VERSION 3.5)
|
if (MSVC)
|
||||||
|
target_compile_options(rabit_mock_static PRIVATE
|
||||||
add_definitions(-DXGBOOST_USE_CUDA)
|
-D_CRT_SECURE_NO_WARNINGS -D_CRT_SECURE_NO_DEPRECATE)
|
||||||
|
endif (MSVC)
|
||||||
include_directories(cub)
|
|
||||||
|
|
||||||
if(USE_NCCL)
|
|
||||||
include_directories(nccl/src)
|
|
||||||
add_definitions(-DXGBOOST_USE_NCCL)
|
|
||||||
endif()
|
|
||||||
|
|
||||||
set(GENCODE_FLAGS "")
|
|
||||||
format_gencode_flags("${GPU_COMPUTE_VER}" GENCODE_FLAGS)
|
|
||||||
message("cuda architecture flags: ${GENCODE_FLAGS}")
|
|
||||||
|
|
||||||
set(CUDA_NVCC_FLAGS "${CUDA_NVCC_FLAGS};--expt-extended-lambda;--expt-relaxed-constexpr;${GENCODE_FLAGS};-lineinfo;")
|
|
||||||
if(NOT MSVC)
|
|
||||||
set(CUDA_NVCC_FLAGS "${CUDA_NVCC_FLAGS};-Xcompiler -fPIC; -Xcompiler -Werror; -std=c++11")
|
|
||||||
endif()
|
|
||||||
|
|
||||||
if(USE_NCCL)
|
|
||||||
add_subdirectory(nccl)
|
|
||||||
endif()
|
|
||||||
|
|
||||||
cuda_add_library(gpuxgboost ${CUDA_SOURCES} STATIC)
|
|
||||||
|
|
||||||
if(USE_NCCL)
|
|
||||||
target_link_libraries(gpuxgboost nccl)
|
|
||||||
endif()
|
|
||||||
list(APPEND LINK_LIBRARIES gpuxgboost)
|
|
||||||
endif()
|
|
||||||
|
|
||||||
|
|
||||||
# flags and sources for R-package
|
|
||||||
if(R_LIB)
|
|
||||||
file(GLOB_RECURSE R_SOURCES
|
|
||||||
R-package/src/*.h
|
|
||||||
R-package/src/*.c
|
|
||||||
R-package/src/*.cc
|
|
||||||
)
|
|
||||||
list(APPEND SOURCES ${R_SOURCES})
|
|
||||||
endif()
|
|
||||||
|
|
||||||
add_library(objxgboost OBJECT ${SOURCES})
|
|
||||||
|
|
||||||
|
|
||||||
# building shared library for R package
|
|
||||||
if(R_LIB)
|
|
||||||
find_package(LibR REQUIRED)
|
|
||||||
|
|
||||||
list(APPEND LINK_LIBRARIES "${LIBR_CORE_LIBRARY}")
|
|
||||||
MESSAGE(STATUS "LIBR_CORE_LIBRARY " ${LIBR_CORE_LIBRARY})
|
|
||||||
|
|
||||||
include_directories(
|
|
||||||
"${LIBR_INCLUDE_DIRS}"
|
|
||||||
"${PROJECT_SOURCE_DIR}"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Shared library target for the R package
|
|
||||||
add_library(xgboost SHARED $<TARGET_OBJECTS:objxgboost>)
|
|
||||||
target_link_libraries(xgboost ${LINK_LIBRARIES})
|
|
||||||
# R uses no lib prefix in shared library names of its packages
|
|
||||||
set_target_properties(xgboost PROPERTIES PREFIX "")
|
|
||||||
|
|
||||||
setup_rpackage_install_target(xgboost ${CMAKE_CURRENT_BINARY_DIR})
|
|
||||||
# use a dummy location for any other remaining installs
|
|
||||||
set(CMAKE_INSTALL_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/dummy_inst")
|
|
||||||
|
|
||||||
# main targets: shared library & exe
|
|
||||||
else()
|
else()
|
||||||
# Executable
|
target_link_libraries(objxgboost PUBLIC rabit)
|
||||||
add_executable(runxgboost $<TARGET_OBJECTS:objxgboost> src/cli_main.cc)
|
if (MSVC)
|
||||||
set_target_properties(runxgboost PROPERTIES
|
target_compile_options(rabit PRIVATE
|
||||||
OUTPUT_NAME xgboost
|
-D_CRT_SECURE_NO_WARNINGS -D_CRT_SECURE_NO_DEPRECATE)
|
||||||
)
|
endif (MSVC)
|
||||||
set_output_directory(runxgboost ${PROJECT_SOURCE_DIR})
|
endif(RABIT_MOCK)
|
||||||
target_link_libraries(runxgboost ${LINK_LIBRARIES})
|
foreach(lib rabit rabit_base rabit_empty rabit_mock rabit_mock_static)
|
||||||
|
# Explicitly link dmlc to rabit, so that configured header (build_config.h)
|
||||||
|
# from dmlc is correctly applied to rabit.
|
||||||
|
if (TARGET ${lib})
|
||||||
|
target_link_libraries(${lib} dmlc ${CMAKE_THREAD_LIBS_INIT})
|
||||||
|
if (HIDE_CXX_SYMBOLS) # Hide all C++ symbols from Rabit
|
||||||
|
set_target_properties(${lib} PROPERTIES CXX_VISIBILITY_PRESET hidden)
|
||||||
|
endif (HIDE_CXX_SYMBOLS)
|
||||||
|
if (ENABLE_ALL_WARNINGS)
|
||||||
|
target_compile_options(${lib} PRIVATE -Wall -Wextra)
|
||||||
|
endif (ENABLE_ALL_WARNINGS)
|
||||||
|
endif (TARGET ${lib})
|
||||||
|
endforeach()
|
||||||
|
|
||||||
# Shared library
|
# Exports some R specific definitions and objects
|
||||||
add_library(xgboost SHARED $<TARGET_OBJECTS:objxgboost>)
|
if (R_LIB)
|
||||||
target_link_libraries(xgboost ${LINK_LIBRARIES})
|
add_subdirectory(${xgboost_SOURCE_DIR}/R-package)
|
||||||
set_output_directory(xgboost ${PROJECT_SOURCE_DIR}/lib)
|
endif (R_LIB)
|
||||||
if(MINGW)
|
|
||||||
# remove the 'lib' prefix to conform to windows convention for shared library names
|
|
||||||
set_target_properties(xgboost PROPERTIES PREFIX "")
|
|
||||||
endif()
|
|
||||||
|
|
||||||
#Ensure these two targets do not build simultaneously, as they produce outputs with conflicting names
|
# Plugin
|
||||||
add_dependencies(xgboost runxgboost)
|
add_subdirectory(${xgboost_SOURCE_DIR}/plugin)
|
||||||
endif()
|
|
||||||
|
|
||||||
|
#-- library
|
||||||
|
if (BUILD_STATIC_LIB)
|
||||||
|
add_library(xgboost STATIC)
|
||||||
|
else (BUILD_STATIC_LIB)
|
||||||
|
add_library(xgboost SHARED)
|
||||||
|
endif (BUILD_STATIC_LIB)
|
||||||
|
target_link_libraries(xgboost PRIVATE objxgboost)
|
||||||
|
|
||||||
# JVM
|
if (USE_NVTX)
|
||||||
if(JVM_BINDINGS)
|
enable_nvtx(xgboost)
|
||||||
find_package(JNI QUIET REQUIRED)
|
endif (USE_NVTX)
|
||||||
|
|
||||||
include_directories(${JNI_INCLUDE_DIRS} jvm-packages/xgboost4j/src/native)
|
#-- Hide all C++ symbols
|
||||||
|
if (HIDE_CXX_SYMBOLS)
|
||||||
|
set_target_properties(objxgboost PROPERTIES CXX_VISIBILITY_PRESET hidden)
|
||||||
|
set_target_properties(xgboost PROPERTIES CXX_VISIBILITY_PRESET hidden)
|
||||||
|
endif (HIDE_CXX_SYMBOLS)
|
||||||
|
|
||||||
add_library(xgboost4j SHARED
|
target_include_directories(xgboost
|
||||||
$<TARGET_OBJECTS:objxgboost>
|
INTERFACE
|
||||||
jvm-packages/xgboost4j/src/native/xgboost4j.cpp)
|
$<INSTALL_INTERFACE:${CMAKE_INSTALL_PREFIX}/include>
|
||||||
set_output_directory(xgboost4j ${PROJECT_SOURCE_DIR}/lib)
|
$<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/include>)
|
||||||
target_link_libraries(xgboost4j
|
|
||||||
${LINK_LIBRARIES}
|
|
||||||
${JAVA_JVM_LIBRARY})
|
|
||||||
endif()
|
|
||||||
|
|
||||||
|
# This creates its own shared library `xgboost4j'.
|
||||||
|
if (JVM_BINDINGS)
|
||||||
|
add_subdirectory(${xgboost_SOURCE_DIR}/jvm-packages)
|
||||||
|
endif (JVM_BINDINGS)
|
||||||
|
#-- End shared library
|
||||||
|
|
||||||
-# Test
-if(GOOGLE_TEST)
-  find_package(GTest REQUIRED)
+#-- CLI for xgboost
+add_executable(runxgboost ${xgboost_SOURCE_DIR}/src/cli_main.cc)
+target_link_libraries(runxgboost PRIVATE objxgboost)
+if (USE_NVTX)
+  enable_nvtx(runxgboost)
+endif (USE_NVTX)
+target_include_directories(runxgboost
+  PRIVATE
+  ${xgboost_SOURCE_DIR}/include
+  ${xgboost_SOURCE_DIR}/dmlc-core/include
+  ${xgboost_SOURCE_DIR}/rabit/include)
+set_target_properties(
+  runxgboost PROPERTIES
+  OUTPUT_NAME xgboost
+  CXX_STANDARD 14
+  CXX_STANDARD_REQUIRED ON)
+#-- End CLI for xgboost
+
+set_output_directory(runxgboost ${xgboost_SOURCE_DIR})
+set_output_directory(xgboost ${xgboost_SOURCE_DIR}/lib)
+# Ensure these two targets do not build simultaneously, as they produce outputs with conflicting names
+add_dependencies(xgboost runxgboost)
+#-- Installing XGBoost
+if (R_LIB)
+  include(cmake/RPackageInstallTargetSetup.cmake)
+  set_target_properties(xgboost PROPERTIES PREFIX "")
+  if (APPLE)
+    set_target_properties(xgboost PROPERTIES SUFFIX ".so")
+  endif (APPLE)
+  setup_rpackage_install_target(xgboost "${CMAKE_CURRENT_BINARY_DIR}/R-package-install")
+  set(CMAKE_INSTALL_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/dummy_inst")
+endif (R_LIB)
+if (MINGW)
+  set_target_properties(xgboost PROPERTIES PREFIX "")
+endif (MINGW)
+
+if (BUILD_C_DOC)
+  include(cmake/Doc.cmake)
+  run_doxygen()
+endif (BUILD_C_DOC)
+
+include(GNUInstallDirs)
+# Install all headers. Please note that currently the C++ headers do not form an "API".
+install(DIRECTORY ${xgboost_SOURCE_DIR}/include/xgboost
+  DESTINATION ${CMAKE_INSTALL_INCLUDEDIR})
+
+install(TARGETS xgboost runxgboost
+  EXPORT XGBoostTargets
+  ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
+  LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
+  RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
+  INCLUDES DESTINATION ${LIBLEGACY_INCLUDE_DIRS})
+install(EXPORT XGBoostTargets
+  FILE XGBoostTargets.cmake
+  NAMESPACE xgboost::
+  DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/xgboost)
+
+include(CMakePackageConfigHelpers)
+configure_package_config_file(
+  ${CMAKE_CURRENT_LIST_DIR}/cmake/xgboost-config.cmake.in
+  ${CMAKE_CURRENT_BINARY_DIR}/cmake/xgboost-config.cmake
+  INSTALL_DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/xgboost)
+write_basic_package_version_file(
+  ${CMAKE_BINARY_DIR}/cmake/xgboost-config-version.cmake
+  VERSION ${XGBOOST_VERSION}
+  COMPATIBILITY AnyNewerVersion)
+install(
+  FILES
+  ${CMAKE_BINARY_DIR}/cmake/xgboost-config.cmake
+  ${CMAKE_BINARY_DIR}/cmake/xgboost-config-version.cmake
+  DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/xgboost)
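The exported targets file and the generated xgboost-config.cmake above let a downstream CMake project consume the installed library through find_package. The following is a minimal sketch of such a consumer; the project name my_app and the source file main.cc are illustrative assumptions, while the imported target name xgboost::xgboost follows from the NAMESPACE xgboost:: export shown above.

    # Hypothetical downstream consumer of the installed package (sketch only)
    cmake_minimum_required(VERSION 3.13)
    project(my_app LANGUAGES CXX)
    # CONFIG mode locates the xgboost-config.cmake installed above
    find_package(xgboost CONFIG REQUIRED)
    add_executable(my_app main.cc)
    # xgboost::xgboost is the target exported under NAMESPACE xgboost::
    target_link_libraries(my_app PRIVATE xgboost::xgboost)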
+#-- Test
+if (GOOGLE_TEST)
enable_testing()
+  # Unittests.
+  add_subdirectory(${xgboost_SOURCE_DIR}/tests/cpp)
+  add_test(
+    NAME TestXGBoostLib
+    COMMAND testxgboost
+    WORKING_DIRECTORY ${xgboost_BINARY_DIR})

-file(GLOB_RECURSE TEST_SOURCES "tests/cpp/*.cc")
-auto_source_group("${TEST_SOURCES}")
-include_directories(${GTEST_INCLUDE_DIR})
+  # CLI tests
+  configure_file(
+    ${xgboost_SOURCE_DIR}/tests/cli/machine.conf.in
+    ${xgboost_BINARY_DIR}/tests/cli/machine.conf
+    @ONLY)
+  add_test(
+    NAME TestXGBoostCLI
+    COMMAND runxgboost ${xgboost_BINARY_DIR}/tests/cli/machine.conf
+    WORKING_DIRECTORY ${xgboost_BINARY_DIR})
+  set_tests_properties(TestXGBoostCLI
+    PROPERTIES
+    PASS_REGULAR_EXPRESSION ".*test-rmse:0.087.*")
+endif (GOOGLE_TEST)
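For readers unfamiliar with the pattern above: configure_file(... @ONLY) substitutes only @VAR@-style placeholders when generating the test configuration, and PASS_REGULAR_EXPRESSION makes CTest grade the test by matching its output. A small self-contained sketch of the same pattern follows; the file names demo.conf.in/demo.conf, the DATA_DIR variable, and the regular expression are illustrative assumptions, not files or values taken from the repository.

    # Illustrative sketch of the same CTest pattern, not the project's actual files
    set(DATA_DIR "${CMAKE_CURRENT_BINARY_DIR}/data")
    configure_file(
      ${CMAKE_CURRENT_SOURCE_DIR}/demo.conf.in   # contains e.g. data = "@DATA_DIR@/train.txt"
      ${CMAKE_CURRENT_BINARY_DIR}/demo.conf
      @ONLY)                                     # replaces @VAR@ only, leaves ${VAR} untouched
    add_test(NAME DemoCLI
      COMMAND runxgboost ${CMAKE_CURRENT_BINARY_DIR}/demo.conf)
    set_tests_properties(DemoCLI PROPERTIES
      PASS_REGULAR_EXPRESSION "test-rmse:[0-9.]+")  # CTest passes the test only if stdout matches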
-if(USE_CUDA)
-  file(GLOB_RECURSE CUDA_TEST_SOURCES "tests/cpp/*.cu")
-  cuda_compile(CUDA_TEST_OBJS ${CUDA_TEST_SOURCES})
-else()
-  set(CUDA_TEST_OBJS "")
-endif()
-add_executable(testxgboost ${TEST_SOURCES} ${CUDA_TEST_OBJS} $<TARGET_OBJECTS:objxgboost>)
-set_output_directory(testxgboost ${PROJECT_SOURCE_DIR})
-target_link_libraries(testxgboost ${GTEST_LIBRARIES} ${LINK_LIBRARIES})
-add_test(TestXGBoost testxgboost)
-endif()
-# Group sources
-auto_source_group("${SOURCES}")
+# For MSVC: Call msvc_use_static_runtime() once again to completely
+# replace /MD with /MT. See https://github.com/dmlc/xgboost/issues/4462
+# for issues caused by mixing of /MD and /MT flags
+msvc_use_static_runtime()
+
+# Add xgboost.pc
+if (ADD_PKGCONFIG)
+  configure_file(${xgboost_SOURCE_DIR}/cmake/xgboost.pc.in ${xgboost_BINARY_DIR}/xgboost.pc @ONLY)
+  install(
+    FILES ${xgboost_BINARY_DIR}/xgboost.pc
+    DESTINATION ${CMAKE_INSTALL_LIBDIR}/pkgconfig)
+endif (ADD_PKGCONFIG)
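The xgboost.pc file added above lets non-CMake build systems locate the library through pkg-config; from CMake itself the same metadata can be consumed via the FindPkgConfig module. A minimal, hypothetical sketch follows; the project name demo and source file main.cc are assumptions, and it presumes the installed xgboost.pc is discoverable on PKG_CONFIG_PATH.

    # Hypothetical consumer using the installed xgboost.pc (sketch only)
    cmake_minimum_required(VERSION 3.13)
    project(demo LANGUAGES CXX)
    find_package(PkgConfig REQUIRED)
    # Reads xgboost.pc and creates the imported target PkgConfig::XGBOOST
    pkg_check_modules(XGBOOST REQUIRED IMPORTED_TARGET xgboost)
    add_executable(demo main.cc)
    target_link_libraries(demo PRIVATE PkgConfig::XGBOOST)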
|||||||
@@ -2,25 +2,44 @@ Contributors of DMLC/XGBoost
============================
XGBoost has been developed and used by a group of active community members. Everyone is more than welcome to contribute; it is a great way to make the project better and more accessible to more users.

+Project Management Committee (PMC)
+----------
+The Project Management Committee (PMC) consists of a group of active committers who moderate the discussion, manage project releases, and propose new committer/PMC members.
+
+* [Tianqi Chen](https://github.com/tqchen), University of Washington
+  - Tianqi is a Ph.D. student working on large-scale machine learning. He is the creator of the project.
+* [Michael Benesty](https://github.com/pommedeterresautee)
+  - Michael is a lawyer and data scientist in France. He is the creator of the XGBoost interactive analysis module in R.
+* [Yuan Tang](https://github.com/terrytangyuan), Ant Group
+  - Yuan is a software engineer at Ant Group. He contributed mostly in R and Python packages.
+* [Nan Zhu](https://github.com/CodingCat), Uber
+  - Nan is a software engineer at Uber. He contributed mostly in JVM packages.
+* [Jiaming Yuan](https://github.com/trivialfis)
+  - Jiaming contributed to the GPU algorithms. He has also introduced new abstractions to improve the quality of the C++ codebase.
+* [Hyunsu Cho](http://hyunsu-cho.io/), NVIDIA
+  - Hyunsu is the maintainer of the XGBoost Python package. He also manages the Jenkins continuous integration system (https://xgboost-ci.net/). He is the initial author of the CPU 'hist' updater.
+* [Rory Mitchell](https://github.com/RAMitchell), University of Waikato
+  - Rory is a Ph.D. student at the University of Waikato. He is the original creator of the GPU training algorithms. He improved the CMake build system and continuous integration.
+* [Hongliang Liu](https://github.com/phunterlau)

Committers
----------
Committers are people who have made substantial contributions to the project and have been granted write access to the project.
-* [Tianqi Chen](https://github.com/tqchen), University of Washington
-  - Tianqi is a Ph.D. student working on large-scale machine learning; he is the creator of the project.
* [Tong He](https://github.com/hetong007), Amazon AI
-  - Tong is an applied scientist in Amazon AI, he is the maintainer of xgboost R package.
+  - Tong is an applied scientist in Amazon AI. He is the maintainer of XGBoost R package.
* [Vadim Khotilovich](https://github.com/khotilov)
  - Vadim contributes many improvements in R and core packages.
* [Bing Xu](https://github.com/antinucleon)
-  - Bing is the original creator of xgboost python package and currently the maintainer of [XGBoost.jl](https://github.com/antinucleon/XGBoost.jl).
+  - Bing is the original creator of XGBoost Python package and currently the maintainer of [XGBoost.jl](https://github.com/antinucleon/XGBoost.jl).
-* [Michael Benesty](https://github.com/pommedeterresautee)
-  - Michael is a lawyer and data scientist in France; he is the creator of the xgboost interactive analysis module in R.
+* [Sergei Lebedev](https://github.com/superbobry), Criteo
+  - Sergei is a software engineer at Criteo. He contributed mostly in JVM packages.
-* [Yuan Tang](https://github.com/terrytangyuan)
-  - Yuan is a data scientist in Chicago, US. He contributed mostly in R and Python packages.
+* [Scott Lundberg](http://scottlundberg.com/), University of Washington
+  - Scott is a Ph.D. student at University of Washington. He is the creator of SHAP, a unified approach to explain the output of machine learning models such as decision tree ensembles. He also helps maintain the XGBoost Julia package.
-* [Nan Zhu](https://github.com/CodingCat)
-  - Nan is a software engineer at Microsoft. He contributed mostly in JVM packages.
+* [Egor Smirnov](https://github.com/SmirnovEgorRu), Intel
+  - Egor has led a major effort to improve the performance of XGBoost on multi-core CPUs.
-* [Sergei Lebedev](https://github.com/superbobry)
-  - Sergei is a software engineer at Criteo. He contributed mostly in JVM packages.

Become a Committer
------------------
@@ -36,28 +55,25 @@ List of Contributors
* [Full List of Contributors](https://github.com/dmlc/xgboost/graphs/contributors)
  - To contributors: please add your name to the list when you submit a patch to the project :)
* [Kailong Chen](https://github.com/kalenhaha)
-  - Kailong is an early contributor of xgboost, he is creator of ranking objectives in xgboost.
+  - Kailong is an early contributor of XGBoost, he is creator of ranking objectives in XGBoost.
* [Skipper Seabold](https://github.com/jseabold)
-  - Skipper is the major contributor to the scikit-learn module of xgboost.
+  - Skipper is the major contributor to the scikit-learn module of XGBoost.
* [Zygmunt Zając](https://github.com/zygmuntz)
  - Zygmunt is the master behind the early stopping feature frequently used by kagglers.
* [Ajinkya Kale](https://github.com/ajkl)
* [Boliang Chen](https://github.com/cblsjtu)
* [Yangqing Men](https://github.com/yanqingmen)
-  - Yangqing is the creator of xgboost java package.
+  - Yangqing is the creator of XGBoost java package.
* [Engpeng Yao](https://github.com/yepyao)
* [Giulio](https://github.com/giuliohome)
-  - Giulio is the creator of windows project of xgboost
+  - Giulio is the creator of Windows project of XGBoost
* [Jamie Hall](https://github.com/nerdcha)
-  - Jamie is the initial creator of xgboost sklearn module.
+  - Jamie is the initial creator of XGBoost scikit-learn module.
* [Yen-Ying Lee](https://github.com/white1033)
* [Masaaki Horikoshi](https://github.com/sinhrks)
-  - Masaaki is the initial creator of xgboost python plotting module.
+  - Masaaki is the initial creator of XGBoost Python plotting module.
-* [Hongliang Liu](https://github.com/phunterlau)
-* [Hyunsu Cho](http://hyunsu-cho.io/)
-  - Hyunsu is the maintainer of the XGBoost Python package. He is in charge of submitting the Python package to Python Package Index (PyPI). He is also the initial author of the CPU 'hist' updater.
* [daiyl0320](https://github.com/daiyl0320)
-  - daiyl0320 contributed patches that made the xgboost distributed version more robust and scale stably on TB-scale datasets.
+  - daiyl0320 contributed patches that made the XGBoost distributed version more robust and scale stably on TB-scale datasets.
* [Huayi Zhang](https://github.com/irachex)
* [Johan Manders](https://github.com/johanmanders)
* [yoori](https://github.com/yoori)
@@ -68,8 +84,23 @@ List of Contributors
* [Alex Bain](https://github.com/convexquad)
* [Baltazar Bieniek](https://github.com/bbieniek)
* [Adam Pocock](https://github.com/Craigacp)
-* [Rory Mitchell](https://github.com/RAMitchell)
-  - Rory is the author of the GPU plugin and also contributed the CMake build system and Windows continuous integration.
* [Gideon Whitehead](https://github.com/gaw89)
* [Yi-Lin Juang](https://github.com/frankyjuang)
* [Andrew Hannigan](https://github.com/andrewhannigan)
+* [Andy Adinets](https://github.com/canonizer)
+* [Henry Gouk](https://github.com/henrygouk)
+* [Pierre de Sahb](https://github.com/pdesahb)
+* [liuliang01](https://github.com/liuliang01)
+  - liuliang01 added support for the qid column for LibSVM input format. This makes ranking tasks easier in the distributed setting.
+* [Andrew Thia](https://github.com/BlueTea88)
+  - Andrew Thia implemented feature interaction constraints.
+* [Wei Tian](https://github.com/weitian)
+* [Chen Qin](https://github.com/chenqin)
+* [Sam Wilkinson](https://samwilkinson.io)
+* [Matthew Jones](https://github.com/mt-jones)
+* [Jiaxiang Li](https://github.com/JiaxiangBU)
+* [Bryan Woods](https://github.com/bryan-woods)
+  - Bryan added support for cross-validation for the ranking objective.
+* [Haoda Fu](https://github.com/fuhaoda)
+* [Evan Kepner](https://github.com/EvanKepner)
+  - Evan Kepner added support for os.PathLike file paths in Python.
||||||
|
|||||||
@@ -1,44 +0,0 @@
|
|||||||
For bugs or installation issues, please provide the following information.
The more information you provide, the more easily we will be able to offer
help and advice.

## Environment info
Operating System:

Compiler:

Package used (python/R/jvm/C++):

`xgboost` version used:

If installing from source, please provide

1. The commit hash (`git rev-parse HEAD`)
2. Logs will be helpful (if logs are large, please upload them as an attachment).

If you are using the jvm package, please

1. add [jvm-packages] in the title so the issue is quickly identified
2. provide the gcc version and distribution

If you are using the python package, please provide

1. The python version and distribution
2. The command to install `xgboost` if you are not installing from source

If you are using the R package, please provide

1. The R `sessionInfo()`
2. The command to install `xgboost` if you are not installing from source

## Steps to reproduce

1.
2.
3.

## What have you tried?

1.
2.
3.
536 Jenkinsfile (vendored)
@@ -3,86 +3,482 @@
|
|||||||
// Jenkins pipeline
|
// Jenkins pipeline
|
||||||
// See documents at https://jenkins.io/doc/book/pipeline/jenkinsfile/
|
// See documents at https://jenkins.io/doc/book/pipeline/jenkinsfile/
|
||||||
|
|
||||||
|
// Command to run command inside a docker container
|
||||||
|
dockerRun = 'tests/ci_build/ci_build.sh'
|
||||||
|
|
||||||
|
// Which CUDA version to use when building reference distribution wheel
|
||||||
|
ref_cuda_ver = '10.0'
|
||||||
|
|
||||||
import groovy.transform.Field
|
import groovy.transform.Field
|
||||||
|
|
||||||
/* Unrestricted tasks: tasks that do NOT generate artifacts */
|
|
||||||
|
|
||||||
// Command to run command inside a docker container
|
|
||||||
def dockerRun = 'tests/ci_build/ci_build.sh'
|
|
||||||
// Utility functions
|
|
||||||
@Field
|
@Field
|
||||||
def utils
|
def commit_id // necessary to pass a variable from one stage to another
|
||||||
|
|
||||||
def buildMatrix = [
|
|
||||||
[ "enabled": true, "os" : "linux", "withGpu": true, "withNccl": true, "withOmp": true, "pythonVersion": "2.7", "cudaVersion": "9.1" ],
|
|
||||||
[ "enabled": true, "os" : "linux", "withGpu": true, "withNccl": true, "withOmp": true, "pythonVersion": "2.7", "cudaVersion": "8.0" ],
|
|
||||||
[ "enabled": true, "os" : "linux", "withGpu": true, "withNccl": false, "withOmp": true, "pythonVersion": "2.7", "cudaVersion": "8.0" ],
|
|
||||||
]
|
|
||||||
|
|
||||||
pipeline {
|
pipeline {
|
||||||
// Each stage specify its own agent
|
// Each stage specify its own agent
|
||||||
agent none
|
agent none
|
||||||
|
|
||||||
// Setup common job properties
|
environment {
|
||||||
options {
|
DOCKER_CACHE_ECR_ID = '492475357299'
|
||||||
ansiColor('xterm')
|
DOCKER_CACHE_ECR_REGION = 'us-west-2'
|
||||||
timestamps()
|
}
|
||||||
timeout(time: 120, unit: 'MINUTES')
|
|
||||||
buildDiscarder(logRotator(numToKeepStr: '10'))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Build stages
|
// Setup common job properties
|
||||||
stages {
|
options {
|
||||||
stage('Jenkins: Get sources') {
|
ansiColor('xterm')
|
||||||
agent {
|
timestamps()
|
||||||
label 'unrestricted'
|
timeout(time: 240, unit: 'MINUTES')
|
||||||
}
|
buildDiscarder(logRotator(numToKeepStr: '10'))
|
||||||
steps {
|
preserveStashes()
|
||||||
script {
|
}
|
||||||
utils = load('tests/ci_build/jenkins_tools.Groovy')
|
|
||||||
utils.checkoutSrcs()
|
// Build stages
|
||||||
}
|
stages {
|
||||||
stash name: 'srcs', excludes: '.git/'
|
stage('Jenkins Linux: Initialize') {
|
||||||
milestone label: 'Sources ready', ordinal: 1
|
agent { label 'job_initializer' }
|
||||||
}
|
steps {
|
||||||
}
|
script {
|
||||||
stage('Jenkins: Build & Test') {
|
checkoutSrcs()
|
||||||
steps {
|
commit_id = "${GIT_COMMIT}"
|
||||||
script {
|
|
||||||
parallel (buildMatrix.findAll{it['enabled']}.collectEntries{ c ->
|
|
||||||
def buildName = utils.getBuildName(c)
|
|
||||||
utils.buildFactory(buildName, c, false, this.&buildPlatformCmake)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
sh 'python3 tests/jenkins_get_approval.py'
|
||||||
|
stash name: 'srcs'
|
||||||
|
milestone ordinal: 1
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
stage('Jenkins Linux: Formatting Check') {
|
||||||
|
agent none
|
||||||
|
steps {
|
||||||
|
script {
|
||||||
|
parallel ([
|
||||||
|
'clang-tidy': { ClangTidy() },
|
||||||
|
'lint': { Lint() },
|
||||||
|
'sphinx-doc': { SphinxDoc() },
|
||||||
|
'doxygen': { Doxygen() }
|
||||||
|
])
|
||||||
|
}
|
||||||
|
milestone ordinal: 2
|
||||||
|
}
|
||||||
|
}
|
||||||
|
stage('Jenkins Linux: Build') {
|
||||||
|
agent none
|
||||||
|
steps {
|
||||||
|
script {
|
||||||
|
parallel ([
|
||||||
|
'build-cpu': { BuildCPU() },
|
||||||
|
'build-cpu-rabit-mock': { BuildCPUMock() },
|
||||||
|
'build-cpu-non-omp': { BuildCPUNonOmp() },
|
||||||
|
// Build reference, distribution-ready Python wheel with CUDA 10.0
|
||||||
|
// using CentOS 6 image
|
||||||
|
'build-gpu-cuda10.0': { BuildCUDA(cuda_version: '10.0') },
|
||||||
|
// The build-gpu-* builds below use Ubuntu image
|
||||||
|
'build-gpu-cuda10.1': { BuildCUDA(cuda_version: '10.1') },
|
||||||
|
'build-gpu-cuda10.2': { BuildCUDA(cuda_version: '10.2') },
|
||||||
|
'build-gpu-cuda11.0': { BuildCUDA(cuda_version: '11.0') },
|
||||||
|
'build-jvm-packages-gpu-cuda10.0': { BuildJVMPackagesWithCUDA(spark_version: '3.0.0', cuda_version: '10.0') },
|
||||||
|
'build-jvm-packages': { BuildJVMPackages(spark_version: '3.0.0') },
|
||||||
|
'build-jvm-doc': { BuildJVMDoc() }
|
||||||
|
])
|
||||||
|
}
|
||||||
|
milestone ordinal: 3
|
||||||
|
}
|
||||||
|
}
|
||||||
|
stage('Jenkins Linux: Test') {
|
||||||
|
agent none
|
||||||
|
steps {
|
||||||
|
script {
|
||||||
|
parallel ([
|
||||||
|
'test-python-cpu': { TestPythonCPU() },
|
||||||
|
'test-python-gpu-cuda10.2': { TestPythonGPU(host_cuda_version: '10.2') },
|
||||||
|
'test-python-gpu-cuda11.0-cross': { TestPythonGPU(artifact_cuda_version: '10.0', host_cuda_version: '11.0') },
|
||||||
|
'test-python-gpu-cuda11.0': { TestPythonGPU(artifact_cuda_version: '11.0', host_cuda_version: '11.0') },
|
||||||
|
'test-python-mgpu-cuda10.2': { TestPythonGPU(artifact_cuda_version: '10.2', host_cuda_version: '10.2', multi_gpu: true) },
|
||||||
|
'test-cpp-gpu-cuda10.2': { TestCppGPU(artifact_cuda_version: '10.2', host_cuda_version: '10.2') },
|
||||||
|
'test-cpp-gpu-cuda11.0': { TestCppGPU(artifact_cuda_version: '11.0', host_cuda_version: '11.0') },
|
||||||
|
'test-jvm-jdk8-cuda10.0': { CrossTestJVMwithJDKGPU(artifact_cuda_version: '10.0', host_cuda_version: '10.0') },
|
||||||
|
'test-jvm-jdk8': { CrossTestJVMwithJDK(jdk_version: '8', spark_version: '3.0.0') },
|
||||||
|
'test-jvm-jdk11': { CrossTestJVMwithJDK(jdk_version: '11') },
|
||||||
|
'test-jvm-jdk12': { CrossTestJVMwithJDK(jdk_version: '12') },
|
||||||
|
'test-r-3.5.3': { TestR(use_r35: true) }
|
||||||
|
])
|
||||||
|
}
|
||||||
|
milestone ordinal: 4
|
||||||
|
}
|
||||||
|
}
|
||||||
|
stage('Jenkins Linux: Deploy') {
|
||||||
|
agent none
|
||||||
|
steps {
|
||||||
|
script {
|
||||||
|
parallel ([
|
||||||
|
'deploy-jvm-packages': { DeployJVMPackages(spark_version: '3.0.0') }
|
||||||
|
])
|
||||||
|
}
|
||||||
|
milestone ordinal: 5
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
// check out source code from git
|
||||||
* Build platform and test it via cmake.
|
def checkoutSrcs() {
|
||||||
*/
|
retry(5) {
|
||||||
def buildPlatformCmake(buildName, conf, nodeReq, dockerTarget) {
|
try {
|
||||||
def opts = utils.cmakeOptions(conf)
|
timeout(time: 2, unit: 'MINUTES') {
|
||||||
// Destination dir for artifacts
|
checkout scm
|
||||||
def distDir = "dist/${buildName}"
|
sh 'git submodule update --init'
|
||||||
def dockerArgs = ""
|
}
|
||||||
if(conf["withGpu"]){
|
} catch (exc) {
|
||||||
dockerArgs = "--build-arg CUDA_VERSION=" + conf["cudaVersion"]
|
deleteDir()
|
||||||
}
|
error "Failed to fetch source codes"
|
||||||
// Build node - this is returned result
|
|
||||||
node(nodeReq) {
|
|
||||||
unstash name: 'srcs'
|
|
||||||
echo """
|
|
||||||
|===== XGBoost CMake build =====
|
|
||||||
| dockerTarget: ${dockerTarget}
|
|
||||||
| cmakeOpts : ${opts}
|
|
||||||
|=========================
|
|
||||||
""".stripMargin('|')
|
|
||||||
// Invoke command inside docker
|
|
||||||
sh """
|
|
||||||
${dockerRun} ${dockerTarget} ${dockerArgs} tests/ci_build/build_via_cmake.sh ${opts}
|
|
||||||
${dockerRun} ${dockerTarget} ${dockerArgs} tests/ci_build/test_${dockerTarget}.sh
|
|
||||||
"""
|
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def GetCUDABuildContainerType(cuda_version) {
|
||||||
|
return (cuda_version == ref_cuda_ver) ? 'gpu_build_centos6' : 'gpu_build'
|
||||||
|
}
|
||||||
|
|
||||||
|
def ClangTidy() {
|
||||||
|
node('linux && cpu_build') {
|
||||||
|
unstash name: 'srcs'
|
||||||
|
echo "Running clang-tidy job..."
|
||||||
|
def container_type = "clang_tidy"
|
||||||
|
def docker_binary = "docker"
|
||||||
|
def dockerArgs = "--build-arg CUDA_VERSION=10.1"
|
||||||
|
sh """
|
||||||
|
${dockerRun} ${container_type} ${docker_binary} ${dockerArgs} python3 tests/ci_build/tidy.py
|
||||||
|
"""
|
||||||
|
deleteDir()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def Lint() {
|
||||||
|
node('linux && cpu') {
|
||||||
|
unstash name: 'srcs'
|
||||||
|
echo "Running lint..."
|
||||||
|
def container_type = "cpu"
|
||||||
|
def docker_binary = "docker"
|
||||||
|
sh """
|
||||||
|
${dockerRun} ${container_type} ${docker_binary} bash -c "source activate cpu_test && make lint"
|
||||||
|
"""
|
||||||
|
deleteDir()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def SphinxDoc() {
|
||||||
|
node('linux && cpu') {
|
||||||
|
unstash name: 'srcs'
|
||||||
|
echo "Running sphinx-doc..."
|
||||||
|
def container_type = "cpu"
|
||||||
|
def docker_binary = "docker"
|
||||||
|
def docker_extra_params = "CI_DOCKER_EXTRA_PARAMS_INIT='-e SPHINX_GIT_BRANCH=${BRANCH_NAME}'"
|
||||||
|
sh """#!/bin/bash
|
||||||
|
${docker_extra_params} ${dockerRun} ${container_type} ${docker_binary} bash -c "source activate cpu_test && make -C doc html"
|
||||||
|
"""
|
||||||
|
deleteDir()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def Doxygen() {
|
||||||
|
node('linux && cpu') {
|
||||||
|
unstash name: 'srcs'
|
||||||
|
echo "Running doxygen..."
|
||||||
|
def container_type = "cpu"
|
||||||
|
def docker_binary = "docker"
|
||||||
|
sh """
|
||||||
|
${dockerRun} ${container_type} ${docker_binary} tests/ci_build/doxygen.sh ${BRANCH_NAME}
|
||||||
|
"""
|
||||||
|
if (env.BRANCH_NAME == 'master' || env.BRANCH_NAME.startsWith('release')) {
|
||||||
|
echo 'Uploading doc...'
|
||||||
|
s3Upload file: "build/${BRANCH_NAME}.tar.bz2", bucket: 'xgboost-docs', acl: 'PublicRead', path: "doxygen/${BRANCH_NAME}.tar.bz2"
|
||||||
|
}
|
||||||
|
deleteDir()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def BuildCPU() {
|
||||||
|
node('linux && cpu') {
|
||||||
|
unstash name: 'srcs'
|
||||||
|
echo "Build CPU"
|
||||||
|
def container_type = "cpu"
|
||||||
|
def docker_binary = "docker"
|
||||||
|
sh """
|
||||||
|
${dockerRun} ${container_type} ${docker_binary} rm -fv dmlc-core/include/dmlc/build_config_default.h
|
||||||
|
# This step is not necessary, but here we include it, to ensure that DMLC_CORE_USE_CMAKE flag is correctly propagated
|
||||||
|
# We want to make sure that we use the configured header build/dmlc/build_config.h instead of include/dmlc/build_config_default.h.
|
||||||
|
# See discussion at https://github.com/dmlc/xgboost/issues/5510
|
||||||
|
${dockerRun} ${container_type} ${docker_binary} tests/ci_build/build_via_cmake.sh -DPLUGIN_LZ4=ON -DPLUGIN_DENSE_PARSER=ON
|
||||||
|
${dockerRun} ${container_type} ${docker_binary} build/testxgboost
|
||||||
|
"""
|
||||||
|
// Sanitizer test
|
||||||
|
def docker_extra_params = "CI_DOCKER_EXTRA_PARAMS_INIT='-e ASAN_SYMBOLIZER_PATH=/usr/bin/llvm-symbolizer -e ASAN_OPTIONS=symbolize=1 -e UBSAN_OPTIONS=print_stacktrace=1:log_path=ubsan_error.log --cap-add SYS_PTRACE'"
|
||||||
|
sh """
|
||||||
|
${dockerRun} ${container_type} ${docker_binary} tests/ci_build/build_via_cmake.sh -DUSE_SANITIZER=ON -DENABLED_SANITIZERS="address;leak;undefined" \
|
||||||
|
-DCMAKE_BUILD_TYPE=Debug -DSANITIZER_PATH=/usr/lib/x86_64-linux-gnu/
|
||||||
|
${docker_extra_params} ${dockerRun} ${container_type} ${docker_binary} build/testxgboost
|
||||||
|
"""
|
||||||
|
|
||||||
|
stash name: 'xgboost_cli', includes: 'xgboost'
|
||||||
|
deleteDir()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def BuildCPUMock() {
|
||||||
|
node('linux && cpu') {
|
||||||
|
unstash name: 'srcs'
|
||||||
|
echo "Build CPU with rabit mock"
|
||||||
|
def container_type = "cpu"
|
||||||
|
def docker_binary = "docker"
|
||||||
|
sh """
|
||||||
|
${dockerRun} ${container_type} ${docker_binary} tests/ci_build/build_mock_cmake.sh
|
||||||
|
"""
|
||||||
|
echo 'Stashing rabit C++ test executable (xgboost)...'
|
||||||
|
stash name: 'xgboost_rabit_tests', includes: 'xgboost'
|
||||||
|
deleteDir()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def BuildCPUNonOmp() {
|
||||||
|
node('linux && cpu') {
|
||||||
|
unstash name: 'srcs'
|
||||||
|
echo "Build CPU without OpenMP"
|
||||||
|
def container_type = "cpu"
|
||||||
|
def docker_binary = "docker"
|
||||||
|
sh """
|
||||||
|
${dockerRun} ${container_type} ${docker_binary} tests/ci_build/build_via_cmake.sh -DUSE_OPENMP=OFF
|
||||||
|
"""
|
||||||
|
echo "Running Non-OpenMP C++ test..."
|
||||||
|
sh """
|
||||||
|
${dockerRun} ${container_type} ${docker_binary} build/testxgboost
|
||||||
|
"""
|
||||||
|
deleteDir()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def BuildCUDA(args) {
|
||||||
|
node('linux && cpu_build') {
|
||||||
|
unstash name: 'srcs'
|
||||||
|
echo "Build with CUDA ${args.cuda_version}"
|
||||||
|
def container_type = GetCUDABuildContainerType(args.cuda_version)
|
||||||
|
def docker_binary = "docker"
|
||||||
|
def docker_args = "--build-arg CUDA_VERSION=${args.cuda_version}"
|
||||||
|
def arch_flag = ""
|
||||||
|
if (env.BRANCH_NAME != 'master' && !(env.BRANCH_NAME.startsWith('release'))) {
|
||||||
|
arch_flag = "-DGPU_COMPUTE_VER=75"
|
||||||
|
}
|
||||||
|
sh """
|
||||||
|
${dockerRun} ${container_type} ${docker_binary} ${docker_args} tests/ci_build/build_via_cmake.sh -DUSE_CUDA=ON -DUSE_NCCL=ON -DOPEN_MP:BOOL=ON -DHIDE_CXX_SYMBOLS=ON ${arch_flag}
|
||||||
|
${dockerRun} ${container_type} ${docker_binary} ${docker_args} bash -c "cd python-package && rm -rf dist/* && python setup.py bdist_wheel --universal"
|
||||||
|
${dockerRun} ${container_type} ${docker_binary} ${docker_args} python tests/ci_build/rename_whl.py python-package/dist/*.whl ${commit_id} manylinux2010_x86_64
|
||||||
|
"""
|
||||||
|
echo 'Stashing Python wheel...'
|
||||||
|
stash name: "xgboost_whl_cuda${args.cuda_version}", includes: 'python-package/dist/*.whl'
|
||||||
|
if (args.cuda_version == ref_cuda_ver && (env.BRANCH_NAME == 'master' || env.BRANCH_NAME.startsWith('release'))) {
|
||||||
|
echo 'Uploading Python wheel...'
|
||||||
|
path = ("${BRANCH_NAME}" == 'master') ? '' : "${BRANCH_NAME}/"
|
||||||
|
s3Upload bucket: 'xgboost-nightly-builds', path: path, acl: 'PublicRead', workingDir: 'python-package/dist', includePathPattern:'**/*.whl'
|
||||||
|
}
|
||||||
|
echo 'Stashing C++ test executable (testxgboost)...'
|
||||||
|
stash name: "xgboost_cpp_tests_cuda${args.cuda_version}", includes: 'build/testxgboost'
|
||||||
|
deleteDir()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def BuildJVMPackagesWithCUDA(args) {
|
||||||
|
node('linux && mgpu') {
|
||||||
|
unstash name: 'srcs'
|
||||||
|
echo "Build XGBoost4J-Spark with Spark ${args.spark_version}, CUDA ${args.cuda_version}"
|
||||||
|
def container_type = "jvm_gpu_build"
|
||||||
|
def docker_binary = "nvidia-docker"
|
||||||
|
def docker_args = "--build-arg CUDA_VERSION=${args.cuda_version}"
|
||||||
|
def arch_flag = ""
|
||||||
|
if (env.BRANCH_NAME != 'master' && !(env.BRANCH_NAME.startsWith('release'))) {
|
||||||
|
arch_flag = "-DGPU_COMPUTE_VER=75"
|
||||||
|
}
|
||||||
|
// Use only 4 CPU cores
|
||||||
|
def docker_extra_params = "CI_DOCKER_EXTRA_PARAMS_INIT='--cpuset-cpus 0-3'"
|
||||||
|
sh """
|
||||||
|
${docker_extra_params} ${dockerRun} ${container_type} ${docker_binary} ${docker_args} tests/ci_build/build_jvm_packages.sh ${args.spark_version} -Duse.cuda=ON $arch_flag
|
||||||
|
"""
|
||||||
|
echo "Stashing XGBoost4J JAR with CUDA ${args.cuda_version} ..."
|
||||||
|
stash name: 'xgboost4j_jar_gpu', includes: "jvm-packages/xgboost4j/target/*.jar,jvm-packages/xgboost4j-spark/target/*.jar,jvm-packages/xgboost4j-example/target/*.jar"
|
||||||
|
deleteDir()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def BuildJVMPackages(args) {
|
||||||
|
node('linux && cpu') {
|
||||||
|
unstash name: 'srcs'
|
||||||
|
echo "Build XGBoost4J-Spark with Spark ${args.spark_version}"
|
||||||
|
def container_type = "jvm"
|
||||||
|
def docker_binary = "docker"
|
||||||
|
// Use only 4 CPU cores
|
||||||
|
def docker_extra_params = "CI_DOCKER_EXTRA_PARAMS_INIT='--cpuset-cpus 0-3'"
|
||||||
|
sh """
|
||||||
|
${docker_extra_params} ${dockerRun} ${container_type} ${docker_binary} tests/ci_build/build_jvm_packages.sh ${args.spark_version}
|
||||||
|
"""
|
||||||
|
echo 'Stashing XGBoost4J JAR...'
|
||||||
|
stash name: 'xgboost4j_jar', includes: "jvm-packages/xgboost4j/target/*.jar,jvm-packages/xgboost4j-spark/target/*.jar,jvm-packages/xgboost4j-example/target/*.jar"
|
||||||
|
deleteDir()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def BuildJVMDoc() {
|
||||||
|
node('linux && cpu') {
|
||||||
|
unstash name: 'srcs'
|
||||||
|
echo "Building JVM doc..."
|
||||||
|
def container_type = "jvm"
|
||||||
|
def docker_binary = "docker"
|
||||||
|
sh """
|
||||||
|
${dockerRun} ${container_type} ${docker_binary} tests/ci_build/build_jvm_doc.sh ${BRANCH_NAME}
|
||||||
|
"""
|
||||||
|
if (env.BRANCH_NAME == 'master' || env.BRANCH_NAME.startsWith('release')) {
|
||||||
|
echo 'Uploading doc...'
|
||||||
|
s3Upload file: "jvm-packages/${BRANCH_NAME}.tar.bz2", bucket: 'xgboost-docs', acl: 'PublicRead', path: "${BRANCH_NAME}.tar.bz2"
|
||||||
|
}
|
||||||
|
deleteDir()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def TestPythonCPU() {
|
||||||
|
node('linux && cpu') {
|
||||||
|
unstash name: "xgboost_whl_cuda${ref_cuda_ver}"
|
||||||
|
unstash name: 'srcs'
|
||||||
|
unstash name: 'xgboost_cli'
|
||||||
|
echo "Test Python CPU"
|
||||||
|
def container_type = "cpu"
|
||||||
|
def docker_binary = "docker"
|
||||||
|
sh """
|
||||||
|
${dockerRun} ${container_type} ${docker_binary} tests/ci_build/test_python.sh cpu
|
||||||
|
"""
|
||||||
|
deleteDir()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def TestPythonGPU(args) {
|
||||||
|
def nodeReq = (args.multi_gpu) ? 'linux && mgpu' : 'linux && gpu'
|
||||||
|
def artifact_cuda_version = (args.artifact_cuda_version) ?: ref_cuda_ver
|
||||||
|
node(nodeReq) {
|
||||||
|
unstash name: "xgboost_whl_cuda${artifact_cuda_version}"
|
||||||
|
unstash name: "xgboost_cpp_tests_cuda${artifact_cuda_version}"
|
||||||
|
unstash name: 'srcs'
|
||||||
|
echo "Test Python GPU: CUDA ${args.host_cuda_version}"
|
||||||
|
def container_type = "gpu"
|
||||||
|
def docker_binary = "nvidia-docker"
|
||||||
|
def docker_args = "--build-arg CUDA_VERSION=${args.host_cuda_version}"
|
||||||
|
if (args.multi_gpu) {
|
||||||
|
echo "Using multiple GPUs"
|
||||||
|
// Allocate extra space in /dev/shm to enable NCCL
|
||||||
|
def docker_extra_params = "CI_DOCKER_EXTRA_PARAMS_INIT='--shm-size=4g'"
|
||||||
|
sh """
|
||||||
|
${docker_extra_params} ${dockerRun} ${container_type} ${docker_binary} ${docker_args} tests/ci_build/test_python.sh mgpu
|
||||||
|
"""
|
||||||
|
} else {
|
||||||
|
echo "Using a single GPU"
|
||||||
|
sh """
|
||||||
|
${dockerRun} ${container_type} ${docker_binary} ${docker_args} tests/ci_build/test_python.sh gpu
|
||||||
|
"""
|
||||||
|
}
|
||||||
|
deleteDir()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def TestCppRabit() {
|
||||||
|
node(nodeReq) {
|
||||||
|
unstash name: 'xgboost_rabit_tests'
|
||||||
|
unstash name: 'srcs'
|
||||||
|
echo "Test C++, rabit mock on"
|
||||||
|
def container_type = "cpu"
|
||||||
|
def docker_binary = "docker"
|
||||||
|
sh """
|
||||||
|
${dockerRun} ${container_type} ${docker_binary} tests/ci_build/runxgb.sh xgboost tests/ci_build/approx.conf.in
|
||||||
|
"""
|
||||||
|
deleteDir()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def TestCppGPU(args) {
|
||||||
|
def nodeReq = 'linux && mgpu'
|
||||||
|
def artifact_cuda_version = (args.artifact_cuda_version) ?: ref_cuda_ver
|
||||||
|
node(nodeReq) {
|
||||||
|
unstash name: "xgboost_cpp_tests_cuda${artifact_cuda_version}"
|
||||||
|
unstash name: 'srcs'
|
||||||
|
echo "Test C++, CUDA ${args.host_cuda_version}"
|
||||||
|
def container_type = "gpu"
|
||||||
|
def docker_binary = "nvidia-docker"
|
||||||
|
def docker_args = "--build-arg CUDA_VERSION=${args.host_cuda_version}"
|
||||||
|
sh "${dockerRun} ${container_type} ${docker_binary} ${docker_args} build/testxgboost"
|
||||||
|
deleteDir()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def CrossTestJVMwithJDKGPU(args) {
|
||||||
|
def nodeReq = 'linux && mgpu'
|
||||||
|
node(nodeReq) {
|
||||||
|
unstash name: "xgboost4j_jar_gpu"
|
||||||
|
unstash name: 'srcs'
|
||||||
|
if (args.spark_version != null) {
|
||||||
|
echo "Test XGBoost4J on a machine with JDK ${args.jdk_version}, Spark ${args.spark_version}, CUDA ${args.host_cuda_version}"
|
||||||
|
} else {
|
||||||
|
echo "Test XGBoost4J on a machine with JDK ${args.jdk_version}, CUDA ${args.host_cuda_version}"
|
||||||
|
}
|
||||||
|
def container_type = "gpu_jvm"
|
||||||
|
def docker_binary = "nvidia-docker"
|
||||||
|
def docker_args = "--build-arg CUDA_VERSION=${args.host_cuda_version}"
|
||||||
|
sh "${dockerRun} ${container_type} ${docker_binary} ${docker_args} tests/ci_build/test_jvm_gpu_cross.sh"
|
||||||
|
deleteDir()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def CrossTestJVMwithJDK(args) {
|
||||||
|
node('linux && cpu') {
|
||||||
|
unstash name: 'xgboost4j_jar'
|
||||||
|
unstash name: 'srcs'
|
||||||
|
if (args.spark_version != null) {
|
||||||
|
echo "Test XGBoost4J on a machine with JDK ${args.jdk_version}, Spark ${args.spark_version}"
|
||||||
|
} else {
|
||||||
|
echo "Test XGBoost4J on a machine with JDK ${args.jdk_version}"
|
||||||
|
}
|
||||||
|
def container_type = "jvm_cross"
|
||||||
|
def docker_binary = "docker"
|
||||||
|
def spark_arg = (args.spark_version != null) ? "--build-arg SPARK_VERSION=${args.spark_version}" : ""
|
||||||
|
def docker_args = "--build-arg JDK_VERSION=${args.jdk_version} ${spark_arg}"
|
||||||
|
// Run integration tests only when spark_version is given
|
||||||
|
def docker_extra_params = (args.spark_version != null) ? "CI_DOCKER_EXTRA_PARAMS_INIT='-e RUN_INTEGRATION_TEST=1'" : ""
|
||||||
|
sh """
|
||||||
|
${docker_extra_params} ${dockerRun} ${container_type} ${docker_binary} ${docker_args} tests/ci_build/test_jvm_cross.sh
|
||||||
|
"""
|
||||||
|
deleteDir()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def TestR(args) {
|
||||||
|
node('linux && cpu') {
|
||||||
|
unstash name: 'srcs'
|
||||||
|
echo "Test R package"
|
||||||
|
def container_type = "rproject"
|
||||||
|
def docker_binary = "docker"
|
||||||
|
def use_r35_flag = (args.use_r35) ? "1" : "0"
|
||||||
|
def docker_args = "--build-arg USE_R35=${use_r35_flag}"
|
||||||
|
sh """
|
||||||
|
${dockerRun} ${container_type} ${docker_binary} ${docker_args} tests/ci_build/build_test_rpkg.sh || tests/ci_build/print_r_stacktrace.sh
|
||||||
|
"""
|
||||||
|
deleteDir()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def DeployJVMPackages(args) {
|
||||||
|
node('linux && cpu') {
|
||||||
|
unstash name: 'srcs'
|
||||||
|
if (env.BRANCH_NAME == 'master' || env.BRANCH_NAME.startsWith('release')) {
|
||||||
|
echo 'Deploying to xgboost-maven-repo S3 repo...'
|
||||||
|
sh """
|
||||||
|
${dockerRun} jvm docker tests/ci_build/deploy_jvm_packages.sh ${args.spark_version} 0
|
||||||
|
"""
|
||||||
|
sh """
|
||||||
|
${dockerRun} jvm_gpu_build docker --build-arg CUDA_VERSION=10.0 tests/ci_build/deploy_jvm_packages.sh ${args.spark_version} 1
|
||||||
|
"""
|
||||||
|
}
|
||||||
|
deleteDir()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,121 +0,0 @@
|
|||||||
#!/usr/bin/groovy
|
|
||||||
// -*- mode: groovy -*-
|
|
||||||
// Jenkins pipeline
|
|
||||||
// See documents at https://jenkins.io/doc/book/pipeline/jenkinsfile/
|
|
||||||
|
|
||||||
import groovy.transform.Field
|
|
||||||
|
|
||||||
/* Restricted tasks: tasks generating artifacts, such as binary wheels and
|
|
||||||
documentation */
|
|
||||||
|
|
||||||
// Command to run command inside a docker container
|
|
||||||
def dockerRun = 'tests/ci_build/ci_build.sh'
|
|
||||||
// Utility functions
|
|
||||||
@Field
|
|
||||||
def utils
|
|
||||||
|
|
||||||
def buildMatrix = [
|
|
||||||
[ "enabled": true, "os" : "linux", "withGpu": true, "withNccl": true, "withOmp": true, "pythonVersion": "2.7", "cudaVersion": "9.2" ],
|
|
||||||
[ "enabled": true, "os" : "linux", "withGpu": true, "withNccl": true, "withOmp": true, "pythonVersion": "2.7", "cudaVersion": "8.0" ],
|
|
||||||
[ "enabled": true, "os" : "linux", "withGpu": true, "withNccl": false, "withOmp": true, "pythonVersion": "2.7", "cudaVersion": "8.0" ],
|
|
||||||
]
|
|
||||||
|
|
||||||
pipeline {
|
|
||||||
// Each stage specify its own agent
|
|
||||||
agent none
|
|
||||||
|
|
||||||
// Setup common job properties
|
|
||||||
options {
|
|
||||||
ansiColor('xterm')
|
|
||||||
timestamps()
|
|
||||||
timeout(time: 120, unit: 'MINUTES')
|
|
||||||
buildDiscarder(logRotator(numToKeepStr: '10'))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Build stages
|
|
||||||
stages {
|
|
||||||
stage('Jenkins: Get sources') {
|
|
||||||
agent {
|
|
||||||
label 'restricted'
|
|
||||||
}
|
|
||||||
steps {
|
|
||||||
script {
|
|
||||||
utils = load('tests/ci_build/jenkins_tools.Groovy')
|
|
||||||
utils.checkoutSrcs()
|
|
||||||
}
|
|
||||||
stash name: 'srcs', excludes: '.git/'
|
|
||||||
milestone label: 'Sources ready', ordinal: 1
|
|
||||||
}
|
|
||||||
}
|
|
||||||
stage('Jenkins: Build doc') {
|
|
||||||
agent {
|
|
||||||
label 'linux && cpu && restricted'
|
|
||||||
}
|
|
||||||
steps {
|
|
||||||
unstash name: 'srcs'
|
|
||||||
script {
|
|
||||||
def commit_id = "${GIT_COMMIT}"
|
|
||||||
def branch_name = "${GIT_LOCAL_BRANCH}"
|
|
||||||
echo 'Building doc...'
|
|
||||||
dir ('jvm-packages') {
|
|
||||||
sh "bash ./build_doc.sh ${commit_id}"
|
|
||||||
archiveArtifacts artifacts: "${commit_id}.tar.bz2", allowEmptyArchive: true
|
|
||||||
echo 'Deploying doc...'
|
|
||||||
withAWS(credentials:'xgboost-doc-bucket') {
|
|
||||||
s3Upload file: "${commit_id}.tar.bz2", bucket: 'xgboost-docs', acl: 'PublicRead', path: "${branch_name}.tar.bz2"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
stage('Jenkins: Build artifacts') {
|
|
||||||
steps {
|
|
||||||
script {
|
|
||||||
parallel (buildMatrix.findAll{it['enabled']}.collectEntries{ c ->
|
|
||||||
def buildName = utils.getBuildName(c)
|
|
||||||
utils.buildFactory(buildName, c, true, this.&buildPlatformCmake)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Build platform and test it via cmake.
|
|
||||||
*/
|
|
||||||
def buildPlatformCmake(buildName, conf, nodeReq, dockerTarget) {
|
|
||||||
def opts = utils.cmakeOptions(conf)
|
|
||||||
// Destination dir for artifacts
|
|
||||||
def distDir = "dist/${buildName}"
|
|
||||||
def dockerArgs = ""
|
|
||||||
if(conf["withGpu"]){
|
|
||||||
dockerArgs = "--build-arg CUDA_VERSION=" + conf["cudaVersion"]
|
|
||||||
}
|
|
||||||
// Build node - this is returned result
|
|
||||||
node(nodeReq) {
|
|
||||||
unstash name: 'srcs'
|
|
||||||
echo """
|
|
||||||
|===== XGBoost CMake build =====
|
|
||||||
| dockerTarget: ${dockerTarget}
|
|
||||||
| cmakeOpts : ${opts}
|
|
||||||
|=========================
|
|
||||||
""".stripMargin('|')
|
|
||||||
// Invoke command inside docker
|
|
||||||
sh """
|
|
||||||
${dockerRun} ${dockerTarget} ${dockerArgs} tests/ci_build/build_via_cmake.sh ${opts}
|
|
||||||
${dockerRun} ${dockerTarget} ${dockerArgs} bash -c "cd python-package; rm -f dist/*; python setup.py bdist_wheel --universal"
|
|
||||||
rm -rf "${distDir}"; mkdir -p "${distDir}/py"
|
|
||||||
cp xgboost "${distDir}"
|
|
||||||
cp -r lib "${distDir}"
|
|
||||||
cp -r python-package/dist "${distDir}/py"
|
|
||||||
# Test the wheel for compatibility on a barebones CPU container
|
|
||||||
${dockerRun} release ${dockerArgs} bash -c " \
|
|
||||||
auditwheel show xgboost-*-py2-none-any.whl
|
|
||||||
pip install --user python-package/dist/xgboost-*-none-any.whl && \
|
|
||||||
python -m nose tests/python"
|
|
||||||
"""
|
|
||||||
archiveArtifacts artifacts: "${distDir}/**/*.*", allowEmptyArchive: true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
143 Jenkinsfile-win64 (new file)
@@ -0,0 +1,143 @@
|
|||||||
|
#!/usr/bin/groovy
|
||||||
|
// -*- mode: groovy -*-
|
||||||
|
|
||||||
|
/* Jenkins pipeline for Windows AMD64 target */
|
||||||
|
|
||||||
|
import groovy.transform.Field
|
||||||
|
|
||||||
|
@Field
|
||||||
|
def commit_id // necessary to pass a variable from one stage to another
|
||||||
|
|
||||||
|
pipeline {
|
||||||
|
agent none
|
||||||
|
|
||||||
|
// Setup common job properties
|
||||||
|
options {
|
||||||
|
timestamps()
|
||||||
|
timeout(time: 240, unit: 'MINUTES')
|
||||||
|
buildDiscarder(logRotator(numToKeepStr: '10'))
|
||||||
|
preserveStashes()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build stages
|
||||||
|
stages {
|
||||||
|
stage('Jenkins Win64: Initialize') {
|
||||||
|
agent { label 'job_initializer' }
|
||||||
|
steps {
|
||||||
|
script {
|
||||||
|
checkoutSrcs()
|
||||||
|
commit_id = "${GIT_COMMIT}"
|
||||||
|
}
|
||||||
|
sh 'python3 tests/jenkins_get_approval.py'
|
||||||
|
stash name: 'srcs'
|
||||||
|
milestone ordinal: 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
stage('Jenkins Win64: Build') {
|
||||||
|
agent none
|
||||||
|
steps {
|
||||||
|
script {
|
||||||
|
parallel ([
|
||||||
|
'build-win64-cuda10.1': { BuildWin64() }
|
||||||
|
])
|
||||||
|
}
|
||||||
|
milestone ordinal: 2
|
||||||
|
}
|
||||||
|
}
|
||||||
|
stage('Jenkins Win64: Test') {
|
||||||
|
agent none
|
||||||
|
steps {
|
||||||
|
script {
|
||||||
|
parallel ([
|
||||||
|
'test-win64-cuda10.1': { TestWin64() },
|
||||||
|
])
|
||||||
|
}
|
||||||
|
milestone ordinal: 3
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// check out source code from git
|
||||||
|
def checkoutSrcs() {
|
||||||
|
retry(5) {
|
||||||
|
try {
|
||||||
|
timeout(time: 2, unit: 'MINUTES') {
|
||||||
|
checkout scm
|
||||||
|
sh 'git submodule update --init'
|
||||||
|
}
|
||||||
|
} catch (exc) {
|
||||||
|
deleteDir()
|
||||||
|
error "Failed to fetch source codes"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def BuildWin64() {
|
||||||
|
node('win64 && cuda10_unified') {
|
||||||
|
unstash name: 'srcs'
|
||||||
|
echo "Building XGBoost for Windows AMD64 target..."
|
||||||
|
bat "nvcc --version"
|
||||||
|
def arch_flag = ""
|
||||||
|
if (env.BRANCH_NAME != 'master' && !(env.BRANCH_NAME.startsWith('release'))) {
|
||||||
|
arch_flag = "-DGPU_COMPUTE_VER=75"
|
||||||
|
}
|
||||||
|
bat """
|
||||||
|
mkdir build
|
||||||
|
cd build
|
||||||
|
cmake .. -G"Visual Studio 15 2017 Win64" -DUSE_CUDA=ON -DCMAKE_VERBOSE_MAKEFILE=ON -DGOOGLE_TEST=ON -DUSE_DMLC_GTEST=ON ${arch_flag}
|
||||||
|
"""
|
||||||
|
bat """
|
||||||
|
cd build
|
||||||
|
"C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community\\MSBuild\\15.0\\Bin\\MSBuild.exe" xgboost.sln /m /p:Configuration=Release /nodeReuse:false
|
||||||
|
"""
|
||||||
|
bat """
|
||||||
|
cd python-package
|
||||||
|
conda activate && python setup.py bdist_wheel --universal && for /R %%i in (dist\\*.whl) DO python ../tests/ci_build/rename_whl.py "%%i" ${commit_id} win_amd64
|
||||||
|
"""
|
||||||
|
echo "Insert vcomp140.dll (OpenMP runtime) into the wheel..."
|
||||||
|
bat """
|
||||||
|
cd python-package\\dist
|
||||||
|
COPY /B ..\\..\\tests\\ci_build\\insert_vcomp140.py
|
||||||
|
conda activate && python insert_vcomp140.py *.whl
|
||||||
|
"""
|
||||||
|
echo 'Stashing Python wheel...'
|
||||||
|
stash name: 'xgboost_whl', includes: 'python-package/dist/*.whl'
|
||||||
|
if (env.BRANCH_NAME == 'master' || env.BRANCH_NAME.startsWith('release')) {
|
||||||
|
echo 'Uploading Python wheel...'
|
||||||
|
path = ("${BRANCH_NAME}" == 'master') ? '' : "${BRANCH_NAME}/"
|
||||||
|
s3Upload bucket: 'xgboost-nightly-builds', path: path, acl: 'PublicRead', workingDir: 'python-package/dist', includePathPattern:'**/*.whl'
|
||||||
|
}
|
||||||
|
echo 'Stashing C++ test executable (testxgboost)...'
|
||||||
|
stash name: 'xgboost_cpp_tests', includes: 'build/testxgboost.exe'
|
||||||
|
stash name: 'xgboost_cli', includes: 'xgboost.exe'
|
||||||
|
deleteDir()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def TestWin64() {
|
||||||
|
node('win64 && cuda10_unified') {
|
||||||
|
unstash name: 'srcs'
|
||||||
|
unstash name: 'xgboost_whl'
|
||||||
|
unstash name: 'xgboost_cli'
|
||||||
|
unstash name: 'xgboost_cpp_tests'
|
||||||
|
echo "Test Win64"
|
||||||
|
bat "nvcc --version"
|
||||||
|
echo "Running C++ tests..."
|
||||||
|
bat "build\\testxgboost.exe"
|
||||||
|
echo "Installing Python dependencies..."
|
||||||
|
def env_name = 'win64_' + UUID.randomUUID().toString().replaceAll('-', '')
|
||||||
|
bat "conda env create -n ${env_name} --file=tests/ci_build/conda_env/win64_test.yml"
|
||||||
|
echo "Installing Python wheel..."
|
||||||
|
bat """
|
||||||
|
conda activate ${env_name} && for /R %%i in (python-package\\dist\\*.whl) DO python -m pip install "%%i"
|
||||||
|
"""
|
||||||
|
echo "Running Python tests..."
|
||||||
|
bat "conda activate ${env_name} && python -m pytest -v -s -rxXs --fulltrace tests\\python"
|
||||||
|
bat """
|
||||||
|
conda activate ${env_name} && python -m pytest -v -s -rxXs --fulltrace -m "(not slow) and (not mgpu)" tests\\python-gpu
|
||||||
|
"""
|
||||||
|
bat "conda env remove --name ${env_name}"
|
||||||
|
deleteDir()
|
||||||
|
}
|
||||||
|
}
|
||||||
208 LICENSE

@@ -1,13 +1,201 @@
-Copyright (c) 2016 by Contributors
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
+Apache License
+Version 2.0, January 2004
+http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction,
+and distribution as defined by Sections 1 through 9 of this document.
+
+"Licensor" shall mean the copyright owner or entity authorized by
+the copyright owner that is granting the License.
|
|
||||||
|
"Legal Entity" shall mean the union of the acting entity and all
|
||||||
|
other entities that control, are controlled by, or are under common
|
||||||
|
control with that entity. For the purposes of this definition,
|
||||||
|
"control" means (i) the power, direct or indirect, to cause the
|
||||||
|
direction or management of such entity, whether by contract or
|
||||||
|
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||||
|
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||||
|
|
||||||
|
"You" (or "Your") shall mean an individual or Legal Entity
|
||||||
|
exercising permissions granted by this License.
|
||||||
|
|
||||||
|
"Source" form shall mean the preferred form for making modifications,
|
||||||
|
including but not limited to software source code, documentation
|
||||||
|
source, and configuration files.
|
||||||
|
|
||||||
|
"Object" form shall mean any form resulting from mechanical
|
||||||
|
transformation or translation of a Source form, including but
|
||||||
|
not limited to compiled object code, generated documentation,
|
||||||
|
and conversions to other media types.
|
||||||
|
|
||||||
|
"Work" shall mean the work of authorship, whether in Source or
|
||||||
|
Object form, made available under the License, as indicated by a
|
||||||
|
copyright notice that is included in or attached to the work
|
||||||
|
(an example is provided in the Appendix below).
|
||||||
|
|
||||||
|
"Derivative Works" shall mean any work, whether in Source or Object
|
||||||
|
form, that is based on (or derived from) the Work and for which the
|
||||||
|
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:

(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.

You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright (c) 2019 by Contributors

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
169  Makefile
@@ -1,11 +1,3 @@
-ifndef config
-ifneq ("$(wildcard ./config.mk)","")
-	config = config.mk
-else
-	config = make/config.mk
-endif
-endif
-
 ifndef DMLC_CORE
 	DMLC_CORE = dmlc-core
 endif
@@ -30,23 +22,8 @@ ifndef MAKE_OK
 endif
 $(warning MAKE [$(MAKE)] - $(if $(MAKE_OK),checked OK,PROBLEM))
 
-ifeq ($(OS), Windows_NT)
-	UNAME="Windows"
-else
-	UNAME=$(shell uname)
-endif
-
-include $(config)
-ifeq ($(USE_OPENMP), 0)
-	export NO_OPENMP = 1
-endif
 include $(DMLC_CORE)/make/dmlc.mk
-
-# include the plugins
-ifdef XGB_PLUGINS
-include $(XGB_PLUGINS)
-endif
-
 # set compiler defaults for OSX versus *nix
 # let people override either
 OS := $(shell uname)
@@ -67,126 +44,40 @@ export CXX = g++
 endif
 endif
 
-export LDFLAGS= -pthread -lm $(ADD_LDFLAGS) $(DMLC_LDFLAGS) $(PLUGIN_LDFLAGS)
-export CFLAGS= -std=c++11 -Wall -Wno-unknown-pragmas -Iinclude $(ADD_CFLAGS) $(PLUGIN_CFLAGS)
+export CFLAGS= -DDMLC_LOG_CUSTOMIZE=1 -std=c++14 -Wall -Wno-unknown-pragmas -Iinclude $(ADD_CFLAGS)
 CFLAGS += -I$(DMLC_CORE)/include -I$(RABIT)/include -I$(GTEST_PATH)/include
-#java include path
-export JAVAINCFLAGS = -I${JAVA_HOME}/include -I./java
-
 ifeq ($(TEST_COVER), 1)
 	CFLAGS += -g -O0 -fprofile-arcs -ftest-coverage
 else
 	CFLAGS += -O3 -funroll-loops
-	ifeq ($(USE_SSE), 1)
-		CFLAGS += -msse2
-	endif
 endif
 
 ifndef LINT_LANG
 	LINT_LANG= "all"
 endif
 
-ifeq ($(UNAME), Windows)
-	XGBOOST_DYLIB = lib/xgboost.dll
-	JAVAINCFLAGS += -I${JAVA_HOME}/include/win32
-else
-	ifeq ($(UNAME), Darwin)
-		XGBOOST_DYLIB = lib/libxgboost.dylib
-		CFLAGS += -fPIC
-	else
-		XGBOOST_DYLIB = lib/libxgboost.so
-		CFLAGS += -fPIC
-	endif
-endif
-
-ifeq ($(UNAME), Linux)
-	LDFLAGS += -lrt
-	JAVAINCFLAGS += -I${JAVA_HOME}/include/linux
-endif
-
-ifeq ($(UNAME), Darwin)
-	JAVAINCFLAGS += -I${JAVA_HOME}/include/darwin
-endif
-
-OPENMP_FLAGS =
-ifeq ($(USE_OPENMP), 1)
-	OPENMP_FLAGS = -fopenmp
-else
-	OPENMP_FLAGS = -DDISABLE_OPENMP
-endif
-CFLAGS += $(OPENMP_FLAGS)
-
 # specify tensor path
-.PHONY: clean all lint clean_all doxygen rcpplint pypack Rpack Rbuild Rcheck java pylint
+.PHONY: clean all lint clean_all doxygen rcpplint pypack Rpack Rbuild Rcheck
 
-all: lib/libxgboost.a $(XGBOOST_DYLIB) xgboost
-
-$(DMLC_CORE)/libdmlc.a: $(wildcard $(DMLC_CORE)/src/*.cc $(DMLC_CORE)/src/*/*.cc)
-	+ cd $(DMLC_CORE); "$(MAKE)" libdmlc.a config=$(ROOTDIR)/$(config); cd $(ROOTDIR)
-
-$(RABIT)/lib/$(LIB_RABIT): $(wildcard $(RABIT)/src/*.cc)
-	+ cd $(RABIT); "$(MAKE)" lib/$(LIB_RABIT) USE_SSE=$(USE_SSE); cd $(ROOTDIR)
-
-jvm: jvm-packages/lib/libxgboost4j.so
-
-SRC = $(wildcard src/*.cc src/*/*.cc)
-ALL_OBJ = $(patsubst src/%.cc, build/%.o, $(SRC)) $(PLUGIN_OBJS)
-AMALGA_OBJ = amalgamation/xgboost-all0.o
-LIB_DEP = $(DMLC_CORE)/libdmlc.a $(RABIT)/lib/$(LIB_RABIT)
-ALL_DEP = $(filter-out build/cli_main.o, $(ALL_OBJ)) $(LIB_DEP)
-CLI_OBJ = build/cli_main.o
-include tests/cpp/xgboost_test.mk
-
 build/%.o: src/%.cc
 	@mkdir -p $(@D)
 	$(CXX) $(CFLAGS) -MM -MT build/$*.o $< >build/$*.d
 	$(CXX) -c $(CFLAGS) $< -o $@
 
-build_plugin/%.o: plugin/%.cc
-	@mkdir -p $(@D)
-	$(CXX) $(CFLAGS) -MM -MT build_plugin/$*.o $< >build_plugin/$*.d
-	$(CXX) -c $(CFLAGS) $< -o $@
-
 # The should be equivalent to $(ALL_OBJ) except for build/cli_main.o
 amalgamation/xgboost-all0.o: amalgamation/xgboost-all0.cc
 	$(CXX) -c $(CFLAGS) $< -o $@
 
-# Equivalent to lib/libxgboost_all.so
-lib/libxgboost_all.so: $(AMALGA_OBJ) $(LIB_DEP)
-	@mkdir -p $(@D)
-	$(CXX) $(CFLAGS) -shared -o $@ $(filter %.o %.a, $^) $(LDFLAGS)
-
-lib/libxgboost.a: $(ALL_DEP)
-	@mkdir -p $(@D)
-	ar crv $@ $(filter %.o, $?)
-
-lib/xgboost.dll lib/libxgboost.so lib/libxgboost.dylib: $(ALL_DEP)
-	@mkdir -p $(@D)
-	$(CXX) $(CFLAGS) -shared -o $@ $(filter %.o %a, $^) $(LDFLAGS)
-
-jvm-packages/lib/libxgboost4j.so: jvm-packages/xgboost4j/src/native/xgboost4j.cpp $(ALL_DEP)
-	@mkdir -p $(@D)
-	$(CXX) $(CFLAGS) $(JAVAINCFLAGS) -shared -o $@ $(filter %.cpp %.o %.a, $^) $(LDFLAGS)
-
-
-xgboost: $(CLI_OBJ) $(ALL_DEP)
-	$(CXX) $(CFLAGS) -o $@ $(filter %.o %.a, $^) $(LDFLAGS)
-
 rcpplint:
-	python2 dmlc-core/scripts/lint.py xgboost ${LINT_LANG} R-package/src
+	python3 dmlc-core/scripts/lint.py xgboost ${LINT_LANG} R-package/src
 
 lint: rcpplint
-	python2 dmlc-core/scripts/lint.py xgboost ${LINT_LANG} include src plugin python-package
-
-pylint:
-	flake8 --ignore E501 python-package
-	flake8 --ignore E501 tests/python
-
-test: $(ALL_TEST)
-	$(ALL_TEST)
-
-check: test
-	./tests/cpp/xgboost_test
+	python3 dmlc-core/scripts/lint.py --exclude_path python-package/xgboost/dmlc-core \
+		python-package/xgboost/include python-package/xgboost/lib \
+		python-package/xgboost/make python-package/xgboost/rabit \
+		python-package/xgboost/src --pylint-rc ${PWD}/python-package/.pylintrc xgboost \
+		${LINT_LANG} include src python-package
 
 ifeq ($(TEST_COVER), 1)
 cover: check
@@ -196,7 +87,7 @@ cover: check
 endif
 
 clean:
-	$(RM) -rf build build_plugin lib bin *~ */*~ */*/*~ */*/*/*~ */*.o */*/*.o */*/*/*.o #xgboost
+	$(RM) -rf build lib bin *~ */*~ */*/*~ */*/*/*~ */*.o */*/*.o */*/*/*.o #xgboost
 	$(RM) -rf build_tests *.gcov tests/cpp/xgboost_test
 	if [ -d "R-package/src" ]; then \
 		cd R-package/src; \
@@ -208,36 +99,9 @@ clean_all: clean
 	cd $(DMLC_CORE); "$(MAKE)" clean; cd $(ROOTDIR)
 	cd $(RABIT); "$(MAKE)" clean; cd $(ROOTDIR)
 
-doxygen:
-	doxygen doc/Doxyfile
-
-# create standalone python tar file.
-pypack: ${XGBOOST_DYLIB}
-	cp ${XGBOOST_DYLIB} python-package/xgboost
-	cd python-package; tar cf xgboost.tar xgboost; cd ..
-
 # create pip source dist (sdist) pack for PyPI
 pippack: clean_all
-	rm -rf xgboost-python
-	# remove symlinked directories in python-package/xgboost
-	rm -rf python-package/xgboost/lib
-	rm -rf python-package/xgboost/dmlc-core
-	rm -rf python-package/xgboost/include
-	rm -rf python-package/xgboost/make
-	rm -rf python-package/xgboost/rabit
-	rm -rf python-package/xgboost/src
-	cp -r python-package xgboost-python
-	cp -r Makefile xgboost-python/xgboost/
-	cp -r make xgboost-python/xgboost/
-	cp -r src xgboost-python/xgboost/
-	cp -r tests xgboost-python/xgboost/
-	cp -r include xgboost-python/xgboost/
-	cp -r dmlc-core xgboost-python/xgboost/
-	cp -r rabit xgboost-python/xgboost/
-	# Use setup_pip.py instead of setup.py
-	mv xgboost-python/setup_pip.py xgboost-python/setup.py
-	# Build sdist tarball
-	cd xgboost-python; python setup.py sdist; mv dist/*.tar.gz ..; cd ..
+	cd python-package; python setup.py sdist; mv dist/*.tar.gz ..; cd ..
 
 # Script to make a clean installable R package.
 Rpack: clean_all
@@ -258,9 +122,17 @@ Rpack: clean_all
 	cp -r dmlc-core/include xgboost/src/dmlc-core/include
 	cp -r dmlc-core/src xgboost/src/dmlc-core/src
 	cp ./LICENSE xgboost
-	cat R-package/src/Makevars.in|sed '2s/.*/PKGROOT=./' | sed '3s/.*/ENABLE_STD_THREAD=0/' > xgboost/src/Makevars.in
+	# Modify PKGROOT in Makevars.in
+	cat R-package/src/Makevars.in|sed '2s/.*/PKGROOT=./' > xgboost/src/Makevars.in
+	# Configure Makevars.win (Windows-specific Makevars, likely using MinGW)
 	cp xgboost/src/Makevars.in xgboost/src/Makevars.win
-	sed -i -e 's/@OPENMP_CXXFLAGS@/$$\(SHLIB_OPENMP_CFLAGS\)/g' xgboost/src/Makevars.win
+	cat xgboost/src/Makevars.in| sed '3s/.*/ENABLE_STD_THREAD=0/' > xgboost/src/Makevars.win
+	sed -i -e 's/@OPENMP_CXXFLAGS@/$$\(SHLIB_OPENMP_CXXFLAGS\)/g' xgboost/src/Makevars.win
+	sed -i -e 's/-pthread/$$\(SHLIB_PTHREAD_FLAGS\)/g' xgboost/src/Makevars.win
+	sed -i -e 's/@ENDIAN_FLAG@/-DDMLC_CMAKE_LITTLE_ENDIAN=1/g' xgboost/src/Makevars.win
+	sed -i -e 's/@BACKTRACE_LIB@//g' xgboost/src/Makevars.win
+	sed -i -e 's/@OPENMP_LIB@//g' xgboost/src/Makevars.win
+	rm -f xgboost/src/Makevars.win-e # OSX sed create this extra file; remove it
 	bash R-package/remove_warning_suppression_pragma.sh
 	rm xgboost/remove_warning_suppression_pragma.sh
@@ -273,4 +145,3 @@ Rcheck: Rbuild
 
 -include build/*.d
 -include build/*/*.d
--include build_plugin/*/*.d
44  R-package/CMakeLists.txt  Normal file
@@ -0,0 +1,44 @@
+find_package(LibR REQUIRED)
+message(STATUS "LIBR_CORE_LIBRARY " ${LIBR_CORE_LIBRARY})
+
+file(GLOB_RECURSE R_SOURCES
+  ${CMAKE_CURRENT_LIST_DIR}/src/*.cc
+  ${CMAKE_CURRENT_LIST_DIR}/src/*.c)
+# Use object library to expose symbols
+add_library(xgboost-r OBJECT ${R_SOURCES})
+if (ENABLE_ALL_WARNINGS)
+  target_compile_options(xgboost-r PRIVATE -Wall -Wextra)
+endif (ENABLE_ALL_WARNINGS)
+target_compile_definitions(xgboost-r
+  PUBLIC
+  -DXGBOOST_STRICT_R_MODE=1
+  -DXGBOOST_CUSTOMIZE_GLOBAL_PRNG=1
+  -DDMLC_LOG_BEFORE_THROW=0
+  -DDMLC_DISABLE_STDIN=1
+  -DDMLC_LOG_CUSTOMIZE=1
+  -DRABIT_CUSTOMIZE_MSG_
+  -DRABIT_STRICT_CXX98_)
+target_include_directories(xgboost-r
+  PRIVATE
+  ${LIBR_INCLUDE_DIRS}
+  ${PROJECT_SOURCE_DIR}/include
+  ${PROJECT_SOURCE_DIR}/dmlc-core/include
+  ${PROJECT_SOURCE_DIR}/rabit/include)
+target_link_libraries(xgboost-r PUBLIC ${LIBR_CORE_LIBRARY})
+if (USE_OPENMP)
+  find_package(OpenMP REQUIRED)
+  target_link_libraries(xgboost-r PUBLIC OpenMP::OpenMP_CXX OpenMP::OpenMP_C)
+endif (USE_OPENMP)
+set_target_properties(
+  xgboost-r PROPERTIES
+  CXX_STANDARD 14
+  CXX_STANDARD_REQUIRED ON
+  POSITION_INDEPENDENT_CODE ON)
+
+# Get compilation and link flags of xgboost-r and propagate to objxgboost
+target_link_libraries(objxgboost PUBLIC xgboost-r)
+# Add all objects of xgboost-r to objxgboost
+target_sources(objxgboost INTERFACE $<TARGET_OBJECTS:xgboost-r>)
+
+set(LIBR_HOME "${LIBR_HOME}" PARENT_SCOPE)
+set(LIBR_EXECUTABLE "${LIBR_EXECUTABLE}" PARENT_SCOPE)
@@ -1,8 +1,8 @@
 Package: xgboost
 Type: Package
 Title: Extreme Gradient Boosting
-Version: 0.71.1
-Date: 2018-05-11
+Version: 1.2.0.1
+Date: 2020-02-21
 Authors@R: c(
   person("Tianqi", "Chen", role = c("aut"),
          email = "tianqi.tchen@gmail.com"),
@@ -51,7 +51,11 @@ Suggests:
     Ckmeans.1d.dp (>= 3.3.1),
     vcd (>= 1.3),
     testthat,
-    igraph (>= 1.0.1)
+    lintr,
+    igraph (>= 1.0.1),
+    jsonlite,
+    float,
+    crayon
 Depends:
     R (>= 3.3.0)
 Imports:
@@ -60,5 +64,5 @@ Imports:
     data.table (>= 1.9.6),
     magrittr (>= 1.5),
     stringi (>= 0.5.2)
-RoxygenNote: 6.0.1
-SystemRequirements: GNU make, C++11
+RoxygenNote: 7.1.1
+SystemRequirements: GNU make, C++14
@@ -14,6 +14,7 @@ S3method(setinfo,xgb.DMatrix)
 S3method(slice,xgb.DMatrix)
 export("xgb.attr<-")
 export("xgb.attributes<-")
+export("xgb.config<-")
 export("xgb.parameters<-")
 export(cb.cv.predict)
 export(cb.early.stop)
@@ -30,6 +31,7 @@ export(xgb.DMatrix)
 export(xgb.DMatrix.save)
 export(xgb.attr)
 export(xgb.attributes)
+export(xgb.config)
 export(xgb.create.features)
 export(xgb.cv)
 export(xgb.dump)
@@ -38,6 +40,7 @@ export(xgb.ggplot.deepness)
 export(xgb.ggplot.importance)
 export(xgb.importance)
 export(xgb.load)
+export(xgb.load.raw)
 export(xgb.model.dt.tree)
 export(xgb.plot.deepness)
 export(xgb.plot.importance)
@@ -46,7 +49,9 @@ export(xgb.plot.shap)
 export(xgb.plot.tree)
 export(xgb.save)
 export(xgb.save.raw)
+export(xgb.serialize)
 export(xgb.train)
+export(xgb.unserialize)
 export(xgboost)
 import(methods)
 importClassesFrom(Matrix,dgCMatrix)
@@ -14,7 +14,7 @@
 #' WARNING: side-effects!!! Be aware that these callback functions access and modify things in
 #' the environment from which they are called from, which is a fairly uncommon thing to do in R.
 #'
-#' To write a custom callback closure, make sure you first understand the main concepts about R envoronments.
+#' To write a custom callback closure, make sure you first understand the main concepts about R environments.
 #' Check either R documentation on \code{\link[base]{environment}} or the
 #' \href{http://adv-r.had.co.nz/Environments.html}{Environments chapter} from the "Advanced R"
 #' book by Hadley Wickham. Further, the best option is to read the code of some of the existing callbacks -
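A brief illustration may help here (a sketch, not part of the changeset; the name cb.print.iteration and its body are invented for the example). A custom callback is a closure that reaches into the caller's frame, exactly as the documentation above describes, and the built-in callbacks additionally tag the closure with 'call' and 'name' attributes:

    # Minimal custom callback sketch: print the current boosting iteration.
    cb.print.iteration <- function() {
      callback <- function(env = parent.frame()) {
        # 'iteration' is defined in the environment of xgb.train() / xgb.cv()
        cat("finished iteration", env$iteration, "\n")
      }
      attr(callback, 'call') <- match.call()
      attr(callback, 'name') <- 'cb.print.iteration'
      callback
    }
    # passed via: xgb.train(params, dtrain, nrounds, callbacks = list(cb.print.iteration()))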
@@ -62,11 +62,11 @@ cb.print.evaluation <- function(period = 1, showsd = TRUE) {
   callback <- function(env = parent.frame()) {
     if (length(env$bst_evaluation) == 0 ||
         period == 0 ||
-        NVL(env$rank, 0) != 0 )
+        NVL(env$rank, 0) != 0)
       return()
 
     i <- env$iteration
-    if ((i-1) %% period == 0 ||
+    if ((i - 1) %% period == 0 ||
         i == env$begin_iteration ||
         i == env$end_iteration) {
       stdev <- if (showsd) env$bst_evaluation_err else NULL
@@ -115,7 +115,7 @@ cb.evaluation.log <- function() {
       stop("bst_evaluation must have non-empty names")
 
     mnames <<- gsub('-', '_', names(env$bst_evaluation))
-    if(!is.null(env$bst_evaluation_err))
+    if (!is.null(env$bst_evaluation_err))
       mnames <<- c(paste0(mnames, '_mean'), paste0(mnames, '_std'))
   }
 
@@ -123,12 +123,12 @@ cb.evaluation.log <- function() {
     env$evaluation_log <- as.data.table(t(simplify2array(env$evaluation_log)))
     setnames(env$evaluation_log, c('iter', mnames))
 
-    if(!is.null(env$bst_evaluation_err)) {
+    if (!is.null(env$bst_evaluation_err)) {
       # rearrange col order from _mean,_mean,...,_std,_std,...
       # to be _mean,_std,_mean,_std,...
       len <- length(mnames)
-      means <- mnames[seq_len(len/2)]
-      stds <- mnames[(len/2 + 1):len]
+      means <- mnames[seq_len(len / 2)]
+      stds <- mnames[(len / 2 + 1):len]
       cnames <- numeric(len)
       cnames[c(TRUE, FALSE)] <- means
       cnames[c(FALSE, TRUE)] <- stds
@@ -144,7 +144,7 @@ cb.evaluation.log <- function() {
       return(finalizer(env))
 
     ev <- env$bst_evaluation
-    if(!is.null(env$bst_evaluation_err))
+    if (!is.null(env$bst_evaluation_err))
       ev <- c(ev, env$bst_evaluation_err)
     env$evaluation_log <- c(env$evaluation_log,
                             list(c(iter = env$iteration, ev)))
@@ -154,7 +154,7 @@ cb.evaluation.log <- function() {
   callback
 }
 
-#' Callback closure for restetting the booster's parameters at each iteration.
+#' Callback closure for resetting the booster's parameters at each iteration.
 #'
 #' @param new_params a list where each element corresponds to a parameter that needs to be reset.
 #'        Each element's value must be either a vector of values of length \code{nrounds}
@@ -168,7 +168,7 @@ cb.evaluation.log <- function() {
 #'        at the beginning of each iteration.
 #'
 #' Note that when training is resumed from some previous model, and a function is used to
-#' reset a parameter value, the \code{nround} argument in this function would be the
+#' reset a parameter value, the \code{nrounds} argument in this function would be the
 #' the number of boosting rounds in the current training.
 #'
 #' Callback function expects the following values to be set in its calling frame:
@@ -351,13 +351,13 @@ cb.early.stop <- function(stopping_rounds, maximize = FALSE,
 
   finalizer <- function(env) {
     if (!is.null(env$bst)) {
-      attr_best_score = as.numeric(xgb.attr(env$bst$handle, 'best_score'))
+      attr_best_score <- as.numeric(xgb.attr(env$bst$handle, 'best_score'))
       if (best_score != attr_best_score)
         stop("Inconsistent 'best_score' values between the closure state: ", best_score,
              " and the xgb.attr: ", attr_best_score)
-      env$bst$best_iteration = best_iteration
-      env$bst$best_ntreelimit = best_ntreelimit
-      env$bst$best_score = best_score
+      env$bst$best_iteration <- best_iteration
+      env$bst$best_ntreelimit <- best_ntreelimit
+      env$bst$best_score <- best_score
     } else {
       env$basket$best_iteration <- best_iteration
       env$basket$best_ntreelimit <- best_ntreelimit
@@ -372,9 +372,9 @@ cb.early.stop <- function(stopping_rounds, maximize = FALSE,
       return(finalizer(env))
 
     i <- env$iteration
-    score = env$bst_evaluation[metric_idx]
+    score <- env$bst_evaluation[metric_idx]
 
-    if (( maximize && score > best_score) ||
+    if ((maximize && score > best_score) ||
         (!maximize && score < best_score)) {
 
       best_msg <<- format.eval.string(i, env$bst_evaluation, env$bst_evaluation_err)
@@ -470,7 +470,7 @@ cb.save.model <- function(save_period = 0, save_name = "xgboost.model") {
 #' to the order of rows in the original dataset. Note that when a custom \code{folds} list is
 #' provided in \code{xgb.cv}, the predictions would only be returned properly when this list is a
 #' non-overlapping list of k sets of indices, as in a standard k-fold CV. The predictions would not be
-#' meaningful when user-profided folds have overlapping indices as in, e.g., random sampling splits.
+#' meaningful when user-provided folds have overlapping indices as in, e.g., random sampling splits.
 #' When some of the indices in the training dataset are not included into user-provided \code{folds},
 #' their prediction value would be \code{NA}.
 #'
@@ -500,7 +500,7 @@ cb.cv.predict <- function(save_models = FALSE) {
       for (fd in env$bst_folds) {
         pr <- predict(fd$bst, fd$watchlist[[2]], ntreelimit = ntreelimit, reshape = TRUE)
         if (is.matrix(pred)) {
-          pred[fd$index,] <- pr
+          pred[fd$index, ] <- pr
        } else {
          pred[fd$index] <- pr
        }
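For context, a hedged usage sketch of the out-of-fold prediction path touched above (data set and parameter values are illustrative): xgb.cv() with prediction = TRUE attaches cb.cv.predict() and returns one out-of-fold prediction per training row, provided the supplied folds do not overlap.

    library(xgboost)
    data(agaricus.train, package = 'xgboost')
    dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
    cv <- xgb.cv(params = list(objective = "binary:logistic", max_depth = 2,
                               eta = 1, nthread = 2),
                 data = dtrain, nrounds = 5, nfold = 4, prediction = TRUE)
    # one out-of-fold prediction per row of the training data
    length(cv$pred) == nrow(agaricus.train$data)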
@@ -613,9 +613,7 @@ cb.gblinear.history <- function(sparse=FALSE) {
 
   init <- function(env) {
     if (!is.null(env$bst)) { # xgb.train:
-      coef_path <- list()
     } else if (!is.null(env$bst_folds)) { # xgb.cv:
-      coef_path <- rep(list(), length(env$bst_folds))
     } else stop("Parent frame has neither 'bst' nor 'bst_folds'")
   }
 
@@ -681,7 +679,7 @@ cb.gblinear.history <- function(sparse=FALSE) {
 #' using the \code{cb.gblinear.history()} callback.
 #' @param class_index zero-based class index to extract the coefficients for only that
 #'        specific class in a multinomial multiclass model. When it is NULL, all the
-#'        coeffients are returned. Has no effect in non-multiclass models.
+#'        coefficients are returned. Has no effect in non-multiclass models.
 #'
 #' @return
 #' For an \code{xgb.train} result, a matrix (either dense or sparse) with the columns
@@ -705,11 +703,11 @@ xgb.gblinear.history <- function(model, class_index = NULL) {
   if (!is_cv) {
     # extract num_class & num_feat from the internal model
     dmp <- xgb.dump(model)
-    if(length(dmp) < 2 || dmp[2] != "bias:")
+    if (length(dmp) < 2 || dmp[2] != "bias:")
       stop("It does not appear to be a gblinear model")
-    dmp <- dmp[-c(1,2)]
+    dmp <- dmp[-c(1, 2)]
     n <- which(dmp == 'weight:')
-    if(length(n) != 1)
+    if (length(n) != 1)
       stop("It does not appear to be a gblinear model")
     num_class <- n - 1
     num_feat <- (length(dmp) - 4) / num_class
@@ -732,9 +730,9 @@ xgb.gblinear.history <- function(model, class_index = NULL) {
   if (!is.null(class_index) && num_class > 1) {
     coef_path <- if (is.list(coef_path)) {
       lapply(coef_path,
-             function(x) x[, seq(1 + class_index, by=num_class, length.out=num_feat)])
+             function(x) x[, seq(1 + class_index, by = num_class, length.out = num_feat)])
     } else {
-      coef_path <- coef_path[, seq(1 + class_index, by=num_class, length.out=num_feat)]
+      coef_path <- coef_path[, seq(1 + class_index, by = num_class, length.out = num_feat)]
     }
   }
   coef_path
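A hedged usage sketch of the coefficient-history machinery above (parameter values are arbitrary): cb.gblinear.history() records the linear coefficients at each boosting round, and xgb.gblinear.history() extracts them afterwards.

    library(xgboost)
    data(agaricus.train, package = 'xgboost')
    dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
    bst <- xgb.train(params = list(booster = 'gblinear', objective = 'binary:logistic',
                                   eta = 0.3, updater = 'coord_descent', nthread = 2),
                     data = dtrain, nrounds = 10,
                     callbacks = list(cb.gblinear.history()))
    coef_path <- xgb.gblinear.history(bst)
    dim(coef_path)   # one row per boosting round, one column per coefficient (incl. bias)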
@@ -28,7 +28,7 @@ NVL <- function(x, val) {
 # Merges booster params with whatever is provided in ...
 # plus runs some checks
 check.booster.params <- function(params, ...) {
-  if (typeof(params) != "list")
+  if (!identical(class(params), "list"))
     stop("params must be a list")
 
   # in R interface, allow for '.' instead of '_' in parameter names
@@ -69,11 +69,24 @@ check.booster.params <- function(params, ...) {
 
   if (!is.null(params[['monotone_constraints']]) &&
       typeof(params[['monotone_constraints']]) != "character") {
-    vec2str = paste(params[['monotone_constraints']], collapse = ',')
-    vec2str = paste0('(', vec2str, ')')
-    params[['monotone_constraints']] = vec2str
+    vec2str <- paste(params[['monotone_constraints']], collapse = ',')
+    vec2str <- paste0('(', vec2str, ')')
+    params[['monotone_constraints']] <- vec2str
   }
 
+  # interaction constraints parser (convert from list of column indices to string)
+  if (!is.null(params[['interaction_constraints']]) &&
+      typeof(params[['interaction_constraints']]) != "character"){
+    # check input class
+    if (!identical(class(params[['interaction_constraints']]), 'list')) stop('interaction_constraints should be class list')
+    if (!all(unique(sapply(params[['interaction_constraints']], class)) %in% c('numeric', 'integer'))) {
+      stop('interaction_constraints should be a list of numeric/integer vectors')
+    }
+
+    # recast parameter as string
+    interaction_constraints <- sapply(params[['interaction_constraints']], function(x) paste0('[', paste(x, collapse = ','), ']'))
+    params[['interaction_constraints']] <- paste0('[', paste(interaction_constraints, collapse = ','), ']')
+  }
   return(params)
 }
 
|
|||||||
if (is.null(obj)) {
|
if (is.null(obj)) {
|
||||||
.Call(XGBoosterUpdateOneIter_R, booster_handle, as.integer(iter), dtrain)
|
.Call(XGBoosterUpdateOneIter_R, booster_handle, as.integer(iter), dtrain)
|
||||||
} else {
|
} else {
|
||||||
pred <- predict(booster_handle, dtrain)
|
pred <- predict(booster_handle, dtrain, outputmargin = TRUE, training = TRUE,
|
||||||
|
ntreelimit = 0)
|
||||||
gpair <- obj(pred, dtrain)
|
gpair <- obj(pred, dtrain)
|
||||||
.Call(XGBoosterBoostOneIter_R, booster_handle, dtrain, gpair$grad, gpair$hess)
|
.Call(XGBoosterBoostOneIter_R, booster_handle, dtrain, gpair$grad, gpair$hess)
|
||||||
}
|
}
|
||||||
@@ -154,12 +168,12 @@ xgb.iter.eval <- function(booster_handle, watchlist, iter, feval = NULL) {
|
|||||||
if (is.null(feval)) {
|
if (is.null(feval)) {
|
||||||
msg <- .Call(XGBoosterEvalOneIter_R, booster_handle, as.integer(iter), watchlist, as.list(evnames))
|
msg <- .Call(XGBoosterEvalOneIter_R, booster_handle, as.integer(iter), watchlist, as.list(evnames))
|
||||||
msg <- stri_split_regex(msg, '(\\s+|:|\\s+)')[[1]][-1]
|
msg <- stri_split_regex(msg, '(\\s+|:|\\s+)')[[1]][-1]
|
||||||
res <- as.numeric(msg[c(FALSE,TRUE)]) # even indices are the values
|
res <- as.numeric(msg[c(FALSE, TRUE)]) # even indices are the values
|
||||||
names(res) <- msg[c(TRUE,FALSE)] # odds are the names
|
names(res) <- msg[c(TRUE, FALSE)] # odds are the names
|
||||||
} else {
|
} else {
|
||||||
res <- sapply(seq_along(watchlist), function(j) {
|
res <- sapply(seq_along(watchlist), function(j) {
|
||||||
w <- watchlist[[j]]
|
w <- watchlist[[j]]
|
||||||
preds <- predict(booster_handle, w) # predict using all trees
|
preds <- predict(booster_handle, w, outputmargin = TRUE, ntreelimit = 0) # predict using all trees
|
||||||
eval_res <- feval(preds, w)
|
eval_res <- feval(preds, w)
|
||||||
out <- eval_res$value
|
out <- eval_res$value
|
||||||
names(out) <- paste0(evnames[j], "-", eval_res$metric)
|
names(out) <- paste0(evnames[j], "-", eval_res$metric)
|
||||||
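The outputmargin = TRUE change above matters for custom objectives: the obj function now receives raw margin scores rather than transformed predictions. A hedged sketch of the expected contract, shown for logistic loss (the function name is illustrative):

    logregobj <- function(preds, dtrain) {
      labels <- getinfo(dtrain, "label")
      preds <- 1 / (1 + exp(-preds))   # margin -> probability
      grad <- preds - labels
      hess <- preds * (1 - preds)
      list(grad = grad, hess = hess)
    }
    # used as: xgb.train(params, dtrain, nrounds = 10, obj = logregobj)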
@@ -196,13 +210,14 @@ generate.cv.folds <- function(nfold, nrows, stratified, label, params) {
   if (exists('objective', where = params) &&
       is.character(params$objective)) {
     # If 'objective' provided in params, assume that y is a classification label
-    # unless objective is reg:linear
-    if (params$objective != 'reg:linear')
+    # unless objective is reg:squarederror
+    if (params$objective != 'reg:squarederror')
       y <- factor(y)
   } else {
-    # If no 'objective' given in params, it means that user either wants to use
-    # the default 'reg:linear' objective or has provided a custom obj function.
-    # Here, assume classification setting when y has 5 or less unique values:
+    # If no 'objective' given in params, it means that user either wants to
+    # use the default 'reg:squarederror' objective or has provided a custom
+    # obj function. Here, assume classification setting when y has 5 or less
+    # unique values:
     if (length(unique(y)) <= 5)
       y <- factor(y)
   }
@@ -262,7 +277,8 @@ xgb.createFolds <- function(y, k = 10)
       ## add enough random integers to get length(seqVector) == numInClass[i]
       if (numInClass[i] %% k > 0) seqVector <- c(seqVector, sample.int(k, numInClass[i] %% k))
       ## shuffle the integers for fold assignment and assign to this classes's data
-      foldVector[y == dimnames(numInClass)$y[i]] <- sample(seqVector)
+      ## seqVector[sample.int(length(seqVector))] is used to handle length(seqVector) == 1
+      foldVector[y == dimnames(numInClass)$y[i]] <- seqVector[sample.int(length(seqVector))]
     }
   } else {
     foldVector <- seq(along = y)
@@ -292,6 +308,66 @@ xgb.createFolds <- function(y, k = 10)
 #' @name xgboost-deprecated
 NULL
 
+#' Do not use \code{\link[base]{saveRDS}} or \code{\link[base]{save}} for long-term archival of
+#' models. Instead, use \code{\link{xgb.save}} or \code{\link{xgb.save.raw}}.
+#'
+#' It is a common practice to use the built-in \code{\link[base]{saveRDS}} function (or
+#' \code{\link[base]{save}}) to persist R objects to the disk. While it is possible to persist
+#' \code{xgb.Booster} objects using \code{\link[base]{saveRDS}}, it is not advisable to do so if
+#' the model is to be accessed in the future. If you train a model with the current version of
+#' XGBoost and persist it with \code{\link[base]{saveRDS}}, the model is not guaranteed to be
+#' accessible in later releases of XGBoost. To ensure that your model can be accessed in future
+#' releases of XGBoost, use \code{\link{xgb.save}} or \code{\link{xgb.save.raw}} instead.
+#'
+#' @details
+#' Use \code{\link{xgb.save}} to save the XGBoost model as a stand-alone file. You may opt into
+#' the JSON format by specifying the JSON extension. To read the model back, use
+#' \code{\link{xgb.load}}.
+#'
+#' Use \code{\link{xgb.save.raw}} to save the XGBoost model as a sequence (vector) of raw bytes
+#' in a future-proof manner. Future releases of XGBoost will be able to read the raw bytes and
+#' re-construct the corresponding model. To read the model back, use \code{\link{xgb.load.raw}}.
+#' The \code{\link{xgb.save.raw}} function is useful if you'd like to persist the XGBoost model
+#' as part of another R object.
+#'
+#' Note: Do not use \code{\link{xgb.serialize}} to store models long-term. It persists not only the
+#' model but also internal configurations and parameters, and its format is not stable across
+#' multiple XGBoost versions. Use \code{\link{xgb.serialize}} only for checkpointing.
+#'
+#' For more details and explanation about model persistence and archival, consult the page
+#' \url{https://xgboost.readthedocs.io/en/latest/tutorials/saving_model.html}.
+#'
+#' @examples
+#' data(agaricus.train, package='xgboost')
+#' bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label, max_depth = 2,
+#'                eta = 1, nthread = 2, nrounds = 2, objective = "binary:logistic")
+#'
+#' # Save as a stand-alone file; load it with xgb.load()
+#' xgb.save(bst, 'xgb.model')
+#' bst2 <- xgb.load('xgb.model')
+#'
+#' # Save as a stand-alone file (JSON); load it with xgb.load()
+#' xgb.save(bst, 'xgb.model.json')
+#' bst2 <- xgb.load('xgb.model.json')
+#'
+#' # Save as a raw byte vector; load it with xgb.load.raw()
+#' xgb_bytes <- xgb.save.raw(bst)
+#' bst2 <- xgb.load.raw(xgb_bytes)
+#'
+#' # Persist XGBoost model as part of another R object
+#' obj <- list(xgb_model_bytes = xgb.save.raw(bst), description = "My first XGBoost model")
+#' # Persist the R object. Here, saveRDS() is okay, since it doesn't persist
+#' # xgb.Booster directly. What's being persisted is the future-proof byte representation
+#' # as given by xgb.save.raw().
+#' saveRDS(obj, 'my_object.rds')
+#' # Read back the R object
+#' obj2 <- readRDS('my_object.rds')
+#' # Re-construct xgb.Booster object from the bytes
+#' bst2 <- xgb.load.raw(obj2$xgb_model_bytes)
+#'
+#' @name a-compatibility-note-for-saveRDS-save
+NULL
+
 # Lookup table for the deprecated parameters bookkeeping
 depr_par_lut <- matrix(c(
   'print.every.n', 'print_every_n',
@@ -300,8 +376,8 @@ depr_par_lut <- matrix(c(
   'with.stats', 'with_stats',
   'numberOfClusters', 'n_clusters',
   'features.keep', 'features_keep',
-  'plot.height','plot_height',
-  'plot.width','plot_width',
+  'plot.height', 'plot_height',
+  'plot.width', 'plot_width',
   'n_first_tree', 'trees',
   'dummy', 'DUMMY'
 ), ncol = 2, byrow = TRUE)
@@ -314,20 +390,20 @@ colnames(depr_par_lut) <- c('old', 'new')
 check.deprecation <- function(..., env = parent.frame()) {
   pars <- list(...)
   # exact and partial matches
-  all_match <- pmatch(names(pars), depr_par_lut[,1])
+  all_match <- pmatch(names(pars), depr_par_lut[, 1])
   # indices of matched pars' names
   idx_pars <- which(!is.na(all_match))
   if (length(idx_pars) == 0) return()
   # indices of matched LUT rows
   idx_lut <- all_match[idx_pars]
   # which of idx_lut were the exact matches?
-  ex_match <- depr_par_lut[idx_lut,1] %in% names(pars)
+  ex_match <- depr_par_lut[idx_lut, 1] %in% names(pars)
   for (i in seq_along(idx_pars)) {
     pars_par <- names(pars)[idx_pars[i]]
     old_par <- depr_par_lut[idx_lut[i], 1]
     new_par <- depr_par_lut[idx_lut[i], 2]
     if (!ex_match[i]) {
-      warning("'", pars_par, "' was partially matched to '", old_par,"'")
+      warning("'", pars_par, "' was partially matched to '", old_par, "'")
     }
     .Deprecated(new_par, old = old_par, package = 'xgboost')
     if (new_par != 'NULL') {
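The matching logic above leans on pmatch(), which accepts unambiguous partial matches as well as exact ones; a small sketch (values are illustrative):

    lut_old  <- c('print.every.n', 'with.stats', 'plot.height')
    supplied <- c('print.every', 'with.stats')   # one partial, one exact
    pmatch(supplied, lut_old)    # 1 2  -> both map to a row of the lookup table
    supplied %in% lut_old        # FALSE TRUE -> the first was only a partial match,
                                 #               which is what triggers the warning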
@@ -1,24 +1,39 @@
 # Construct an internal xgboost Booster and return a handle to it.
 # internal utility function
-xgb.Booster.handle <- function(params = list(), cachelist = list(), modelfile = NULL) {
+xgb.Booster.handle <- function(params = list(), cachelist = list(),
+                               modelfile = NULL) {
   if (typeof(cachelist) != "list" ||
       !all(vapply(cachelist, inherits, logical(1), what = 'xgb.DMatrix'))) {
     stop("cachelist must be a list of xgb.DMatrix objects")
   }
-  handle <- .Call(XGBoosterCreate_R, cachelist)
+  ## Load existing model, dispatch for on disk model file and in memory buffer
   if (!is.null(modelfile)) {
     if (typeof(modelfile) == "character") {
+      ## A filename
+      handle <- .Call(XGBoosterCreate_R, cachelist)
       .Call(XGBoosterLoadModel_R, handle, modelfile[1])
+      class(handle) <- "xgb.Booster.handle"
+      if (length(params) > 0) {
+        xgb.parameters(handle) <- params
+      }
+      return(handle)
     } else if (typeof(modelfile) == "raw") {
-      .Call(XGBoosterLoadModelFromRaw_R, handle, modelfile)
+      ## A memory buffer
+      bst <- xgb.unserialize(modelfile)
+      xgb.parameters(bst) <- params
+      return (bst)
     } else if (inherits(modelfile, "xgb.Booster")) {
+      ## A booster object
       bst <- xgb.Booster.complete(modelfile, saveraw = TRUE)
-      .Call(XGBoosterLoadModelFromRaw_R, handle, bst$raw)
+      bst <- xgb.unserialize(bst$raw)
+      xgb.parameters(bst) <- params
+      return (bst)
     } else {
       stop("modelfile must be either character filename, or raw booster dump, or xgb.Booster object")
     }
   }
+  ## Create new model
+  handle <- .Call(XGBoosterCreate_R, cachelist)
   class(handle) <- "xgb.Booster.handle"
   if (length(params) > 0) {
     xgb.parameters(handle) <- params
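For orientation, the three kinds of modelfile the dispatch above accepts correspond to these public entry points (a hedged sketch; the file name and parameters are arbitrary):

    library(xgboost)
    data(agaricus.train, package = 'xgboost')
    bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label,
                   nrounds = 2, objective = "binary:logistic")

    xgb.save(bst, 'model.bin')        # character path: loaded via XGBoosterLoadModel_R
    bst_file <- xgb.load('model.bin')

    buf <- xgb.serialize(bst)         # raw vector: restored via xgb.unserialize()
    bst_buf <- xgb.unserialize(buf)

    bst_again <- xgb.Booster.complete(bst)   # an existing xgb.Booster object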
@@ -48,14 +63,16 @@ is.null.handle <- function(handle) {
   return(FALSE)
 }
 
-# Return a verified to be valid handle out of either xgb.Booster.handle or xgb.Booster
-# internal utility function
+# Return a verified to be valid handle out of either xgb.Booster.handle or
+# xgb.Booster internal utility function
 xgb.get.handle <- function(object) {
-  handle <- switch(class(object)[1],
-                   xgb.Booster = object$handle,
-                   xgb.Booster.handle = object,
+  if (inherits(object, "xgb.Booster")) {
+    handle <- object$handle
+  } else if (inherits(object, "xgb.Booster.handle")) {
+    handle <- object
+  } else {
     stop("argument must be of either xgb.Booster or xgb.Booster.handle class")
-  )
+  }
   if (is.null.handle(handle)) {
     stop("invalid xgb.Booster.handle")
   }
@@ -81,7 +98,7 @@ xgb.get.handle <- function(object) {
 #' its handle (pointer) to an internal xgboost model would be invalid. The majority of xgboost methods
 #' should still work for such a model object since those methods would be using
 #' \code{xgb.Booster.complete} internally. However, one might find it to be more efficient to call the
-#' \code{xgb.Booster.complete} function explicitely once after loading a model as an R-object.
+#' \code{xgb.Booster.complete} function explicitly once after loading a model as an R-object.
 #' That would prevent further repeated implicit reconstruction of an internal booster model.
 #'
 #' @return
@@ -94,7 +111,10 @@ xgb.get.handle <- function(object) {
 #'                      eta = 1, nthread = 2, nrounds = 2, objective = "binary:logistic")
 #' saveRDS(bst, "xgb.model.rds")
 #'
+#' # Warning: The resulting RDS file is only compatible with the current XGBoost version.
+#' # Refer to the section titled "a-compatibility-note-for-saveRDS-save".
 #' bst1 <- readRDS("xgb.model.rds")
+#' if (file.exists("xgb.model.rds")) file.remove("xgb.model.rds")
 #' # the handle is invalid:
 #' print(bst1$handle)
 #'
@@ -110,9 +130,29 @@ xgb.Booster.complete <- function(object, saveraw = TRUE) {
   if (is.null.handle(object$handle)) {
     object$handle <- xgb.Booster.handle(modelfile = object$raw)
   } else {
-    if (is.null(object$raw) && saveraw)
-      object$raw <- xgb.save.raw(object$handle)
+    if (is.null(object$raw) && saveraw) {
+      object$raw <- xgb.serialize(object$handle)
+    }
   }
 
+  attrs <- xgb.attributes(object)
+  if (!is.null(attrs$best_ntreelimit)) {
+    object$best_ntreelimit <- as.integer(attrs$best_ntreelimit)
+  }
+  if (!is.null(attrs$best_iteration)) {
+    ## Convert from 0 based back to 1 based.
+    object$best_iteration <- as.integer(attrs$best_iteration) + 1
+  }
+  if (!is.null(attrs$best_score)) {
+    object$best_score <- as.numeric(attrs$best_score)
+  }
+  if (!is.null(attrs$best_msg)) {
+    object$best_msg <- attrs$best_msg
+  }
+  if (!is.null(attrs$niter)) {
+    object$niter <- as.integer(attrs$niter)
+  }
+
   return(object)
 }
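Continuing the saveRDS example from the documentation earlier in this file, a hedged sketch of why the attribute restore above is useful (it assumes xgb.model.rds was produced by saveRDS on a trained booster, and that the corresponding attributes were stored with the model):

    bst1 <- readRDS("xgb.model.rds")       # handle is a null pointer at this point
    bst1 <- xgb.Booster.complete(bst1)     # handle rebuilt from bst1$raw
    bst1$niter                             # repopulated from the stored 'niter' attribute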
@@ -129,11 +169,15 @@ xgb.Booster.complete <- function(object, saveraw = TRUE) {
 #' logistic regression would result in predictions for log-odds instead of probabilities.
 #' @param ntreelimit limit the number of model's trees or boosting iterations used in prediction (see Details).
 #' It will use all the trees by default (\code{NULL} value).
-#' @param predleaf whether predict leaf index instead.
-#' @param predcontrib whether to return feature contributions to individual predictions instead (see Details).
+#' @param predleaf whether predict leaf index.
+#' @param predcontrib whether to return feature contributions to individual predictions (see Details).
 #' @param approxcontrib whether to use a fast approximation for feature contributions (see Details).
+#' @param predinteraction whether to return contributions of feature interactions to individual predictions (see Details).
 #' @param reshape whether to reshape the vector of predictions to a matrix form when there are several
-#' prediction outputs per case. This option has no effect when \code{predleaf = TRUE}.
+#' prediction outputs per case. This option has no effect when either of predleaf, predcontrib,
+#' or predinteraction flags is TRUE.
+#' @param training whether is the prediction result used for training. For dart booster,
+#' training predicting will perform dropout.
 #' @param ... Parameters passed to \code{predict.xgb.Booster}
 #'
 #' @details
@@ -158,6 +202,11 @@ xgb.Booster.complete <- function(object, saveraw = TRUE) {
 #' Setting \code{approxcontrib = TRUE} approximates these values following the idea explained
 #' in \url{http://blog.datadive.net/interpreting-random-forests/}.
 #'
+#' With \code{predinteraction = TRUE}, SHAP values of contributions of interaction of each pair of features
+#' are computed. Note that this operation might be rather expensive in terms of compute and memory.
+#' Since it quadratically depends on the number of features, it is recommended to perform selection
+#' of the most important features first. See below about the format of the returned results.
+#'
 #' @return
 #' For regression or binary classification, it returns a vector of length \code{nrows(newdata)}.
 #' For multiclass classification, either a \code{num_class * nrows(newdata)} vector or
@@ -173,6 +222,14 @@ xgb.Booster.complete <- function(object, saveraw = TRUE) {
 #' such a matrix. The contribution values are on the scale of untransformed margin
 #' (e.g., for binary classification would mean that the contributions are log-odds deviations from bias).
 #'
+#' When \code{predinteraction = TRUE} and it is not a multiclass setting, the output is a 3d array with
+#' dimensions \code{c(nrow, num_features + 1, num_features + 1)}. The off-diagonal (in the last two dimensions)
+#' elements represent different features interaction contributions. The array is symmetric WRT the last
+#' two dimensions. The "+ 1" columns corresponds to bias. Summing this array along the last dimension should
+#' produce practically the same result as predict with \code{predcontrib = TRUE}.
+#' For a multiclass case, a list of \code{num_class} elements is returned, where each element is
+#' such an array.
+#'
 #' @seealso
 #' \code{\link{xgb.train}}.
 #'
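A hedged usage sketch of the `predinteraction` flag documented in the hunk above (agaricus data as in the package examples; the final check follows the stated relationship with `predcontrib`):

library(xgboost)
data(agaricus.train, package = "xgboost")
bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label,
               max_depth = 2, eta = 1, nthread = 2, nrounds = 4,
               objective = "binary:logistic")

x <- agaricus.train$data[1:50, ]
contrib  <- predict(bst, x, predcontrib = TRUE)       # 50 x (num_features + 1)
interact <- predict(bst, x, predinteraction = TRUE)   # 50 x (num_features + 1) x (num_features + 1)

# Summing the interaction array over its last dimension should be close to
# the plain per-feature contributions, as described above.
max(abs(apply(interact, c(1, 2), sum) - contrib))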
@@ -269,7 +326,8 @@ xgb.Booster.complete <- function(object, saveraw = TRUE) {
 #' @rdname predict.xgb.Booster
 #' @export
 predict.xgb.Booster <- function(object, newdata, missing = NA, outputmargin = FALSE, ntreelimit = NULL,
-                                predleaf = FALSE, predcontrib = FALSE, approxcontrib = FALSE, reshape = FALSE, ...) {
+                                predleaf = FALSE, predcontrib = FALSE, approxcontrib = FALSE, predinteraction = FALSE,
+                                reshape = FALSE, training = FALSE, ...) {

   object <- xgb.Booster.complete(object, saveraw = FALSE)
   if (!inherits(newdata, "xgb.DMatrix"))
@@ -285,9 +343,11 @@ predict.xgb.Booster <- function(object, newdata, missing = NA, outputmargin = FA
   if (ntreelimit < 0)
     stop("ntreelimit cannot be negative")

-  option <- 0L + 1L * as.logical(outputmargin) + 2L * as.logical(predleaf) + 4L * as.logical(predcontrib) + 8L * as.logical(approxcontrib)
+  option <- 0L + 1L * as.logical(outputmargin) + 2L * as.logical(predleaf) + 4L * as.logical(predcontrib) +
+    8L * as.logical(approxcontrib) + 16L * as.logical(predinteraction)

-  ret <- .Call(XGBoosterPredict_R, object$handle, newdata, option[1], as.integer(ntreelimit))
+  ret <- .Call(XGBoosterPredict_R, object$handle, newdata, option[1],
+               as.integer(ntreelimit), as.integer(training))

   n_ret <- length(ret)
   n_row <- nrow(newdata)
@@ -305,17 +365,28 @@ predict.xgb.Booster <- function(object, newdata, missing = NA, outputmargin = FA
   } else if (predcontrib) {
     n_col1 <- ncol(newdata) + 1
     n_group <- npred_per_case / n_col1
-    dnames <- if (!is.null(colnames(newdata))) list(NULL, c(colnames(newdata), "BIAS")) else NULL
+    cnames <- if (!is.null(colnames(newdata))) c(colnames(newdata), "BIAS") else NULL
     ret <- if (n_ret == n_row) {
-      matrix(ret, ncol = 1, dimnames = dnames)
+      matrix(ret, ncol = 1, dimnames = list(NULL, cnames))
     } else if (n_group == 1) {
-      matrix(ret, nrow = n_row, byrow = TRUE, dimnames = dnames)
+      matrix(ret, nrow = n_row, byrow = TRUE, dimnames = list(NULL, cnames))
     } else {
-      grp_mask <- rep(seq_len(n_col1), n_row) +
-        rep((seq_len(n_row) - 1) * n_col1 * n_group, each = n_col1)
-      lapply(seq_len(n_group), function(g) {
-        matrix(ret[grp_mask + n_col1 * (g - 1)], nrow = n_row, byrow = TRUE, dimnames = dnames)
-      })
+      arr <- array(ret, c(n_col1, n_group, n_row),
+                   dimnames = list(cnames, NULL, NULL)) %>% aperm(c(2, 3, 1)) # [group, row, col]
+      lapply(seq_len(n_group), function(g) arr[g, , ])
+    }
+  } else if (predinteraction) {
+    n_col1 <- ncol(newdata) + 1
+    n_group <- npred_per_case / n_col1^2
+    cnames <- if (!is.null(colnames(newdata))) c(colnames(newdata), "BIAS") else NULL
+    ret <- if (n_ret == n_row) {
+      matrix(ret, ncol = 1, dimnames = list(NULL, cnames))
+    } else if (n_group == 1) {
+      array(ret, c(n_col1, n_col1, n_row), dimnames = list(cnames, cnames, NULL)) %>% aperm(c(3, 1, 2))
+    } else {
+      arr <- array(ret, c(n_col1, n_col1, n_group, n_row),
+                   dimnames = list(cnames, cnames, NULL, NULL)) %>% aperm(c(3, 4, 1, 2)) # [group, row, col1, col2]
+      lapply(seq_len(n_group), function(g) arr[g, , , ])
     }
   } else if (reshape && npred_per_case > 1) {
     ret <- matrix(ret, nrow = n_row, byrow = TRUE)
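For intuition about the array()/aperm() calls above, a small standalone illustration with hypothetical sizes (base R only; it mirrors the new multi-group branch, assuming the flat vector is laid out per row as group-by-column blocks):

# Hypothetical sizes: 2 rows, 2 groups, 3 columns (2 features + bias).
n_row <- 2; n_group <- 2; n_col1 <- 3
ret <- seq_len(n_row * n_group * n_col1)   # stand-in for the flat prediction vector

# Fold into [group, row, column], then split into one matrix per group,
# following the same permutation used in the hunk above.
arr <- aperm(array(ret, c(n_col1, n_group, n_row)), c(2, 3, 1))
per_group <- lapply(seq_len(n_group), function(g) arr[g, , ])
str(per_group)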
@@ -365,7 +436,7 @@ predict.xgb.Booster.handle <- function(object, ...) {
 #' That would only matter if attributes need to be set many times.
 #' Note, however, that when feeding a handle of an \code{xgb.Booster} object to the attribute setters,
 #' the raw model cache of an \code{xgb.Booster} object would not be automatically updated,
-#' and it would be user's responsibility to call \code{xgb.save.raw} to update it.
+#' and it would be user's responsibility to call \code{xgb.serialize} to update it.
 #'
 #' The \code{xgb.attributes<-} setter either updates the existing or adds one or several attributes,
 #' but it doesn't delete the other existing attributes.
@@ -390,6 +461,7 @@ predict.xgb.Booster.handle <- function(object, ...) {
 #'
 #' xgb.save(bst, 'xgb.model')
 #' bst1 <- xgb.load('xgb.model')
+#' if (file.exists('xgb.model')) file.remove('xgb.model')
 #' print(xgb.attr(bst1, "my_attribute"))
 #' print(xgb.attributes(bst1))
 #'
@@ -423,7 +495,7 @@ xgb.attr <- function(object, name) {
   }
   .Call(XGBoosterSetAttr_R, handle, as.character(name[1]), value)
   if (is(object, 'xgb.Booster') && !is.null(object$raw)) {
-    object$raw <- xgb.save.raw(object$handle)
+    object$raw <- xgb.serialize(object$handle)
   }
   object
 }
@@ -463,11 +535,41 @@ xgb.attributes <- function(object) {
     .Call(XGBoosterSetAttr_R, handle, names(a[i]), a[[i]])
   }
   if (is(object, 'xgb.Booster') && !is.null(object$raw)) {
-    object$raw <- xgb.save.raw(object$handle)
+    object$raw <- xgb.serialize(object$handle)
   }
   object
 }

+#' Accessors for model parameters as JSON string.
+#'
+#' @param object Object of class \code{xgb.Booster}
+#' @param value A JSON string.
+#'
+#' @examples
+#' data(agaricus.train, package='xgboost')
+#' train <- agaricus.train
+#'
+#' bst <- xgboost(data = train$data, label = train$label, max_depth = 2,
+#' eta = 1, nthread = 2, nrounds = 2, objective = "binary:logistic")
+#' config <- xgb.config(bst)
+#'
+#' @rdname xgb.config
+#' @export
+xgb.config <- function(object) {
+  handle <- xgb.get.handle(object)
+  .Call(XGBoosterSaveJsonConfig_R, handle);
+}
+
+#' @rdname xgb.config
+#' @export
+`xgb.config<-` <- function(object, value) {
+  handle <- xgb.get.handle(object)
+  .Call(XGBoosterLoadJsonConfig_R, handle, value)
+  object$raw <- NULL # force renew the raw buffer
+  object <- xgb.Booster.complete(object)
+  object
+}
+
 #' Accessors for model parameters.
 #'
 #' Only the setter for xgboost parameters is currently implemented.
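A short, hedged usage sketch of the JSON-config accessors added above (model fit mirrors the roxygen example; which fields are writable depends on the backing C++ configuration):

library(xgboost)
data(agaricus.train, package = "xgboost")
bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label,
               max_depth = 2, eta = 1, nthread = 2, nrounds = 2,
               objective = "binary:logistic")

cfg <- xgb.config(bst)            # JSON string with the booster's internal configuration
cat(substr(cfg, 1, 120), "...\n")

# Assigning the (possibly edited) JSON back clears the cached raw buffer and
# re-completes the booster, exactly as the setter above does.
xgb.config(bst) <- cfg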
@@ -504,7 +606,7 @@ xgb.attributes <- function(object) {
     .Call(XGBoosterSetParam_R, handle, names(p[i]), p[[i]])
   }
   if (is(object, 'xgb.Booster') && !is.null(object$raw)) {
-    object$raw <- xgb.save.raw(object$handle)
+    object$raw <- xgb.serialize(object$handle)
   }
   object
 }
@@ -557,7 +659,7 @@ print.xgb.Booster <- function(x, verbose = FALSE, ...) {

   if (!is.null(x$params)) {
     cat('params (as set within xgb.train):\n')
-    cat( ' ',
+    cat(' ',
        paste(names(x$params),
              paste0('"', unlist(x$params), '"'),
              sep = ' = ', collapse = ', '), '\n', sep = '')
@@ -570,9 +672,9 @@ print.xgb.Booster <- function(x, verbose = FALSE, ...) {
   if (length(attrs) > 0) {
     cat('xgb.attributes:\n')
     if (verbose) {
-      cat( paste(paste0(' ',names(attrs)),
+      cat(paste(paste0(' ', names(attrs)),
                paste0('"', unlist(attrs), '"'),
                sep = ' = ', collapse = '\n'), '\n', sep = '')
     } else {
       cat(' ', paste(names(attrs), collapse = ', '), '\n', sep = '')
     }
@@ -594,7 +696,7 @@ print.xgb.Booster <- function(x, verbose = FALSE, ...) {
   #cat('ntree: ', xgb.ntree(x), '\n', sep='')

   for (n in setdiff(names(x), c('handle', 'raw', 'call', 'params', 'callbacks',
-                                'evaluation_log','niter','feature_names'))) {
+                                'evaluation_log', 'niter', 'feature_names'))) {
     if (is.atomic(x[[n]])) {
       cat(n, ':', x[[n]], '\n', sep = ' ')
     } else {
@@ -19,6 +19,7 @@
|
|||||||
#' dtrain <- xgb.DMatrix(train$data, label=train$label)
|
#' dtrain <- xgb.DMatrix(train$data, label=train$label)
|
||||||
#' xgb.DMatrix.save(dtrain, 'xgb.DMatrix.data')
|
#' xgb.DMatrix.save(dtrain, 'xgb.DMatrix.data')
|
||||||
#' dtrain <- xgb.DMatrix('xgb.DMatrix.data')
|
#' dtrain <- xgb.DMatrix('xgb.DMatrix.data')
|
||||||
|
#' if (file.exists('xgb.DMatrix.data')) file.remove('xgb.DMatrix.data')
|
||||||
#' @export
|
#' @export
|
||||||
xgb.DMatrix <- function(data, info = list(), missing = NA, silent = FALSE, ...) {
|
xgb.DMatrix <- function(data, info = list(), missing = NA, silent = FALSE, ...) {
|
||||||
cnames <- NULL
|
cnames <- NULL
|
||||||
@@ -104,7 +105,7 @@ dim.xgb.DMatrix <- function(x) {
|
|||||||
#' Handling of column names of \code{xgb.DMatrix}
|
#' Handling of column names of \code{xgb.DMatrix}
|
||||||
#'
|
#'
|
||||||
#' Only column names are supported for \code{xgb.DMatrix}, thus setting of
|
#' Only column names are supported for \code{xgb.DMatrix}, thus setting of
|
||||||
#' row names would have no effect and returnten row names would be NULL.
|
#' row names would have no effect and returned row names would be NULL.
|
||||||
#'
|
#'
|
||||||
#' @param x object of class \code{xgb.DMatrix}
|
#' @param x object of class \code{xgb.DMatrix}
|
||||||
#' @param value a list of two elements: the first one is ignored
|
#' @param value a list of two elements: the first one is ignored
|
||||||
@@ -187,9 +188,10 @@ getinfo <- function(object, ...) UseMethod("getinfo")
 getinfo.xgb.DMatrix <- function(object, name, ...) {
   if (typeof(name) != "character" ||
       length(name) != 1 ||
-      !name %in% c('label', 'weight', 'base_margin', 'nrow')) {
+      !name %in% c('label', 'weight', 'base_margin', 'nrow',
+                   'label_lower_bound', 'label_upper_bound')) {
     stop("getinfo: name must be one of the following\n",
-         " 'label', 'weight', 'base_margin', 'nrow'")
+         " 'label', 'weight', 'base_margin', 'nrow', 'label_lower_bound', 'label_upper_bound'")
   }
   if (name != "nrow"){
     ret <- .Call(XGDMatrixGetInfo_R, object, name)
@@ -242,9 +244,19 @@ setinfo.xgb.DMatrix <- function(object, name, info, ...) {
     .Call(XGDMatrixSetInfo_R, object, name, as.numeric(info))
     return(TRUE)
   }
-  if (name == "weight") {
+  if (name == "label_lower_bound") {
     if (length(info) != nrow(object))
-      stop("The length of weights must equal to the number of rows in the input data")
+      stop("The length of lower-bound labels must equal to the number of rows in the input data")
+    .Call(XGDMatrixSetInfo_R, object, name, as.numeric(info))
+    return(TRUE)
+  }
+  if (name == "label_upper_bound") {
+    if (length(info) != nrow(object))
+      stop("The length of upper-bound labels must equal to the number of rows in the input data")
+    .Call(XGDMatrixSetInfo_R, object, name, as.numeric(info))
+    return(TRUE)
+  }
+  if (name == "weight") {
     .Call(XGDMatrixSetInfo_R, object, name, as.numeric(info))
     return(TRUE)
   }
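A hedged sketch of the new lower/upper-bound label fields (synthetic data; these bounds are typically consumed by a survival objective, which is not shown in this hunk):

library(xgboost)
set.seed(42)
x <- matrix(rnorm(100 * 4), nrow = 100)
lower <- rexp(100, rate = 0.1)          # lower bound of each (censored) label
upper <- lower + rexp(100, rate = 0.2)  # upper bound; Inf would indicate right-censoring

dtrain <- xgb.DMatrix(x)
setinfo(dtrain, "label_lower_bound", lower)
setinfo(dtrain, "label_upper_bound", upper)
getinfo(dtrain, "label_upper_bound")[1:5]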
@@ -266,10 +278,10 @@ setinfo.xgb.DMatrix <- function(object, name, info, ...) {
|
|||||||
|
|
||||||
|
|
||||||
#' Get a new DMatrix containing the specified rows of
|
#' Get a new DMatrix containing the specified rows of
|
||||||
#' orginal xgb.DMatrix object
|
#' original xgb.DMatrix object
|
||||||
#'
|
#'
|
||||||
#' Get a new DMatrix containing the specified rows of
|
#' Get a new DMatrix containing the specified rows of
|
||||||
#' orginal xgb.DMatrix object
|
#' original xgb.DMatrix object
|
||||||
#'
|
#'
|
||||||
#' @param object Object of class "xgb.DMatrix"
|
#' @param object Object of class "xgb.DMatrix"
|
||||||
#' @param idxset a integer vector of indices of rows needed
|
#' @param idxset a integer vector of indices of rows needed
|
||||||
@@ -301,12 +313,17 @@ slice.xgb.DMatrix <- function(object, idxset, ...) {
|
|||||||
|
|
||||||
attr_list <- attributes(object)
|
attr_list <- attributes(object)
|
||||||
nr <- nrow(object)
|
nr <- nrow(object)
|
||||||
len <- sapply(attr_list, length)
|
len <- sapply(attr_list, NROW)
|
||||||
ind <- which(len == nr)
|
ind <- which(len == nr)
|
||||||
if (length(ind) > 0) {
|
if (length(ind) > 0) {
|
||||||
nms <- names(attr_list)[ind]
|
nms <- names(attr_list)[ind]
|
||||||
for (i in seq_along(ind)) {
|
for (i in seq_along(ind)) {
|
||||||
attr(ret, nms[i]) <- attr(object, nms[i])[idxset]
|
obj_attr <- attr(object, nms[i])
|
||||||
|
if (NCOL(obj_attr) > 1) {
|
||||||
|
attr(ret, nms[i]) <- obj_attr[idxset, ]
|
||||||
|
} else {
|
||||||
|
attr(ret, nms[i]) <- obj_attr[idxset]
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return(structure(ret, class = "xgb.DMatrix"))
|
return(structure(ret, class = "xgb.DMatrix"))
|
||||||
@@ -341,9 +358,9 @@ slice.xgb.DMatrix <- function(object, idxset, ...) {
|
|||||||
print.xgb.DMatrix <- function(x, verbose = FALSE, ...) {
|
print.xgb.DMatrix <- function(x, verbose = FALSE, ...) {
|
||||||
cat('xgb.DMatrix dim:', nrow(x), 'x', ncol(x), ' info: ')
|
cat('xgb.DMatrix dim:', nrow(x), 'x', ncol(x), ' info: ')
|
||||||
infos <- c()
|
infos <- c()
|
||||||
if(length(getinfo(x, 'label')) > 0) infos <- 'label'
|
if (length(getinfo(x, 'label')) > 0) infos <- 'label'
|
||||||
if(length(getinfo(x, 'weight')) > 0) infos <- c(infos, 'weight')
|
if (length(getinfo(x, 'weight')) > 0) infos <- c(infos, 'weight')
|
||||||
if(length(getinfo(x, 'base_margin')) > 0) infos <- c(infos, 'base_margin')
|
if (length(getinfo(x, 'base_margin')) > 0) infos <- c(infos, 'base_margin')
|
||||||
if (length(infos) == 0) infos <- 'NA'
|
if (length(infos) == 0) infos <- 'NA'
|
||||||
cat(infos)
|
cat(infos)
|
||||||
cnames <- colnames(x)
|
cnames <- colnames(x)
|
||||||
|
|||||||
@@ -11,6 +11,7 @@
|
|||||||
#' dtrain <- xgb.DMatrix(train$data, label=train$label)
|
#' dtrain <- xgb.DMatrix(train$data, label=train$label)
|
||||||
#' xgb.DMatrix.save(dtrain, 'xgb.DMatrix.data')
|
#' xgb.DMatrix.save(dtrain, 'xgb.DMatrix.data')
|
||||||
#' dtrain <- xgb.DMatrix('xgb.DMatrix.data')
|
#' dtrain <- xgb.DMatrix('xgb.DMatrix.data')
|
||||||
|
#' if (file.exists('xgb.DMatrix.data')) file.remove('xgb.DMatrix.data')
|
||||||
#' @export
|
#' @export
|
||||||
xgb.DMatrix.save <- function(dmatrix, fname) {
|
xgb.DMatrix.save <- function(dmatrix, fname) {
|
||||||
if (typeof(fname) != "character")
|
if (typeof(fname) != "character")
|
||||||
|
|||||||
@@ -52,9 +52,9 @@
|
|||||||
#' dtest <- xgb.DMatrix(data = agaricus.test$data, label = agaricus.test$label)
|
#' dtest <- xgb.DMatrix(data = agaricus.test$data, label = agaricus.test$label)
|
||||||
#'
|
#'
|
||||||
#' param <- list(max_depth=2, eta=1, silent=1, objective='binary:logistic')
|
#' param <- list(max_depth=2, eta=1, silent=1, objective='binary:logistic')
|
||||||
#' nround = 4
|
#' nrounds = 4
|
||||||
#'
|
#'
|
||||||
#' bst = xgb.train(params = param, data = dtrain, nrounds = nround, nthread = 2)
|
#' bst = xgb.train(params = param, data = dtrain, nrounds = nrounds, nthread = 2)
|
||||||
#'
|
#'
|
||||||
#' # Model accuracy without new features
|
#' # Model accuracy without new features
|
||||||
#' accuracy.before <- sum((predict(bst, agaricus.test$data) >= 0.5) == agaricus.test$label) /
|
#' accuracy.before <- sum((predict(bst, agaricus.test$data) >= 0.5) == agaricus.test$label) /
|
||||||
@@ -68,7 +68,7 @@
|
|||||||
#' new.dtrain <- xgb.DMatrix(data = new.features.train, label = agaricus.train$label)
|
#' new.dtrain <- xgb.DMatrix(data = new.features.train, label = agaricus.train$label)
|
||||||
#' new.dtest <- xgb.DMatrix(data = new.features.test, label = agaricus.test$label)
|
#' new.dtest <- xgb.DMatrix(data = new.features.test, label = agaricus.test$label)
|
||||||
#' watchlist <- list(train = new.dtrain)
|
#' watchlist <- list(train = new.dtrain)
|
||||||
#' bst <- xgb.train(params = param, data = new.dtrain, nrounds = nround, nthread = 2)
|
#' bst <- xgb.train(params = param, data = new.dtrain, nrounds = nrounds, nthread = 2)
|
||||||
#'
|
#'
|
||||||
#' # Model accuracy with new features
|
#' # Model accuracy with new features
|
||||||
#' accuracy.after <- sum((predict(bst, new.dtest) >= 0.5) == agaricus.test$label) /
|
#' accuracy.after <- sum((predict(bst, new.dtest) >= 0.5) == agaricus.test$label) /
|
||||||
@@ -83,5 +83,5 @@ xgb.create.features <- function(model, data, ...){
|
|||||||
check.deprecation(...)
|
check.deprecation(...)
|
||||||
pred_with_leaf <- predict(model, data, predleaf = TRUE)
|
pred_with_leaf <- predict(model, data, predleaf = TRUE)
|
||||||
cols <- lapply(as.data.frame(pred_with_leaf), factor)
|
cols <- lapply(as.data.frame(pred_with_leaf), factor)
|
||||||
cbind(data, sparse.model.matrix( ~ . -1, cols))
|
cbind(data, sparse.model.matrix(~ . -1, cols)) # nolint
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,12 +2,15 @@
 #'
 #' The cross validation function of xgboost
 #'
-#' @param params the list of parameters. Commonly used ones are:
+#' @param params the list of parameters. The complete list of parameters is
+#' available in the \href{http://xgboost.readthedocs.io/en/latest/parameter.html}{online documentation}. Below
+#' is a shorter summary:
 #' \itemize{
 #' \item \code{objective} objective function, common ones are
 #' \itemize{
-#' \item \code{reg:linear} linear regression
-#' \item \code{binary:logistic} logistic regression for classification
+#' \item \code{reg:squarederror} Regression with squared loss.
+#' \item \code{binary:logistic} logistic regression for classification.
+#' \item See \code{\link[=xgb.train]{xgb.train}()} for complete list of objectives.
 #' }
 #' \item \code{eta} step size of each boosting step
 #' \item \code{max_depth} maximum depth of the tree
@@ -39,7 +42,7 @@
 #' }
 #' @param obj customized objective function. Returns gradient and second order
 #' gradient with given prediction and dtrain.
-#' @param feval custimized evaluation function. Returns
+#' @param feval customized evaluation function. Returns
 #' \code{list(metric='metric-name', value='metric-value')} with given
 #' prediction and dtrain.
 #' @param stratified a \code{boolean} indicating whether sampling of folds should be stratified
@@ -47,6 +50,8 @@
 #' @param folds \code{list} provides a possibility to use a list of pre-defined CV folds
 #' (each element must be a vector of test fold's indices). When folds are supplied,
 #' the \code{nfold} and \code{stratified} parameters are ignored.
+#' @param train_folds \code{list} list specifying which indicies to use for training. If \code{NULL}
+#' (the default) all indices not specified in \code{folds} will be used for training.
 #' @param verbose \code{boolean}, print the statistics during the process
 #' @param print_every_n Print each n-th iteration evaluation messages when \code{verbose>0}.
 #' Default is 1 which means all messages are printed. This parameter is passed to the
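A hedged sketch of the new `train_folds` argument documented above (the fold indices here are purely illustrative; in practice they would come from an external resampling scheme):

library(xgboost)
data(agaricus.train, package = "xgboost")
dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)

n <- nrow(dtrain)
folds <- list(1:2000, 2001:4000, 4001:n)                              # test indices per fold
train_folds <- lapply(folds, function(idx) setdiff(seq_len(n), idx))  # explicit training indices

cv <- xgb.cv(params = list(max_depth = 2, eta = 1, nthread = 2,
                           objective = "binary:logistic"),
             data = dtrain, nrounds = 4,
             folds = folds, train_folds = train_folds, verbose = FALSE)
cv$evaluation_log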
@@ -84,7 +89,7 @@
|
|||||||
#' capture parameters changed by the \code{\link{cb.reset.parameters}} callback.
|
#' capture parameters changed by the \code{\link{cb.reset.parameters}} callback.
|
||||||
#' \item \code{callbacks} callback functions that were either automatically assigned or
|
#' \item \code{callbacks} callback functions that were either automatically assigned or
|
||||||
#' explicitly passed.
|
#' explicitly passed.
|
||||||
#' \item \code{evaluation_log} evaluation history storead as a \code{data.table} with the
|
#' \item \code{evaluation_log} evaluation history stored as a \code{data.table} with the
|
||||||
#' first column corresponding to iteration number and the rest corresponding to the
|
#' first column corresponding to iteration number and the rest corresponding to the
|
||||||
#' CV-based evaluation means and standard deviations for the training and test CV-sets.
|
#' CV-based evaluation means and standard deviations for the training and test CV-sets.
|
||||||
#' It is created by the \code{\link{cb.evaluation.log}} callback.
|
#' It is created by the \code{\link{cb.evaluation.log}} callback.
|
||||||
@@ -99,7 +104,7 @@
|
|||||||
#' (only available with early stopping).
|
#' (only available with early stopping).
|
||||||
#' \item \code{pred} CV prediction values available when \code{prediction} is set.
|
#' \item \code{pred} CV prediction values available when \code{prediction} is set.
|
||||||
#' It is either vector or matrix (see \code{\link{cb.cv.predict}}).
|
#' It is either vector or matrix (see \code{\link{cb.cv.predict}}).
|
||||||
#' \item \code{models} a liost of the CV folds' models. It is only available with the explicit
|
#' \item \code{models} a list of the CV folds' models. It is only available with the explicit
|
||||||
#' setting of the \code{cb.cv.predict(save_models = TRUE)} callback.
|
#' setting of the \code{cb.cv.predict(save_models = TRUE)} callback.
|
||||||
#' }
|
#' }
|
||||||
#'
|
#'
|
||||||
@@ -114,7 +119,7 @@
|
|||||||
#' @export
|
#' @export
|
||||||
xgb.cv <- function(params=list(), data, nrounds, nfold, label = NULL, missing = NA,
|
xgb.cv <- function(params=list(), data, nrounds, nfold, label = NULL, missing = NA,
|
||||||
prediction = FALSE, showsd = TRUE, metrics=list(),
|
prediction = FALSE, showsd = TRUE, metrics=list(),
|
||||||
obj = NULL, feval = NULL, stratified = TRUE, folds = NULL,
|
obj = NULL, feval = NULL, stratified = TRUE, folds = NULL, train_folds = NULL,
|
||||||
verbose = TRUE, print_every_n=1L,
|
verbose = TRUE, print_every_n=1L,
|
||||||
early_stopping_rounds = NULL, maximize = NULL, callbacks = list(), ...) {
|
early_stopping_rounds = NULL, maximize = NULL, callbacks = list(), ...) {
|
||||||
|
|
||||||
@@ -132,19 +137,26 @@ xgb.cv <- function(params=list(), data, nrounds, nfold, label = NULL, missing =
|
|||||||
# stop("Either 'eval_metric' or 'feval' must be provided for CV")
|
# stop("Either 'eval_metric' or 'feval' must be provided for CV")
|
||||||
|
|
||||||
# Check the labels
|
# Check the labels
|
||||||
if ( (inherits(data, 'xgb.DMatrix') && is.null(getinfo(data, 'label'))) ||
|
if ((inherits(data, 'xgb.DMatrix') && is.null(getinfo(data, 'label'))) ||
|
||||||
(!inherits(data, 'xgb.DMatrix') && is.null(label)))
|
(!inherits(data, 'xgb.DMatrix') && is.null(label))) {
|
||||||
stop("Labels must be provided for CV either through xgb.DMatrix, or through 'label=' when 'data' is matrix")
|
stop("Labels must be provided for CV either through xgb.DMatrix, or through 'label=' when 'data' is matrix")
|
||||||
|
} else if (inherits(data, 'xgb.DMatrix')) {
|
||||||
|
if (!is.null(label))
|
||||||
|
warning("xgb.cv: label will be ignored, since data is of type xgb.DMatrix")
|
||||||
|
cv_label <- getinfo(data, 'label')
|
||||||
|
} else {
|
||||||
|
cv_label <- label
|
||||||
|
}
|
||||||
|
|
||||||
# CV folds
|
# CV folds
|
||||||
if(!is.null(folds)) {
|
if (!is.null(folds)) {
|
||||||
if(!is.list(folds) || length(folds) < 2)
|
if (!is.list(folds) || length(folds) < 2)
|
||||||
stop("'folds' must be a list with 2 or more elements that are vectors of indices for each CV-fold")
|
stop("'folds' must be a list with 2 or more elements that are vectors of indices for each CV-fold")
|
||||||
nfold <- length(folds)
|
nfold <- length(folds)
|
||||||
} else {
|
} else {
|
||||||
if (nfold <= 1)
|
if (nfold <= 1)
|
||||||
stop("'nfold' must be > 1")
|
stop("'nfold' must be > 1")
|
||||||
folds <- generate.cv.folds(nfold, nrow(data), stratified, label, params)
|
folds <- generate.cv.folds(nfold, nrow(data), stratified, cv_label, params)
|
||||||
}
|
}
|
||||||
|
|
||||||
# Potential TODO: sequential CV
|
# Potential TODO: sequential CV
|
||||||
@@ -153,7 +165,7 @@ xgb.cv <- function(params=list(), data, nrounds, nfold, label = NULL, missing =
|
|||||||
|
|
||||||
# verbosity & evaluation printing callback:
|
# verbosity & evaluation printing callback:
|
||||||
params <- c(params, list(silent = 1))
|
params <- c(params, list(silent = 1))
|
||||||
print_every_n <- max( as.integer(print_every_n), 1L)
|
print_every_n <- max(as.integer(print_every_n), 1L)
|
||||||
if (!has.callbacks(callbacks, 'cb.print.evaluation') && verbose) {
|
if (!has.callbacks(callbacks, 'cb.print.evaluation') && verbose) {
|
||||||
callbacks <- add.cb(callbacks, cb.print.evaluation(print_every_n, showsd = showsd))
|
callbacks <- add.cb(callbacks, cb.print.evaluation(print_every_n, showsd = showsd))
|
||||||
}
|
}
|
||||||
@@ -179,20 +191,25 @@ xgb.cv <- function(params=list(), data, nrounds, nfold, label = NULL, missing =
|
|||||||
|
|
||||||
|
|
||||||
# create the booster-folds
|
# create the booster-folds
|
||||||
|
# train_folds
|
||||||
dall <- xgb.get.DMatrix(data, label, missing)
|
dall <- xgb.get.DMatrix(data, label, missing)
|
||||||
bst_folds <- lapply(seq_along(folds), function(k) {
|
bst_folds <- lapply(seq_along(folds), function(k) {
|
||||||
dtest <- slice(dall, folds[[k]])
|
dtest <- slice(dall, folds[[k]])
|
||||||
dtrain <- slice(dall, unlist(folds[-k]))
|
# code originally contributed by @RolandASc on stackoverflow
|
||||||
|
if (is.null(train_folds))
|
||||||
|
dtrain <- slice(dall, unlist(folds[-k]))
|
||||||
|
else
|
||||||
|
dtrain <- slice(dall, train_folds[[k]])
|
||||||
handle <- xgb.Booster.handle(params, list(dtrain, dtest))
|
handle <- xgb.Booster.handle(params, list(dtrain, dtest))
|
||||||
list(dtrain = dtrain, bst = handle, watchlist = list(train = dtrain, test=dtest), index = folds[[k]])
|
list(dtrain = dtrain, bst = handle, watchlist = list(train = dtrain, test = dtest), index = folds[[k]])
|
||||||
})
|
})
|
||||||
rm(dall)
|
rm(dall)
|
||||||
# a "basket" to collect some results from callbacks
|
# a "basket" to collect some results from callbacks
|
||||||
basket <- list()
|
basket <- list()
|
||||||
|
|
||||||
# extract parameters that can affect the relationship b/w #trees and #iterations
|
# extract parameters that can affect the relationship b/w #trees and #iterations
|
||||||
num_class <- max(as.numeric(NVL(params[['num_class']], 1)), 1)
|
num_class <- max(as.numeric(NVL(params[['num_class']], 1)), 1) # nolint
|
||||||
num_parallel_tree <- max(as.numeric(NVL(params[['num_parallel_tree']], 1)), 1)
|
num_parallel_tree <- max(as.numeric(NVL(params[['num_parallel_tree']], 1)), 1) # nolint
|
||||||
|
|
||||||
# those are fixed for CV (no training continuation)
|
# those are fixed for CV (no training continuation)
|
||||||
begin_iteration <- 1
|
begin_iteration <- 1
|
||||||
@@ -209,7 +226,7 @@ xgb.cv <- function(params=list(), data, nrounds, nfold, label = NULL, missing =
|
|||||||
})
|
})
|
||||||
msg <- simplify2array(msg)
|
msg <- simplify2array(msg)
|
||||||
bst_evaluation <- rowMeans(msg)
|
bst_evaluation <- rowMeans(msg)
|
||||||
bst_evaluation_err <- sqrt(rowMeans(msg^2) - bst_evaluation^2)
|
bst_evaluation_err <- sqrt(rowMeans(msg^2) - bst_evaluation^2) # nolint
|
||||||
|
|
||||||
for (f in cb$post_iter) f()
|
for (f in cb$post_iter) f()
|
||||||
|
|
||||||
@@ -268,10 +285,10 @@ print.xgb.cv.synchronous <- function(x, verbose = FALSE, ...) {
|
|||||||
}
|
}
|
||||||
if (!is.null(x$params)) {
|
if (!is.null(x$params)) {
|
||||||
cat('params (as set within xgb.cv):\n')
|
cat('params (as set within xgb.cv):\n')
|
||||||
cat( ' ',
|
cat(' ',
|
||||||
paste(names(x$params),
|
paste(names(x$params),
|
||||||
paste0('"', unlist(x$params), '"'),
|
paste0('"', unlist(x$params), '"'),
|
||||||
sep = ' = ', collapse = ', '), '\n', sep = '')
|
sep = ' = ', collapse = ', '), '\n', sep = '')
|
||||||
}
|
}
|
||||||
if (!is.null(x$callbacks) && length(x$callbacks) > 0) {
|
if (!is.null(x$callbacks) && length(x$callbacks) > 0) {
|
||||||
cat('callbacks:\n')
|
cat('callbacks:\n')
|
||||||
|
|||||||
@@ -22,7 +22,7 @@ xgb.ggplot.importance <- function(importance_matrix = NULL, top_n = NULL, measur
|
|||||||
|
|
||||||
plot <-
|
plot <-
|
||||||
ggplot2::ggplot(importance_matrix,
|
ggplot2::ggplot(importance_matrix,
|
||||||
ggplot2::aes(x = factor(Feature, levels = rev(Feature)), y = Importance, width = 0.05),
|
ggplot2::aes(x = factor(Feature, levels = rev(Feature)), y = Importance, width = 0.5),
|
||||||
environment = environment()) +
|
environment = environment()) +
|
||||||
ggplot2::geom_bar(ggplot2::aes(fill = Cluster), stat = "identity", position = "identity") +
|
ggplot2::geom_bar(ggplot2::aes(fill = Cluster), stat = "identity", position = "identity") +
|
||||||
ggplot2::coord_flip() +
|
ggplot2::coord_flip() +
|
||||||
@@ -74,7 +74,7 @@ xgb.ggplot.deepness <- function(model = NULL, which = c("2x1", "max.depth", "med
|
|||||||
p <-
|
p <-
|
||||||
ggplot2::ggplot(dt_depths[, max(Depth), Tree]) +
|
ggplot2::ggplot(dt_depths[, max(Depth), Tree]) +
|
||||||
ggplot2::geom_jitter(ggplot2::aes(x = Tree, y = V1),
|
ggplot2::geom_jitter(ggplot2::aes(x = Tree, y = V1),
|
||||||
height = 0.15, alpha=0.4, size=3, stroke=0) +
|
height = 0.15, alpha = 0.4, size = 3, stroke = 0) +
|
||||||
ggplot2::xlab("tree #") +
|
ggplot2::xlab("tree #") +
|
||||||
ggplot2::ylab("Max tree leaf depth")
|
ggplot2::ylab("Max tree leaf depth")
|
||||||
return(p)
|
return(p)
|
||||||
@@ -83,7 +83,7 @@ xgb.ggplot.deepness <- function(model = NULL, which = c("2x1", "max.depth", "med
|
|||||||
p <-
|
p <-
|
||||||
ggplot2::ggplot(dt_depths[, median(as.numeric(Depth)), Tree]) +
|
ggplot2::ggplot(dt_depths[, median(as.numeric(Depth)), Tree]) +
|
||||||
ggplot2::geom_jitter(ggplot2::aes(x = Tree, y = V1),
|
ggplot2::geom_jitter(ggplot2::aes(x = Tree, y = V1),
|
||||||
height = 0.15, alpha=0.4, size=3, stroke=0) +
|
height = 0.15, alpha = 0.4, size = 3, stroke = 0) +
|
||||||
ggplot2::xlab("tree #") +
|
ggplot2::xlab("tree #") +
|
||||||
ggplot2::ylab("Median tree leaf depth")
|
ggplot2::ylab("Median tree leaf depth")
|
||||||
return(p)
|
return(p)
|
||||||
@@ -92,7 +92,7 @@ xgb.ggplot.deepness <- function(model = NULL, which = c("2x1", "max.depth", "med
|
|||||||
p <-
|
p <-
|
||||||
ggplot2::ggplot(dt_depths[, median(abs(Weight)), Tree]) +
|
ggplot2::ggplot(dt_depths[, median(abs(Weight)), Tree]) +
|
||||||
ggplot2::geom_point(ggplot2::aes(x = Tree, y = V1),
|
ggplot2::geom_point(ggplot2::aes(x = Tree, y = V1),
|
||||||
alpha=0.4, size=3, stroke=0) +
|
alpha = 0.4, size = 3, stroke = 0) +
|
||||||
ggplot2::xlab("tree #") +
|
ggplot2::xlab("tree #") +
|
||||||
ggplot2::ylab("Median absolute leaf weight")
|
ggplot2::ylab("Median absolute leaf weight")
|
||||||
return(p)
|
return(p)
|
||||||
@@ -105,7 +105,7 @@ xgb.ggplot.deepness <- function(model = NULL, which = c("2x1", "max.depth", "med
|
|||||||
# internal utility function
|
# internal utility function
|
||||||
multiplot <- function(..., cols = 1) {
|
multiplot <- function(..., cols = 1) {
|
||||||
plots <- list(...)
|
plots <- list(...)
|
||||||
num_plots = length(plots)
|
num_plots <- length(plots)
|
||||||
|
|
||||||
layout <- matrix(seq(1, cols * ceiling(num_plots / cols)),
|
layout <- matrix(seq(1, cols * ceiling(num_plots / cols)),
|
||||||
ncol = cols, nrow = ceiling(num_plots / cols))
|
ncol = cols, nrow = ceiling(num_plots / cols))
|
||||||
|
|||||||
@@ -99,13 +99,13 @@ xgb.importance <- function(feature_names = NULL, model = NULL, trees = NULL,
|
|||||||
model_text_dump <- xgb.dump(model = model, with_stats = TRUE)
|
model_text_dump <- xgb.dump(model = model, with_stats = TRUE)
|
||||||
|
|
||||||
# linear model
|
# linear model
|
||||||
if(model_text_dump[2] == "bias:"){
|
if (model_text_dump[2] == "bias:"){
|
||||||
weights <- which(model_text_dump == "weight:") %>%
|
weights <- which(model_text_dump == "weight:") %>%
|
||||||
{model_text_dump[(. + 1):length(model_text_dump)]} %>%
|
{model_text_dump[(. + 1):length(model_text_dump)]} %>%
|
||||||
as.numeric
|
as.numeric
|
||||||
|
|
||||||
num_class <- NVL(model$params$num_class, 1)
|
num_class <- NVL(model$params$num_class, 1)
|
||||||
if(is.null(feature_names))
|
if (is.null(feature_names))
|
||||||
feature_names <- seq(to = length(weights) / num_class) - 1
|
feature_names <- seq(to = length(weights) / num_class) - 1
|
||||||
if (length(feature_names) * num_class != length(weights))
|
if (length(feature_names) * num_class != length(weights))
|
||||||
stop("feature_names length does not match the number of features used in the model")
|
stop("feature_names length does not match the number of features used in the model")
|
||||||
@@ -117,18 +117,17 @@ xgb.importance <- function(feature_names = NULL, model = NULL, trees = NULL,
|
|||||||
Weight = weights,
|
Weight = weights,
|
||||||
Class = seq_len(num_class) - 1)[order(Class, -abs(Weight))]
|
Class = seq_len(num_class) - 1)[order(Class, -abs(Weight))]
|
||||||
}
|
}
|
||||||
} else {
|
} else { # tree model
|
||||||
# tree model
|
result <- xgb.model.dt.tree(feature_names = feature_names,
|
||||||
result <- xgb.model.dt.tree(feature_names = feature_names,
|
text = model_text_dump,
|
||||||
text = model_text_dump,
|
trees = trees)[
|
||||||
trees = trees)[
|
Feature != "Leaf", .(Gain = sum(Quality),
|
||||||
Feature != "Leaf", .(Gain = sum(Quality),
|
Cover = sum(Cover),
|
||||||
Cover = sum(Cover),
|
Frequency = .N), by = Feature][
|
||||||
Frequency = .N), by = Feature][
|
, `:=`(Gain = Gain / sum(Gain),
|
||||||
,`:=`(Gain = Gain / sum(Gain),
|
Cover = Cover / sum(Cover),
|
||||||
Cover = Cover / sum(Cover),
|
Frequency = Frequency / sum(Frequency))][
|
||||||
Frequency = Frequency / sum(Frequency))][
|
order(Gain, decreasing = TRUE)]
|
||||||
order(Gain, decreasing = TRUE)]
|
|
||||||
}
|
}
|
||||||
result
|
result
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -28,6 +28,7 @@
|
|||||||
#' eta = 1, nthread = 2, nrounds = 2,objective = "binary:logistic")
|
#' eta = 1, nthread = 2, nrounds = 2,objective = "binary:logistic")
|
||||||
#' xgb.save(bst, 'xgb.model')
|
#' xgb.save(bst, 'xgb.model')
|
||||||
#' bst <- xgb.load('xgb.model')
|
#' bst <- xgb.load('xgb.model')
|
||||||
|
#' if (file.exists('xgb.model')) file.remove('xgb.model')
|
||||||
#' pred <- predict(bst, test$data)
|
#' pred <- predict(bst, test$data)
|
||||||
#' @export
|
#' @export
|
||||||
xgb.load <- function(modelfile) {
|
xgb.load <- function(modelfile) {
|
||||||
|
|||||||
R-package/R/xgb.load.raw.R (new file, 14 lines added)
@@ -0,0 +1,14 @@
+#' Load serialised xgboost model from R's raw vector
+#'
+#' User can generate raw memory buffer by calling xgb.save.raw
+#'
+#' @param buffer the buffer returned by xgb.save.raw
+#'
+#' @export
+xgb.load.raw <- function(buffer) {
+  cachelist <- list()
+  handle <- .Call(XGBoosterCreate_R, cachelist)
+  .Call(XGBoosterLoadModelFromRaw_R, handle, buffer)
+  class(handle) <- "xgb.Booster.handle"
+  return (handle)
+}
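A hedged round-trip sketch for the new file (assuming `xgb.save.raw` produces the buffer, as its roxygen text states):

library(xgboost)
data(agaricus.train, package = "xgboost")
bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label,
               max_depth = 2, eta = 1, nthread = 2, nrounds = 2,
               objective = "binary:logistic")

raw_buf <- xgb.save.raw(bst)        # raw vector, no file involved
handle  <- xgb.load.raw(raw_buf)    # xgb.Booster.handle restored from memory

# Predictions through the restored handle should agree with the original booster.
max(abs(predict(handle, agaricus.train$data) - predict(bst, agaricus.train$data)))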
@@ -108,7 +108,7 @@ xgb.model.dt.tree <- function(feature_names = NULL, model = NULL, text = NULL,
|
|||||||
}
|
}
|
||||||
td <- td[Tree %in% trees & !grepl('^booster', t)]
|
td <- td[Tree %in% trees & !grepl('^booster', t)]
|
||||||
|
|
||||||
td[, Node := stri_match_first_regex(t, "(\\d+):")[,2] %>% as.integer ]
|
td[, Node := stri_match_first_regex(t, "(\\d+):")[, 2] %>% as.integer]
|
||||||
if (!use_int_id) td[, ID := add.tree.id(Node, Tree)]
|
if (!use_int_id) td[, ID := add.tree.id(Node, Tree)]
|
||||||
td[, isLeaf := !is.na(stri_match_first_regex(t, "leaf"))]
|
td[, isLeaf := !is.na(stri_match_first_regex(t, "leaf"))]
|
||||||
|
|
||||||
@@ -119,15 +119,15 @@ xgb.model.dt.tree <- function(feature_names = NULL, model = NULL, text = NULL,
|
|||||||
td[isLeaf == FALSE,
|
td[isLeaf == FALSE,
|
||||||
(branch_cols) := {
|
(branch_cols) := {
|
||||||
# skip some indices with spurious capture groups from anynumber_regex
|
# skip some indices with spurious capture groups from anynumber_regex
|
||||||
xtr <- stri_match_first_regex(t, branch_rx)[, c(2,3,5,6,7,8,10), drop = FALSE]
|
xtr <- stri_match_first_regex(t, branch_rx)[, c(2, 3, 5, 6, 7, 8, 10), drop = FALSE]
|
||||||
xtr[, 3:5] <- add.tree.id(xtr[, 3:5], Tree)
|
xtr[, 3:5] <- add.tree.id(xtr[, 3:5], Tree)
|
||||||
lapply(seq_len(ncol(xtr)), function(i) xtr[,i])
|
lapply(seq_len(ncol(xtr)), function(i) xtr[, i])
|
||||||
}]
|
}]
|
||||||
# assign feature_names when available
|
# assign feature_names when available
|
||||||
if (!is.null(feature_names)) {
|
if (!is.null(feature_names)) {
|
||||||
if (length(feature_names) <= max(as.numeric(td$Feature), na.rm = TRUE))
|
if (length(feature_names) <= max(as.numeric(td$Feature), na.rm = TRUE))
|
||||||
stop("feature_names has less elements than there are features used in the model")
|
stop("feature_names has less elements than there are features used in the model")
|
||||||
td[isLeaf == FALSE, Feature := feature_names[as.numeric(Feature) + 1] ]
|
td[isLeaf == FALSE, Feature := feature_names[as.numeric(Feature) + 1]]
|
||||||
}
|
}
|
||||||
|
|
||||||
# parse leaf lines
|
# parse leaf lines
|
||||||
@@ -135,8 +135,8 @@ xgb.model.dt.tree <- function(feature_names = NULL, model = NULL, text = NULL,
|
|||||||
leaf_cols <- c("Feature", "Quality", "Cover")
|
leaf_cols <- c("Feature", "Quality", "Cover")
|
||||||
td[isLeaf == TRUE,
|
td[isLeaf == TRUE,
|
||||||
(leaf_cols) := {
|
(leaf_cols) := {
|
||||||
xtr <- stri_match_first_regex(t, leaf_rx)[, c(2,4)]
|
xtr <- stri_match_first_regex(t, leaf_rx)[, c(2, 4)]
|
||||||
c("Leaf", lapply(seq_len(ncol(xtr)), function(i) xtr[,i]))
|
c("Leaf", lapply(seq_len(ncol(xtr)), function(i) xtr[, i]))
|
||||||
}]
|
}]
|
||||||
|
|
||||||
# convert some columns to numeric
|
# convert some columns to numeric
|
||||||
@@ -156,4 +156,4 @@ xgb.model.dt.tree <- function(feature_names = NULL, model = NULL, text = NULL,
|
|||||||
# Avoid error messages during CRAN check.
|
# Avoid error messages during CRAN check.
|
||||||
# The reason is that these variables are never declared
|
# The reason is that these variables are never declared
|
||||||
# They are mainly column names inferred by Data.table...
|
# They are mainly column names inferred by Data.table...
|
||||||
globalVariables(c("Tree", "Node", "ID", "Feature", "t", "isLeaf",".SD", ".SDcols"))
|
globalVariables(c("Tree", "Node", "ID", "Feature", "t", "isLeaf", ".SD", ".SDcols"))
|
||||||
|
|||||||
@@ -27,7 +27,7 @@
|
|||||||
#' a tree's median absolute leaf weight changes through the iterations.
|
#' a tree's median absolute leaf weight changes through the iterations.
|
||||||
#'
|
#'
|
||||||
#' This function was inspired by the blog post
|
#' This function was inspired by the blog post
|
||||||
#' \url{http://aysent.github.io/2015/11/08/random-forest-leaf-visualization.html}.
|
#' \url{https://github.com/aysent/random-forest-leaf-visualization}.
|
||||||
#'
|
#'
|
||||||
#' @return
|
#' @return
|
||||||
#'
|
#'
|
||||||
@@ -89,9 +89,9 @@ xgb.plot.deepness <- function(model = NULL, which = c("2x1", "max.depth", "med.d
|
|||||||
if (plot) {
|
if (plot) {
|
||||||
if (which == "2x1") {
|
if (which == "2x1") {
|
||||||
op <- par(no.readonly = TRUE)
|
op <- par(no.readonly = TRUE)
|
||||||
par(mfrow = c(2,1),
|
par(mfrow = c(2, 1),
|
||||||
oma = c(3,1,3,1) + 0.1,
|
oma = c(3, 1, 3, 1) + 0.1,
|
||||||
mar = c(1,4,1,0) + 0.1)
|
mar = c(1, 4, 1, 0) + 0.1)
|
||||||
|
|
||||||
dt_summaries[, barplot(N, border = NA, ylab = 'Number of leafs', ...)]
|
dt_summaries[, barplot(N, border = NA, ylab = 'Number of leafs', ...)]
|
||||||
|
|
||||||
@@ -130,7 +130,7 @@ get.leaf.depth <- function(dt_tree) {
|
|||||||
dt_edges[is.na(Leaf), Leaf := FALSE]
|
dt_edges[is.na(Leaf), Leaf := FALSE]
|
||||||
|
|
||||||
dt_edges[, {
|
dt_edges[, {
|
||||||
graph <- igraph::graph_from_data_frame(.SD[,.(ID, To)])
|
graph <- igraph::graph_from_data_frame(.SD[, .(ID, To)])
|
||||||
# min(ID) in a tree is a root node
|
# min(ID) in a tree is a root node
|
||||||
paths_tmp <- igraph::shortest_paths(graph, from = min(ID), to = To[Leaf == TRUE])
|
paths_tmp <- igraph::shortest_paths(graph, from = min(ID), to = To[Leaf == TRUE])
|
||||||
# list of paths to each leaf in a tree
|
# list of paths to each leaf in a tree
|
||||||
|
|||||||
@@ -30,8 +30,8 @@
|
|||||||
#' Setting \code{rel_to_first = TRUE} allows to see the picture from the perspective of
|
#' Setting \code{rel_to_first = TRUE} allows to see the picture from the perspective of
|
||||||
#' "what is feature's importance contribution relative to the most important feature?"
|
#' "what is feature's importance contribution relative to the most important feature?"
|
||||||
#'
|
#'
|
||||||
#' The ggplot-backend method also performs 1-D custering of the importance values,
|
#' The ggplot-backend method also performs 1-D clustering of the importance values,
|
||||||
#' with bar colors coresponding to different clusters that have somewhat similar importance values.
|
#' with bar colors corresponding to different clusters that have somewhat similar importance values.
|
||||||
#'
|
#'
|
||||||
#' @return
|
#' @return
|
||||||
#' The \code{xgb.plot.importance} function creates a \code{barplot} (when \code{plot=TRUE})
|
#' The \code{xgb.plot.importance} function creates a \code{barplot} (when \code{plot=TRUE})
|
||||||
@@ -92,10 +92,10 @@ xgb.plot.importance <- function(importance_matrix = NULL, top_n = NULL, measure
|
|||||||
importance_matrix <- head(importance_matrix, top_n)
|
importance_matrix <- head(importance_matrix, top_n)
|
||||||
}
|
}
|
||||||
if (rel_to_first) {
|
if (rel_to_first) {
|
||||||
importance_matrix[, Importance := Importance/max(abs(Importance))]
|
importance_matrix[, Importance := Importance / max(abs(Importance))]
|
||||||
}
|
}
|
||||||
if (is.null(cex)) {
|
if (is.null(cex)) {
|
||||||
cex <- 2.5/log2(1 + nrow(importance_matrix))
|
cex <- 2.5 / log2(1 + nrow(importance_matrix))
|
||||||
}
|
}
|
||||||
|
|
||||||
if (plot) {
|
if (plot) {
|
||||||
|
|||||||
@@ -72,7 +72,7 @@ xgb.plot.multi.trees <- function(model, feature_names = NULL, features_keep = 5,
|
|||||||
|
|
||||||
precedent.nodes <- root.nodes
|
precedent.nodes <- root.nodes
|
||||||
|
|
||||||
while(tree.matrix[,sum(is.na(abs.node.position))] > 0) {
|
while (tree.matrix[, sum(is.na(abs.node.position))] > 0) {
|
||||||
yes.row.nodes <- tree.matrix[abs.node.position %in% precedent.nodes & !is.na(Yes)]
|
yes.row.nodes <- tree.matrix[abs.node.position %in% precedent.nodes & !is.na(Yes)]
|
||||||
no.row.nodes <- tree.matrix[abs.node.position %in% precedent.nodes & !is.na(No)]
|
no.row.nodes <- tree.matrix[abs.node.position %in% precedent.nodes & !is.na(No)]
|
||||||
yes.nodes.abs.pos <- yes.row.nodes[, abs.node.position] %>% paste0("_0")
|
yes.nodes.abs.pos <- yes.row.nodes[, abs.node.position] %>% paste0("_0")
|
||||||
@@ -88,35 +88,35 @@ xgb.plot.multi.trees <- function(model, feature_names = NULL, features_keep = 5,
|
|||||||
|
|
||||||
remove.tree <- . %>% stri_replace_first_regex(pattern = "^\\d+-", replacement = "")
|
remove.tree <- . %>% stri_replace_first_regex(pattern = "^\\d+-", replacement = "")
|
||||||
|
|
||||||
tree.matrix[,`:=`(abs.node.position = remove.tree(abs.node.position),
|
tree.matrix[, `:=`(abs.node.position = remove.tree(abs.node.position),
|
||||||
Yes = remove.tree(Yes),
|
Yes = remove.tree(Yes),
|
||||||
No = remove.tree(No))]
|
No = remove.tree(No))]
|
||||||
|
|
||||||
nodes.dt <- tree.matrix[
|
nodes.dt <- tree.matrix[
|
||||||
, .(Quality = sum(Quality))
|
, .(Quality = sum(Quality))
|
||||||
, by = .(abs.node.position, Feature)
|
, by = .(abs.node.position, Feature)
|
||||||
][, .(Text = paste0(Feature[1:min(length(Feature), features_keep)],
|
][, .(Text = paste0(Feature[1:min(length(Feature), features_keep)],
|
||||||
" (",
|
" (",
|
||||||
format(Quality[1:min(length(Quality), features_keep)], digits=5),
|
format(Quality[1:min(length(Quality), features_keep)], digits = 5),
|
||||||
")") %>%
|
")") %>%
|
||||||
paste0(collapse = "\n"))
|
paste0(collapse = "\n"))
|
||||||
, by = abs.node.position]
|
, by = abs.node.position]
|
||||||
|
|
||||||
edges.dt <- tree.matrix[Feature != "Leaf", .(abs.node.position, Yes)] %>%
|
edges.dt <- tree.matrix[Feature != "Leaf", .(abs.node.position, Yes)] %>%
|
||||||
list(tree.matrix[Feature != "Leaf",.(abs.node.position, No)]) %>%
|
list(tree.matrix[Feature != "Leaf", .(abs.node.position, No)]) %>%
|
||||||
rbindlist() %>%
|
rbindlist() %>%
|
||||||
setnames(c("From", "To")) %>%
|
setnames(c("From", "To")) %>%
|
||||||
.[, .N, .(From, To)] %>%
|
.[, .N, .(From, To)] %>%
|
||||||
.[, N:=NULL]
|
.[, N := NULL]
|
||||||
|
|
||||||
nodes <- DiagrammeR::create_node_df(
|
nodes <- DiagrammeR::create_node_df(
|
||||||
n = nrow(nodes.dt),
|
n = nrow(nodes.dt),
|
||||||
label = nodes.dt[,Text]
|
label = nodes.dt[, Text]
|
||||||
)
|
)
|
||||||
|
|
||||||
edges <- DiagrammeR::create_edge_df(
|
edges <- DiagrammeR::create_edge_df(
|
||||||
from = match(edges.dt[,From], nodes.dt[,abs.node.position]),
|
from = match(edges.dt[, From], nodes.dt[, abs.node.position]),
|
||||||
to = match(edges.dt[,To], nodes.dt[,abs.node.position]),
|
to = match(edges.dt[, To], nodes.dt[, abs.node.position]),
|
||||||
rel = "leading_to")
|
rel = "leading_to")
|
||||||
|
|
||||||
graph <- DiagrammeR::create_graph(
|
graph <- DiagrammeR::create_graph(
|
||||||
|
|||||||
@@ -31,7 +31,7 @@
|
|||||||
#' @param plot_loess whether to plot loess-smoothed curves. The smoothing is only done for features with
|
#' @param plot_loess whether to plot loess-smoothed curves. The smoothing is only done for features with
|
||||||
#' more than 5 distinct values.
|
#' more than 5 distinct values.
|
||||||
#' @param col_loess a color to use for the loess curves.
|
#' @param col_loess a color to use for the loess curves.
|
||||||
#' @param span_loess the \code{span} paramerer in \code{\link[stats]{loess}}'s call.
|
#' @param span_loess the \code{span} parameter in \code{\link[stats]{loess}}'s call.
|
||||||
#' @param which whether to do univariate or bivariate plotting. NOTE: only 1D is implemented so far.
|
#' @param which whether to do univariate or bivariate plotting. NOTE: only 1D is implemented so far.
|
||||||
#' @param plot whether a plot should be drawn. If FALSE, only a lits of matrices is returned.
|
#' @param plot whether a plot should be drawn. If FALSE, only a lits of matrices is returned.
|
||||||
#' @param ... other parameters passed to \code{plot}.
|
#' @param ... other parameters passed to \code{plot}.
|
||||||
@@ -125,12 +125,12 @@ xgb.plot.shap <- function(data, shap_contrib = NULL, features = NULL, top_n = 1,
|
|||||||
|
|
||||||
nsample <- if (is.null(subsample)) min(100000, nrow(data)) else as.integer(subsample * nrow(data))
|
nsample <- if (is.null(subsample)) min(100000, nrow(data)) else as.integer(subsample * nrow(data))
|
||||||
idx <- sample(1:nrow(data), nsample)
|
idx <- sample(1:nrow(data), nsample)
|
||||||
data <- data[idx,]
|
data <- data[idx, ]
|
||||||
|
|
||||||
if (is.null(shap_contrib)) {
|
if (is.null(shap_contrib)) {
|
||||||
shap_contrib <- predict(model, data, predcontrib = TRUE, approxcontrib = approxcontrib)
|
shap_contrib <- predict(model, data, predcontrib = TRUE, approxcontrib = approxcontrib)
|
||||||
} else {
|
} else {
|
||||||
shap_contrib <- shap_contrib[idx,]
|
shap_contrib <- shap_contrib[idx, ]
|
||||||
}
|
}
|
||||||
|
|
||||||
which <- match.arg(which)
|
which <- match.arg(which)
|
||||||
@@ -168,8 +168,8 @@ xgb.plot.shap <- function(data, shap_contrib = NULL, features = NULL, top_n = 1,
|
|||||||
|
|
||||||
if (plot && which == "1d") {
|
if (plot && which == "1d") {
|
||||||
op <- par(mfrow = c(ceiling(length(features) / n_col), n_col),
|
op <- par(mfrow = c(ceiling(length(features) / n_col), n_col),
|
||||||
oma = c(0,0,0,0) + 0.2,
|
oma = c(0, 0, 0, 0) + 0.2,
|
||||||
mar = c(3.5,3.5,0,0) + 0.1,
|
mar = c(3.5, 3.5, 0, 0) + 0.1,
|
||||||
mgp = c(1.7, 0.6, 0))
|
mgp = c(1.7, 0.6, 0))
|
||||||
for (f in cols) {
|
for (f in cols) {
|
||||||
ord <- order(data[, f])
|
ord <- order(data[, f])
|
||||||
@@ -192,7 +192,7 @@ xgb.plot.shap <- function(data, shap_contrib = NULL, features = NULL, top_n = 1,
 grid()
 if (plot_loess) {
 # compress x to 3 digits, and mean-aggregate y
-zz <- data.table(x = signif(x, 3), y)[, .(.N, y=mean(y)), x]
+zz <- data.table(x = signif(x, 3), y)[, .(.N, y = mean(y)), x]
 if (nrow(zz) <= 5) {
 lines(zz$x, zz$y, col = col_loess)
 } else {
@@ -212,6 +212,7 @@ xgb.plot.shap <- function(data, shap_contrib = NULL, features = NULL, top_n = 1,
 }
 if (plot && which == "2d") {
 # TODO
+warning("Bivariate plotting is currently not available.")
 }
 invisible(list(data = data, shap_contrib = shap_contrib))
 }
@@ -80,12 +80,12 @@ xgb.plot.tree <- function(feature_names = NULL, model = NULL, trees = NULL, plot

 dt <- xgb.model.dt.tree(feature_names = feature_names, model = model, trees = trees)

-dt[, label:= paste0(Feature, "\nCover: ", Cover, ifelse(Feature == "Leaf", "\nValue: ", "\nGain: "), Quality)]
+dt[, label := paste0(Feature, "\nCover: ", Cover, ifelse(Feature == "Leaf", "\nValue: ", "\nGain: "), Quality)]
 if (show_node_id)
 dt[, label := paste0(ID, ": ", label)]
 dt[Node == 0, label := paste0("Tree ", Tree, "\n", label)]
-dt[, shape:= "rectangle"][Feature == "Leaf", shape:= "oval"]
+dt[, shape := "rectangle"][Feature == "Leaf", shape := "oval"]
-dt[, filledcolor:= "Beige"][Feature == "Leaf", filledcolor:= "Khaki"]
+dt[, filledcolor := "Beige"][Feature == "Leaf", filledcolor := "Khaki"]
 # in order to draw the first tree on top:
 dt <- dt[order(-Tree)]
@@ -13,7 +13,11 @@
 #'
 #' Note: a model can also be saved as an R-object (e.g., by using \code{\link[base]{readRDS}}
 #' or \code{\link[base]{save}}). However, it would then only be compatible with R, and
-#' corresponding R-methods would need to be used to load it.
+#' corresponding R-methods would need to be used to load it. Moreover, persisting the model with
+#' \code{\link[base]{readRDS}} or \code{\link[base]{save}}) will cause compatibility problems in
+#' future versions of XGBoost. Consult \code{\link{a-compatibility-note-for-saveRDS-save}} to learn
+#' how to persist models in a future-proof way, i.e. to make the model accessible in future
+#' releases of XGBoost.
 #'
 #' @seealso
 #' \code{\link{xgb.load}}, \code{\link{xgb.Booster.complete}}.
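A minimal sketch of the future-proof persistence flow this note recommends (file name arbitrary; assumes a trained booster `bst`):

# preferred: XGBoost's own binary format, readable by future releases
xgb.save(bst, "xgb.model")
bst2 <- xgb.load("xgb.model")
# discouraged for long-term storage: R-level serialization of the booster object,
# e.g. saveRDS(bst, "xgb.rds"), which may not load in future XGBoost versions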
@@ -27,6 +31,7 @@
 #' eta = 1, nthread = 2, nrounds = 2,objective = "binary:logistic")
 #' xgb.save(bst, 'xgb.model')
 #' bst <- xgb.load('xgb.model')
+#' if (file.exists('xgb.model')) file.remove('xgb.model')
 #' pred <- predict(bst, test$data)
 #' @export
 xgb.save <- function(model, fname) {
@@ -1,5 +1,5 @@
 #' Save xgboost model to R's raw vector,
-#' user can call xgb.load to load the model back from raw vector
+#' user can call xgb.load.raw to load the model back from raw vector
 #'
 #' Save xgboost model from xgboost or xgb.train
 #'
@@ -13,11 +13,11 @@
 #' bst <- xgboost(data = train$data, label = train$label, max_depth = 2,
 #' eta = 1, nthread = 2, nrounds = 2,objective = "binary:logistic")
 #' raw <- xgb.save.raw(bst)
-#' bst <- xgb.load(raw)
+#' bst <- xgb.load.raw(raw)
 #' pred <- predict(bst, test$data)
 #'
 #' @export
 xgb.save.raw <- function(model) {
-model <- xgb.get.handle(model)
+handle <- xgb.get.handle(model)
-.Call(XGBoosterModelToRaw_R, model)
+.Call(XGBoosterModelToRaw_R, handle)
 }
R-package/R/xgb.serialize.R  (new file, 21 lines)
@@ -0,0 +1,21 @@
+#' Serialize the booster instance into R's raw vector. The serialization method differs
+#' from \code{\link{xgb.save.raw}} as the latter one saves only the model but not
+#' parameters. This serialization format is not stable across different xgboost versions.
+#'
+#' @param booster the booster instance
+#'
+#' @examples
+#' data(agaricus.train, package='xgboost')
+#' data(agaricus.test, package='xgboost')
+#' train <- agaricus.train
+#' test <- agaricus.test
+#' bst <- xgboost(data = train$data, label = train$label, max_depth = 2,
+#' eta = 1, nthread = 2, nrounds = 2,objective = "binary:logistic")
+#' raw <- xgb.serialize(bst)
+#' bst <- xgb.unserialize(raw)
+#'
+#' @export
+xgb.serialize <- function(booster) {
+handle <- xgb.get.handle(booster)
+.Call(XGBoosterSerializeToBuffer_R, handle)
+}
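To make the contrast with \code{xgb.save.raw} concrete, a small sketch (assuming a trained booster `bst`): \code{xgb.serialize} captures the full internal state including parameters, but the buffer is only guaranteed to be readable by the same XGBoost version.

buf <- xgb.serialize(bst)        # raw vector with the model plus its parameters
handle <- xgb.unserialize(buf)   # restores an xgb.Booster.handle in the same session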
@@ -3,9 +3,9 @@
 #' \code{xgb.train} is an advanced interface for training an xgboost model.
 #' The \code{xgboost} function is a simpler wrapper for \code{xgb.train}.
 #'
-#' @param params the list of parameters.
+#' @param params the list of parameters. The complete list of parameters is
-#' The complete list of parameters is available at \url{http://xgboost.readthedocs.io/en/latest/parameter.html}.
+#' available in the \href{http://xgboost.readthedocs.io/en/latest/parameter.html}{online documentation}. Below
-#' Below is a shorter summary:
+#' is a shorter summary:
 #'
 #' 1. General Parameters
 #'
@@ -22,10 +22,11 @@
 #' \item \code{gamma} minimum loss reduction required to make a further partition on a leaf node of the tree. the larger, the more conservative the algorithm will be.
 #' \item \code{max_depth} maximum depth of a tree. Default: 6
 #' \item \code{min_child_weight} minimum sum of instance weight (hessian) needed in a child. If the tree partition step results in a leaf node with the sum of instance weight less than min_child_weight, then the building process will give up further partitioning. In linear regression mode, this simply corresponds to minimum number of instances needed to be in each node. The larger, the more conservative the algorithm will be. Default: 1
-#' \item \code{subsample} subsample ratio of the training instance. Setting it to 0.5 means that xgboost randomly collected half of the data instances to grow trees and this will prevent overfitting. It makes computation shorter (because less data to analyse). It is advised to use this parameter with \code{eta} and increase \code{nround}. Default: 1
+#' \item \code{subsample} subsample ratio of the training instance. Setting it to 0.5 means that xgboost randomly collected half of the data instances to grow trees and this will prevent overfitting. It makes computation shorter (because less data to analyse). It is advised to use this parameter with \code{eta} and increase \code{nrounds}. Default: 1
 #' \item \code{colsample_bytree} subsample ratio of columns when constructing each tree. Default: 1
 #' \item \code{num_parallel_tree} Experimental parameter. number of trees to grow per round. Useful to test Random Forest through Xgboost (set \code{colsample_bytree < 1}, \code{subsample < 1} and \code{round = 1}) accordingly. Default: 1
 #' \item \code{monotone_constraints} A numerical vector consists of \code{1}, \code{0} and \code{-1} with its length equals to the number of features in the training data. \code{1} is increasing, \code{-1} is decreasing and \code{0} is no constraint.
+#' \item \code{interaction_constraints} A list of vectors specifying feature indices of permitted interactions. Each item of the list represents one permitted interaction where specified features are allowed to interact with each other. Feature index values should start from \code{0} (\code{0} references the first column). Leave argument unspecified for no interaction constraints.
 #' }
 #'
 #' 2.2. Parameter for Linear Booster
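A hedged sketch of the two constraint parameters just listed (the feature count, directions, and index groups below are made up; interaction_constraints uses 0-based column indices, as documented):

params <- list(objective = "binary:logistic", max_depth = 4,
               # one entry per feature: +1 increasing, -1 decreasing, 0 unconstrained
               monotone_constraints = c(1, 0, -1),
               # two groups of columns permitted to interact among themselves
               interaction_constraints = list(c(0, 1), c(2)))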
@@ -41,14 +42,24 @@
 #' \itemize{
 #' \item \code{objective} specify the learning task and the corresponding learning objective, users can pass a self-defined function to it. The default objective options are below:
 #' \itemize{
-#' \item \code{reg:linear} linear regression (Default).
+#' \item \code{reg:squarederror} Regression with squared loss (Default).
+#' \item \code{reg:squaredlogerror}: regression with squared log loss \eqn{1/2 * (log(pred + 1) - log(label + 1))^2}. All inputs are required to be greater than -1. Also, see metric rmsle for possible issue with this objective.
 #' \item \code{reg:logistic} logistic regression.
+#' \item \code{reg:pseudohubererror}: regression with Pseudo Huber loss, a twice differentiable alternative to absolute loss.
 #' \item \code{binary:logistic} logistic regression for binary classification. Output probability.
 #' \item \code{binary:logitraw} logistic regression for binary classification, output score before logistic transformation.
-#' \item \code{num_class} set the number of classes. To use only with multiclass objectives.
+#' \item \code{binary:hinge}: hinge loss for binary classification. This makes predictions of 0 or 1, rather than producing probabilities.
+#' \item \code{count:poisson}: poisson regression for count data, output mean of poisson distribution. \code{max_delta_step} is set to 0.7 by default in poisson regression (used to safeguard optimization).
+#' \item \code{survival:cox}: Cox regression for right censored survival time data (negative values are considered right censored). Note that predictions are returned on the hazard ratio scale (i.e., as HR = exp(marginal_prediction) in the proportional hazard function \code{h(t) = h0(t) * HR)}.
+#' \item \code{survival:aft}: Accelerated failure time model for censored survival time data. See \href{https://xgboost.readthedocs.io/en/latest/tutorials/aft_survival_analysis.html}{Survival Analysis with Accelerated Failure Time} for details.
+#' \item \code{aft_loss_distribution}: Probability Density Function used by \code{survival:aft} and \code{aft-nloglik} metric.
 #' \item \code{multi:softmax} set xgboost to do multiclass classification using the softmax objective. Class is represented by a number and should be from 0 to \code{num_class - 1}.
 #' \item \code{multi:softprob} same as softmax, but prediction outputs a vector of ndata * nclass elements, which can be further reshaped to ndata, nclass matrix. The result contains predicted probabilities of each data point belonging to each class.
 #' \item \code{rank:pairwise} set xgboost to do ranking task by minimizing the pairwise loss.
+#' \item \code{rank:ndcg}: Use LambdaMART to perform list-wise ranking where \href{https://en.wikipedia.org/wiki/Discounted_cumulative_gain}{Normalized Discounted Cumulative Gain (NDCG)} is maximized.
+#' \item \code{rank:map}: Use LambdaMART to perform list-wise ranking where \href{https://en.wikipedia.org/wiki/Evaluation_measures_(information_retrieval)#Mean_average_precision}{Mean Average Precision (MAP)} is maximized.
+#' \item \code{reg:gamma}: gamma regression with log-link. Output is a mean of gamma distribution. It might be useful, e.g., for modeling insurance claims severity, or for any outcome that might be \href{https://en.wikipedia.org/wiki/Gamma_distribution#Applications}{gamma-distributed}.
+#' \item \code{reg:tweedie}: Tweedie regression with log-link. It might be useful, e.g., for modeling total loss in insurance, or for any outcome that might be \href{https://en.wikipedia.org/wiki/Tweedie_distribution#Applications}{Tweedie-distributed}.
 #' }
 #' \item \code{base_score} the initial prediction score of all instances, global bias. Default: 0.5
 #' \item \code{eval_metric} evaluation metrics for validation data. Users can pass a self-defined function to it. Default: metric will be assigned according to objective(rmse for regression, and error for classification, mean average precision for ranking). List is provided in detail section.
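A small sketch of switching between two of the objectives listed above (data, learning rate, and round count are placeholders):

# squared-error regression (the default) versus Poisson regression for counts
params_reg  <- list(objective = "reg:squarederror", eta = 0.1)
params_pois <- list(objective = "count:poisson", eta = 0.1)  # predictions are the Poisson mean
bst <- xgb.train(params_pois, dtrain, nrounds = 50)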
@@ -67,7 +78,7 @@
 #' the performance of each round's model on mat1 and mat2.
 #' @param obj customized objective function. Returns gradient and second order
 #' gradient with given prediction and dtrain.
-#' @param feval custimized evaluation function. Returns
+#' @param feval customized evaluation function. Returns
 #' \code{list(metric='metric-name', value='metric-value')} with given
 #' prediction and dtrain.
 #' @param verbose If 0, xgboost will stay silent. If 1, it will print information about performance.
@@ -117,7 +128,7 @@
 #' when the \code{eval_metric} parameter is not provided.
 #' User may set one or several \code{eval_metric} parameters.
 #' Note that when using a customized metric, only this single metric can be used.
-#' The folloiwing is the list of built-in metrics for which Xgboost provides optimized implementation:
+#' The following is the list of built-in metrics for which Xgboost provides optimized implementation:
 #' \itemize{
 #' \item \code{rmse} root mean square error. \url{http://en.wikipedia.org/wiki/Root_mean_square_error}
 #' \item \code{logloss} negative log-likelihood. \url{http://en.wikipedia.org/wiki/Log-likelihood}
@@ -146,14 +157,14 @@
 #' \item \code{handle} a handle (pointer) to the xgboost model in memory.
 #' \item \code{raw} a cached memory dump of the xgboost model saved as R's \code{raw} type.
 #' \item \code{niter} number of boosting iterations.
-#' \item \code{evaluation_log} evaluation history storead as a \code{data.table} with the
+#' \item \code{evaluation_log} evaluation history stored as a \code{data.table} with the
 #' first column corresponding to iteration number and the rest corresponding to evaluation
 #' metrics' values. It is created by the \code{\link{cb.evaluation.log}} callback.
 #' \item \code{call} a function call.
 #' \item \code{params} parameters that were passed to the xgboost library. Note that it does not
 #' capture parameters changed by the \code{\link{cb.reset.parameters}} callback.
 #' \item \code{callbacks} callback functions that were either automatically assigned or
-#' explicitely passed.
+#' explicitly passed.
 #' \item \code{best_iteration} iteration number with the best evaluation metric value
 #' (only available with early stopping).
 #' \item \code{best_ntreelimit} the \code{ntreelimit} value corresponding to the best iteration,
@@ -162,7 +173,7 @@
 #' \item \code{best_score} the best evaluation metric value during early stopping.
 #' (only available with early stopping).
 #' \item \code{feature_names} names of the training dataset features
-#' (only when comun names were defined in training data).
+#' (only when column names were defined in training data).
 #' \item \code{nfeatures} number of features in training data.
 #' }
 #'
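A hedged illustration of reading the fields listed above back out of a fitted booster (assumes training with a watchlist and \code{early_stopping_rounds}, so the early-stopping fields are populated):

bst <- xgb.train(params, dtrain, nrounds = 100, watchlist = list(eval = dtest),
                 early_stopping_rounds = 10)
bst$evaluation_log    # data.table: iteration number plus one column per metric
bst$best_iteration    # iteration with the best evaluation metric value
bst$best_ntreelimit   # ntreelimit corresponding to that iteration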
@@ -185,7 +196,7 @@
 #' watchlist <- list(train = dtrain, eval = dtest)
 #'
 #' ## A simple xgb.train example:
-#' param <- list(max_depth = 2, eta = 1, silent = 1, nthread = 2,
+#' param <- list(max_depth = 2, eta = 1, verbose = 0, nthread = 2,
 #' objective = "binary:logistic", eval_metric = "auc")
 #' bst <- xgb.train(param, dtrain, nrounds = 2, watchlist)
 #'
@@ -206,12 +217,12 @@
 #'
 #' # These functions could be used by passing them either:
 #' # as 'objective' and 'eval_metric' parameters in the params list:
-#' param <- list(max_depth = 2, eta = 1, silent = 1, nthread = 2,
+#' param <- list(max_depth = 2, eta = 1, verbose = 0, nthread = 2,
 #' objective = logregobj, eval_metric = evalerror)
 #' bst <- xgb.train(param, dtrain, nrounds = 2, watchlist)
 #'
 #' # or through the ... arguments:
-#' param <- list(max_depth = 2, eta = 1, silent = 1, nthread = 2)
+#' param <- list(max_depth = 2, eta = 1, verbose = 0, nthread = 2)
 #' bst <- xgb.train(param, dtrain, nrounds = 2, watchlist,
 #' objective = logregobj, eval_metric = evalerror)
 #'
@@ -221,7 +232,7 @@
 #'
 #'
 #' ## An xgb.train example of using variable learning rates at each iteration:
-#' param <- list(max_depth = 2, eta = 1, silent = 1, nthread = 2,
+#' param <- list(max_depth = 2, eta = 1, verbose = 0, nthread = 2,
 #' objective = "binary:logistic", eval_metric = "auc")
 #' my_etas <- list(eta = c(0.5, 0.1))
 #' bst <- xgb.train(param, dtrain, nrounds = 2, watchlist,
@@ -266,8 +277,8 @@ xgb.train <- function(params = list(), data, nrounds, watchlist = list(),
 }

 # evaluation printing callback
-params <- c(params, list(silent = ifelse(verbose > 1, 0, 1)))
+params <- c(params)
-print_every_n <- max( as.integer(print_every_n), 1L)
+print_every_n <- max(as.integer(print_every_n), 1L)
 if (!has.callbacks(callbacks, 'cb.print.evaluation') &&
 verbose) {
 callbacks <- add.cb(callbacks, cb.print.evaluation(print_every_n))
@@ -290,8 +301,13 @@ xgb.train <- function(params = list(), data, nrounds, watchlist = list(),
 callbacks <- add.cb(callbacks, cb.early.stop(early_stopping_rounds,
 maximize = maximize, verbose = verbose))
 }

 # Sort the callbacks into categories
 cb <- categorize.callbacks(callbacks)
+params['validate_parameters'] <- TRUE
+if (!is.null(params[['seed']])) {
+warning("xgb.train: `seed` is ignored in R package. Use `set.seed()` instead.")
+}

 # The tree updating process would need slightly different handling
 is_update <- NVL(params[['process_type']], '.') == 'update'
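The new warning above reflects that a `seed` entry in `params` has no effect in the R package; a minimal sketch of the recommended alternative:

set.seed(42)  # makes row/column subsampling reproducible on the R side
bst <- xgb.train(params, dtrain, nrounds = 10)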
@@ -312,12 +328,9 @@ xgb.train <- function(params = list(), data, nrounds, watchlist = list(),
 niter_init <- xgb.ntree(bst) %/% (num_parallel_tree * num_class)
 }
 }
-if(is_update && nrounds > niter_init)
+if (is_update && nrounds > niter_init)
 stop("nrounds cannot be larger than ", niter_init, " (nrounds of xgb_model)")

-# TODO: distributed code
-rank <- 0

 niter_skip <- ifelse(is_update, 0, niter_init)
 begin_iteration <- niter_skip + 1
 end_iteration <- niter_skip + nrounds
@@ -329,7 +342,6 @@ xgb.train <- function(params = list(), data, nrounds, watchlist = list(),

 xgb.iter.update(bst$handle, dtrain, iteration - 1, obj)

-bst_evaluation <- numeric(0)
 if (length(watchlist) > 0)
 bst_evaluation <- xgb.iter.eval(bst$handle, watchlist, iteration - 1, feval)

@@ -344,7 +356,7 @@ xgb.train <- function(params = list(), data, nrounds, watchlist = list(),
 bst <- xgb.Booster.complete(bst, saveraw = TRUE)

 # store the total number of boosting iterations
-bst$niter = end_iteration
+bst$niter <- end_iteration

 # store the evaluation results
 if (length(evaluation_log) > 0 &&
R-package/R/xgb.unserialize.R  (new file, 31 lines)
@@ -0,0 +1,31 @@
+#' Load the instance back from \code{\link{xgb.serialize}}
+#'
+#' @param buffer the buffer containing booster instance saved by \code{\link{xgb.serialize}}
+#'
+#' @export
+xgb.unserialize <- function(buffer) {
+cachelist <- list()
+handle <- .Call(XGBoosterCreate_R, cachelist)
+tryCatch(
+.Call(XGBoosterUnserializeFromBuffer_R, handle, buffer),
+error = function(e) {
+error_msg <- conditionMessage(e)
+m <- regexec("(src[\\\\/]learner.cc:[0-9]+): Check failed: (header == serialisation_header_)",
+error_msg, perl = TRUE)
+groups <- regmatches(error_msg, m)[[1]]
+if (length(groups) == 3) {
+warning(paste("The model had been generated by XGBoost version 1.0.0 or earlier and was ",
+"loaded from a RDS file. We strongly ADVISE AGAINST using saveRDS() ",
+"function, to ensure that your model can be read in current and upcoming ",
+"XGBoost releases. Please use xgb.save() instead to preserve models for the ",
+"long term. For more details and explanation, see ",
+"https://xgboost.readthedocs.io/en/latest/tutorials/saving_model.html",
+sep = ""))
+.Call(XGBoosterLoadModelFromRaw_R, handle, buffer)
+} else {
+stop(e)
+}
+})
+class(handle) <- "xgb.Booster.handle"
+return (handle)
+}
@@ -18,7 +18,7 @@ xgboost <- function(data = NULL, label = NULL, missing = NA, weight = NULL,
 early_stopping_rounds = early_stopping_rounds, maximize = maximize,
 save_period = save_period, save_name = save_name,
 xgb_model = xgb_model, callbacks = callbacks, ...)
-return(bst)
+return (bst)
 }

 #' Training part from Mushroom Data Set
@@ -30,4 +30,4 @@ Examples
 Development
 -----------

-* See the [R Package section](https://xgboost.readthedocs.io/en/latest/how_to/contribute.html#r-package) of the contributors guide.
+* See the [R Package section](https://xgboost.readthedocs.io/en/latest/contribute.html#r-package) of the contributors guide.
@@ -1,3 +1,4 @@
 #!/bin/sh

 rm -f src/Makevars
+rm -f CMakeLists.txt

R-package/configure  (vendored, 1059 lines)
File diff suppressed because it is too large
@@ -1,31 +1,55 @@
 ### configure.ac -*- Autoconf -*-

-AC_PREREQ(2.62)
+AC_PREREQ(2.69)

 AC_INIT([xgboost],[0.6-3],[],[xgboost],[])

+# Use this line to set CC variable to a C compiler
+AC_PROG_CC
+
+### Check whether backtrace() is part of libc or the external lib libexecinfo
+AC_MSG_CHECKING([Backtrace lib])
+AC_MSG_RESULT([])
+AC_CHECK_LIB([execinfo], [backtrace], [BACKTRACE_LIB=-lexecinfo], [BACKTRACE_LIB=''])
+
+### Endian detection
+AC_MSG_CHECKING([endian])
+AC_MSG_RESULT([])
+AC_RUN_IFELSE([AC_LANG_PROGRAM([[#include <stdint.h>]], [[const uint16_t endianness = 256; return !!(*(const uint8_t *)&endianness);]])],
+[ENDIAN_FLAG="-DDMLC_CMAKE_LITTLE_ENDIAN=1"],
+[ENDIAN_FLAG="-DDMLC_CMAKE_LITTLE_ENDIAN=0"])

 OPENMP_CXXFLAGS=""

 if test `uname -s` = "Linux"
 then
-OPENMP_CXXFLAGS="\$(SHLIB_OPENMP_CFLAGS)"
+OPENMP_CXXFLAGS="\$(SHLIB_OPENMP_CXXFLAGS)"
 fi

 if test `uname -s` = "Darwin"
 then
-OPENMP_CXXFLAGS="\$(SHLIB_OPENMP_CFLAGS)"
+OPENMP_CXXFLAGS='-Xclang -fopenmp'
+OPENMP_LIB='-lomp'
 ac_pkg_openmp=no
 AC_MSG_CHECKING([whether OpenMP will work in a package])
-AC_LANG_CONFTEST(
+AC_LANG_CONFTEST([AC_LANG_PROGRAM([[#include <omp.h>]], [[ return (omp_get_max_threads() <= 1); ]])])
-[AC_LANG_PROGRAM([[#include <omp.h>]], [[ return omp_get_num_threads (); ]])])
+${CC} -o conftest conftest.c ${OPENMP_LIB} ${OPENMP_CXXFLAGS} 2>/dev/null && ./conftest && ac_pkg_openmp=yes
-PKG_CFLAGS="${OPENMP_CFLAGS}" PKG_LIBS="${OPENMP_CFLAGS}" "$RBIN" CMD SHLIB conftest.c 1>&AS_MESSAGE_LOG_FD 2>&AS_MESSAGE_LOG_FD && "$RBIN" --vanilla -q -e "dyn.load(paste('conftest',.Platform\$dynlib.ext,sep=''))" 1>&AS_MESSAGE_LOG_FD 2>&AS_MESSAGE_LOG_FD && ac_pkg_openmp=yes
 AC_MSG_RESULT([${ac_pkg_openmp}])
 if test "${ac_pkg_openmp}" = no; then
 OPENMP_CXXFLAGS=''
+OPENMP_LIB=''
+echo '*****************************************************************************************'
+echo 'WARNING: OpenMP is unavailable on this Mac OSX system. Training speed may be suboptimal.'
+echo '  To use all CPU cores for training jobs, you should install OpenMP by running\n'
+echo '      brew install libomp'
+echo '*****************************************************************************************'
 fi
 fi

 AC_SUBST(OPENMP_CXXFLAGS)
+AC_SUBST(OPENMP_LIB)
+AC_SUBST(ENDIAN_FLAG)
+AC_SUBST(BACKTRACE_LIB)
 AC_CONFIG_FILES([src/Makevars])
 AC_OUTPUT
@@ -11,4 +11,5 @@ early_stopping Early Stop in training
 poisson_regression Poisson Regression on count data
 tweedie_regression Tweedie Regression
 gpu_accelerated GPU-accelerated tree building algorithms
+interaction_constraints Interaction constraints among features
@@ -17,4 +17,4 @@ Benchmarks
 Notes
 ====
 * Contribution of examples, benchmarks is more than welcomed!
-* If you like to share how you use xgboost to solve your problem, send a pull request:)
+* If you like to share how you use xgboost to solve your problem, send a pull request :)
@@ -3,8 +3,8 @@ require(methods)

 # we load in the agaricus dataset
 # In this example, we are aiming to predict whether a mushroom is edible
-data(agaricus.train, package='xgboost')
+data(agaricus.train, package = 'xgboost')
-data(agaricus.test, package='xgboost')
+data(agaricus.test, package = 'xgboost')
 train <- agaricus.train
 test <- agaricus.test
 # the loaded data is stored in sparseMatrix, and label is a numeric vector in {0,1}
@@ -58,31 +58,31 @@ xgb.save(bst, "xgboost.model")
 bst2 <- xgb.load("xgboost.model")
 pred2 <- predict(bst2, test$data)
 # pred2 should be identical to pred
-print(paste("sum(abs(pred2-pred))=", sum(abs(pred2-pred))))
+print(paste("sum(abs(pred2-pred))=", sum(abs(pred2 - pred))))

 # save model to R's raw vector
-raw = xgb.save.raw(bst)
+raw <- xgb.save.raw(bst)
 # load binary model to R
 bst3 <- xgb.load(raw)
 pred3 <- predict(bst3, test$data)
 # pred3 should be identical to pred
-print(paste("sum(abs(pred3-pred))=", sum(abs(pred3-pred))))
+print(paste("sum(abs(pred3-pred))=", sum(abs(pred3 - pred))))

 #----------------Advanced features --------------
 # to use advanced features, we need to put data in xgb.DMatrix
-dtrain <- xgb.DMatrix(data = train$data, label=train$label)
+dtrain <- xgb.DMatrix(data = train$data, label = train$label)
-dtest <- xgb.DMatrix(data = test$data, label=test$label)
+dtest <- xgb.DMatrix(data = test$data, label = test$label)
 #---------------Using watchlist----------------
 # watchlist is a list of xgb.DMatrix, each of them is tagged with name
-watchlist <- list(train=dtrain, test=dtest)
+watchlist <- list(train = dtrain, test = dtest)
 # to train with watchlist, use xgb.train, which contains more advanced features
 # watchlist allows us to monitor the evaluation result on all data in the list
 print("Train xgboost using xgb.train with watchlist")
-bst <- xgb.train(data=dtrain, max_depth=2, eta=1, nrounds=2, watchlist=watchlist,
+bst <- xgb.train(data = dtrain, max_depth = 2, eta = 1, nrounds = 2, watchlist = watchlist,
 nthread = 2, objective = "binary:logistic")
 # we can change evaluation metrics, or use multiple evaluation metrics
 print("train xgboost using xgb.train with watchlist, watch logloss and error")
-bst <- xgb.train(data=dtrain, max_depth=2, eta=1, nrounds=2, watchlist=watchlist,
+bst <- xgb.train(data = dtrain, max_depth = 2, eta = 1, nrounds = 2, watchlist = watchlist,
 eval_metric = "error", eval_metric = "logloss",
 nthread = 2, objective = "binary:logistic")

@@ -90,17 +90,17 @@ bst <- xgb.train(data=dtrain, max_depth=2, eta=1, nrounds=2, watchlist=watchlist
 xgb.DMatrix.save(dtrain, "dtrain.buffer")
 # to load it in, simply call xgb.DMatrix
 dtrain2 <- xgb.DMatrix("dtrain.buffer")
-bst <- xgb.train(data=dtrain2, max_depth=2, eta=1, nrounds=2, watchlist=watchlist,
+bst <- xgb.train(data = dtrain2, max_depth = 2, eta = 1, nrounds = 2, watchlist = watchlist,
 nthread = 2, objective = "binary:logistic")
 # information can be extracted from xgb.DMatrix using getinfo
-label = getinfo(dtest, "label")
+label <- getinfo(dtest, "label")
 pred <- predict(bst, dtest)
-err <- as.numeric(sum(as.integer(pred > 0.5) != label))/length(label)
+err <- as.numeric(sum(as.integer(pred > 0.5) != label)) / length(label)
 print(paste("test-error=", err))

 # You can dump the tree you learned using xgb.dump into a text file
-dump_path = file.path(tempdir(), 'dump.raw.txt')
+dump_path <- file.path(tempdir(), 'dump.raw.txt')
-xgb.dump(bst, dump_path, with_stats = T)
+xgb.dump(bst, dump_path, with_stats = TRUE)

 # Finally, you can check which features are the most important.
 print("Most important features (look at column Gain):")
@@ -1,7 +1,7 @@
 require(xgboost)
 # load in the agaricus dataset
-data(agaricus.train, package='xgboost')
+data(agaricus.train, package = 'xgboost')
-data(agaricus.test, package='xgboost')
+data(agaricus.test, package = 'xgboost')
 dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
 dtest <- xgb.DMatrix(agaricus.test$data, label = agaricus.test$label)

@@ -11,12 +11,12 @@ watchlist <- list(eval = dtest, train = dtrain)
 #
 print('start running example to start from a initial prediction')
 # train xgboost for 1 round
-param <- list(max_depth=2, eta=1, nthread = 2, silent=1, objective='binary:logistic')
+param <- list(max_depth = 2, eta = 1, nthread = 2, objective = 'binary:logistic')
 bst <- xgb.train(param, dtrain, 1, watchlist)
 # Note: we need the margin value instead of transformed prediction in set_base_margin
 # do predict with output_margin=TRUE, will always give you margin values before logistic transformation
-ptrain <- predict(bst, dtrain, outputmargin=TRUE)
+ptrain <- predict(bst, dtrain, outputmargin = TRUE)
-ptest <- predict(bst, dtest, outputmargin=TRUE)
+ptest <- predict(bst, dtest, outputmargin = TRUE)
 # set the base_margin property of dtrain and dtest
 # base margin is the base prediction we will boost from
 setinfo(dtrain, "base_margin", ptrain)
@@ -9,17 +9,17 @@ require(e1071)
 # Load Arthritis dataset in memory.
 data(Arthritis)
 # Create a copy of the dataset with data.table package (data.table is 100% compliant with R dataframe but its syntax is a lot more consistent and its performance is really good).
-df <- data.table(Arthritis, keep.rownames = F)
+df <- data.table(Arthritis, keep.rownames = FALSE)

 # Let's add some new categorical features to see if it helps. Of course these features are highly correlated to the Age feature. Usually it's not a good thing in ML, but Tree algorithms (including boosted trees) are able to select the best features, even in case of highly correlated features.
 # For the first feature we create groups of age by rounding the real age. Note that we transform it to factor (categorical data) so the algorithm treats them as independent values.
-df[,AgeDiscret:= as.factor(round(Age/10,0))]
+df[, AgeDiscret := as.factor(round(Age / 10, 0))]

 # Here is an even stronger simplification of the real age with an arbitrary split at 30 years old. I choose this value based on nothing. We will see later if simplifying the information based on arbitrary values is a good strategy (I am sure you already have an idea of how well it will work!).
-df[,AgeCat:= as.factor(ifelse(Age > 30, "Old", "Young"))]
+df[, AgeCat := as.factor(ifelse(Age > 30, "Old", "Young"))]

 # We remove ID as there is nothing to learn from this feature (it will just add some noise as the dataset is small).
-df[,ID:=NULL]
+df[, ID := NULL]

 #-------------Basic Training using XGBoost in caret Library-----------------
 # Set up control parameters for caret::train
@@ -19,7 +19,7 @@ if (!require(vcd)) {
 data(Arthritis)

 # create a copy of the dataset with data.table package (data.table is 100% compliant with R dataframe but its syntax is a lot more consistent and its performance is really good).
-df <- data.table(Arthritis, keep.rownames = F)
+df <- data.table(Arthritis, keep.rownames = FALSE)

 # Let's have a look at the data.table
 cat("Print the dataset\n")
@@ -32,17 +32,17 @@ str(df)
 # Let's add some new categorical features to see if it helps. Of course these features are highly correlated to the Age feature. Usually it's not a good thing in ML, but Tree algorithms (including boosted trees) are able to select the best features, even in case of highly correlated features.

 # For the first feature we create groups of age by rounding the real age. Note that we transform it to factor (categorical data) so the algorithm treats them as independent values.
-df[,AgeDiscret:= as.factor(round(Age/10,0))]
+df[, AgeDiscret := as.factor(round(Age / 10, 0))]

 # Here is an even stronger simplification of the real age with an arbitrary split at 30 years old. I choose this value based on nothing. We will see later if simplifying the information based on arbitrary values is a good strategy (I am sure you already have an idea of how well it will work!).
-df[,AgeCat:= as.factor(ifelse(Age > 30, "Old", "Young"))]
+df[, AgeCat := as.factor(ifelse(Age > 30, "Old", "Young"))]

 # We remove ID as there is nothing to learn from this feature (it will just add some noise as the dataset is small).
-df[,ID:=NULL]
+df[, ID := NULL]

 # List the different values for the column Treatment: Placebo, Treated.
 cat("Values of the categorical feature Treatment\n")
-print(levels(df[,Treatment]))
+print(levels(df[, Treatment]))

 # Next step, we will transform the categorical data to dummy variables.
 # This method is also called one hot encoding.
@@ -52,7 +52,7 @@ print(levels(df[,Treatment]))
 #
 # Formulae Improved~.-1 used below means transform all categorical features but column Improved to binary values.
 # Column Improved is excluded because it will be our output column, the one we want to predict.
-sparse_matrix = sparse.model.matrix(Improved~.-1, data = df)
+sparse_matrix <- sparse.model.matrix(Improved ~ . - 1, data = df)

 cat("Encoding of the sparse Matrix\n")
 print(sparse_matrix)
@@ -61,7 +61,7 @@ print(sparse_matrix)
 # 1. Set, for all rows, field in Y column to 0;
 # 2. set Y to 1 when Improved == Marked;
 # 3. Return Y column
-output_vector = df[,Y:=0][Improved == "Marked",Y:=1][,Y]
+output_vector <- df[, Y := 0][Improved == "Marked", Y := 1][, Y]

 # Following is the same process as other demo
 cat("Learning...\n")
@@ -1,25 +1,25 @@
 require(xgboost)
 # load in the agaricus dataset
-data(agaricus.train, package='xgboost')
+data(agaricus.train, package = 'xgboost')
-data(agaricus.test, package='xgboost')
+data(agaricus.test, package = 'xgboost')
 dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
 dtest <- xgb.DMatrix(agaricus.test$data, label = agaricus.test$label)

-nround <- 2
+nrounds <- 2
-param <- list(max_depth=2, eta=1, silent=1, nthread=2, objective='binary:logistic')
+param <- list(max_depth = 2, eta = 1, nthread = 2, objective = 'binary:logistic')

 cat('running cross validation\n')
 # do cross validation, this will print result out as
 # [iteration] metric_name:mean_value+std_value
 # std_value is standard deviation of the metric
-xgb.cv(param, dtrain, nround, nfold=5, metrics={'error'})
+xgb.cv(param, dtrain, nrounds, nfold = 5, metrics = {'error'})

 cat('running cross validation, disable standard deviation display\n')
 # do cross validation, this will print result out as
 # [iteration] metric_name:mean_value+std_value
 # std_value is standard deviation of the metric
-xgb.cv(param, dtrain, nround, nfold=5,
+xgb.cv(param, dtrain, nrounds, nfold = 5,
-metrics='error', showsd = FALSE)
+metrics = 'error', showsd = FALSE)

 ###
 # you can also do cross validation with customized loss function
@@ -29,23 +29,23 @@ print ('running cross validation, with cutomsized loss function')

 logregobj <- function(preds, dtrain) {
 labels <- getinfo(dtrain, "label")
-preds <- 1/(1 + exp(-preds))
+preds <- 1 / (1 + exp(-preds))
 grad <- preds - labels
 hess <- preds * (1 - preds)
 return(list(grad = grad, hess = hess))
 }
 evalerror <- function(preds, dtrain) {
 labels <- getinfo(dtrain, "label")
-err <- as.numeric(sum(labels != (preds > 0)))/length(labels)
+err <- as.numeric(sum(labels != (preds > 0))) / length(labels)
 return(list(metric = "error", value = err))
 }

-param <- list(max_depth=2, eta=1, silent=1,
+param <- list(max_depth = 2, eta = 1,
 objective = logregobj, eval_metric = evalerror)
 # train with customized objective
-xgb.cv(params = param, data = dtrain, nrounds = nround, nfold = 5)
+xgb.cv(params = param, data = dtrain, nrounds = nrounds, nfold = 5)

 # do cross validation with prediction values for each fold
-res <- xgb.cv(params = param, data = dtrain, nrounds = nround, nfold = 5, prediction = TRUE)
+res <- xgb.cv(params = param, data = dtrain, nrounds = nrounds, nfold = 5, prediction = TRUE)
 res$evaluation_log
 length(res$pred)
@@ -1,7 +1,7 @@
 require(xgboost)
 # load in the agaricus dataset
-data(agaricus.train, package='xgboost')
+data(agaricus.train, package = 'xgboost')
-data(agaricus.test, package='xgboost')
+data(agaricus.test, package = 'xgboost')
 dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
 dtest <- xgb.DMatrix(agaricus.test$data, label = agaricus.test$label)

@@ -15,7 +15,7 @@ num_round <- 2
 # this is loglikelihood loss
 logregobj <- function(preds, dtrain) {
 labels <- getinfo(dtrain, "label")
-preds <- 1/(1 + exp(-preds))
+preds <- 1 / (1 + exp(-preds))
 grad <- preds - labels
 hess <- preds * (1 - preds)
 return(list(grad = grad, hess = hess))
@@ -29,12 +29,12 @@ logregobj <- function(preds, dtrain) {
 # Keep this in mind when you use the customization; you may need to write a customized evaluation function
 evalerror <- function(preds, dtrain) {
 labels <- getinfo(dtrain, "label")
-err <- as.numeric(sum(labels != (preds > 0)))/length(labels)
+err <- as.numeric(sum(labels != (preds > 0))) / length(labels)
 return(list(metric = "error", value = err))
 }

-param <- list(max_depth=2, eta=1, nthread = 2, silent=1,
+param <- list(max_depth = 2, eta = 1, nthread = 2, verbosity = 0,
-objective=logregobj, eval_metric=evalerror)
+objective = logregobj, eval_metric = evalerror)
 print ('start training with user customized objective')
 # training with customized objective, we can also do step by step training
 # simply look at xgboost.py's implementation of train
@@ -52,13 +52,13 @@ attr(dtrain, 'label') <- getinfo(dtrain, 'label')
 logregobjattr <- function(preds, dtrain) {
 # now you can access the attribute in customized function
 labels <- attr(dtrain, 'label')
-preds <- 1/(1 + exp(-preds))
+preds <- 1 / (1 + exp(-preds))
 grad <- preds - labels
 hess <- preds * (1 - preds)
 return(list(grad = grad, hess = hess))
 }
-param <- list(max_depth=2, eta=1, nthread = 2, silent=1,
+param <- list(max_depth = 2, eta = 1, nthread = 2, verbosity = 0,
-objective=logregobjattr, eval_metric=evalerror)
+objective = logregobjattr, eval_metric = evalerror)
 print ('start training with user customized objective, with additional attributes in DMatrix')
 # training with customized objective, we can also do step by step training
 # simply look at xgboost.py's implementation of train
@@ -1,20 +1,20 @@
|
|||||||
require(xgboost)
|
require(xgboost)
|
||||||
# load in the agaricus dataset
|
# load in the agaricus dataset
|
||||||
data(agaricus.train, package='xgboost')
|
data(agaricus.train, package = 'xgboost')
|
||||||
data(agaricus.test, package='xgboost')
|
data(agaricus.test, package = 'xgboost')
|
||||||
dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
|
dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
|
||||||
dtest <- xgb.DMatrix(agaricus.test$data, label = agaricus.test$label)
|
dtest <- xgb.DMatrix(agaricus.test$data, label = agaricus.test$label)
|
||||||
# note: for customized objective function, we leave objective as default
|
# note: for customized objective function, we leave objective as default
|
||||||
# note: what we are getting is margin value in prediction
|
# note: what we are getting is margin value in prediction
|
||||||
# you must know what you are doing
|
# you must know what you are doing
|
||||||
param <- list(max_depth=2, eta=1, nthread = 2, silent=1)
|
param <- list(max_depth = 2, eta = 1, nthread = 2, verbosity = 0)
|
||||||
watchlist <- list(eval = dtest)
|
watchlist <- list(eval = dtest)
|
||||||
num_round <- 20
|
num_round <- 20
|
||||||
# user define objective function, given prediction, return gradient and second order gradient
|
# user define objective function, given prediction, return gradient and second order gradient
|
||||||
# this is loglikelihood loss
|
# this is loglikelihood loss
|
||||||
logregobj <- function(preds, dtrain) {
|
logregobj <- function(preds, dtrain) {
|
||||||
labels <- getinfo(dtrain, "label")
|
labels <- getinfo(dtrain, "label")
|
||||||
preds <- 1/(1 + exp(-preds))
|
preds <- 1 / (1 + exp(-preds))
|
||||||
grad <- preds - labels
|
grad <- preds - labels
|
||||||
hess <- preds * (1 - preds)
|
hess <- preds * (1 - preds)
|
||||||
return(list(grad = grad, hess = hess))
|
return(list(grad = grad, hess = hess))
|
||||||
@@ -27,7 +27,7 @@ logregobj <- function(preds, dtrain) {
|
|||||||
# Take this in mind when you use the customization, and maybe you need write customized evaluation function
|
# Take this in mind when you use the customization, and maybe you need write customized evaluation function
|
||||||
evalerror <- function(preds, dtrain) {
|
evalerror <- function(preds, dtrain) {
|
||||||
labels <- getinfo(dtrain, "label")
|
labels <- getinfo(dtrain, "label")
|
||||||
err <- as.numeric(sum(labels != (preds > 0)))/length(labels)
|
err <- as.numeric(sum(labels != (preds > 0))) / length(labels)
|
||||||
return(list(metric = "error", value = err))
|
return(list(metric = "error", value = err))
|
||||||
}
|
}
|
||||||
print ('start training with early Stopping setting')
|
print ('start training with early Stopping setting')
|
||||||
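A hedged sketch of the early-stopping call this demo builds toward, reusing param, dtrain, watchlist, logregobj and evalerror from above (the stopping-rounds value is illustrative):

bst <- xgb.train(param, dtrain, num_round, watchlist,
                 obj = logregobj, feval = evalerror, maximize = FALSE,
                 early_stopping_rounds = 3)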
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
require(xgboost)
|
require(xgboost)
|
||||||
# load in the agaricus dataset
|
# load in the agaricus dataset
|
||||||
data(agaricus.train, package='xgboost')
|
data(agaricus.train, package = 'xgboost')
|
||||||
data(agaricus.test, package='xgboost')
|
data(agaricus.test, package = 'xgboost')
|
||||||
dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
|
dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
|
||||||
dtest <- xgb.DMatrix(agaricus.test$data, label = agaricus.test$label)
|
dtest <- xgb.DMatrix(agaricus.test$data, label = agaricus.test$label)
|
||||||
##
|
##
|
||||||
@@ -30,5 +30,4 @@ num_round <- 2
|
|||||||
bst <- xgb.train(param, dtrain, num_round, watchlist)
|
bst <- xgb.train(param, dtrain, num_round, watchlist)
|
||||||
ypred <- predict(bst, dtest)
|
ypred <- predict(bst, dtest)
|
||||||
labels <- getinfo(dtest, 'label')
|
labels <- getinfo(dtest, 'label')
|
||||||
cat('error of preds=', mean(as.numeric(ypred>0.5)!=labels),'\n')
|
cat('error of preds=', mean(as.numeric(ypred > 0.5) != labels), '\n')
|
||||||
|
|
||||||
|
|||||||
@@ -21,8 +21,8 @@ m <- X[, sel] %*% betas - 1 + rnorm(N)
|
|||||||
y <- rbinom(N, 1, plogis(m))
|
y <- rbinom(N, 1, plogis(m))
|
||||||
|
|
||||||
tr <- sample.int(N, N * 0.75)
|
tr <- sample.int(N, N * 0.75)
|
||||||
dtrain <- xgb.DMatrix(X[tr,], label = y[tr])
|
dtrain <- xgb.DMatrix(X[tr, ], label = y[tr])
|
||||||
dtest <- xgb.DMatrix(X[-tr,], label = y[-tr])
|
dtest <- xgb.DMatrix(X[-tr, ], label = y[-tr])
|
||||||
wl <- list(train = dtrain, test = dtest)
|
wl <- list(train = dtrain, test = dtest)
|
||||||
|
|
||||||
# An example of running 'gpu_hist' algorithm
|
# An example of running 'gpu_hist' algorithm
|
||||||
@@ -30,7 +30,7 @@ wl <- list(train = dtrain, test = dtest)
|
|||||||
# - similar to the 'hist'
|
# - similar to the 'hist'
|
||||||
# - the fastest option for moderately large datasets
|
# - the fastest option for moderately large datasets
|
||||||
# - current limitations: max_depth < 16, does not implement guided loss
|
# - current limitations: max_depth < 16, does not implement guided loss
|
||||||
# You can use tree_method = 'gpu_exact' for another GPU accelerated algorithm,
|
# You can use tree_method = 'gpu_hist' for another GPU accelerated algorithm,
|
||||||
# which is slower, more memory-hungry, but does not use binning.
|
# which is slower, more memory-hungry, but does not use binning.
|
||||||
param <- list(objective = 'reg:logistic', eval_metric = 'auc', subsample = 0.5, nthread = 4,
|
param <- list(objective = 'reg:logistic', eval_metric = 'auc', subsample = 0.5, nthread = 4,
|
||||||
max_bin = 64, tree_method = 'gpu_hist')
|
max_bin = 64, tree_method = 'gpu_hist')
|
||||||
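A hedged sketch of how the parameter list above would be used for training (the nrounds value is illustrative, and this requires an XGBoost build with GPU support):

bst_gpu <- xgb.train(param, dtrain, nrounds = 100, watchlist = wl)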
|
|||||||
113
R-package/demo/interaction_constraints.R
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
library(xgboost)
|
||||||
|
library(data.table)
|
||||||
|
|
||||||
|
set.seed(1024)
|
||||||
|
|
||||||
|
# Function to obtain a list of interactions fitted in trees, requires input of maximum depth
|
||||||
|
treeInteractions <- function(input_tree, input_max_depth) {
|
||||||
|
ID_merge <- i.id <- i.feature <- NULL # Suppress warning "no visible binding for global variable"
|
||||||
|
|
||||||
|
trees <- data.table::copy(input_tree) # copy tree input to prevent overwriting
|
||||||
|
if (input_max_depth < 2) return(list()) # no interactions if max depth < 2
|
||||||
|
if (nrow(input_tree) == 1) return(list())
|
||||||
|
|
||||||
|
# Attach parent nodes
|
||||||
|
for (i in 2:input_max_depth) {
|
||||||
|
if (i == 2) trees[, ID_merge := ID] else trees[, ID_merge := get(paste0('parent_', i - 2))]
|
||||||
|
parents_left <- trees[!is.na(Split), list(i.id = ID, i.feature = Feature, ID_merge = Yes)]
|
||||||
|
parents_right <- trees[!is.na(Split), list(i.id = ID, i.feature = Feature, ID_merge = No)]
|
||||||
|
|
||||||
|
data.table::setorderv(trees, 'ID_merge')
|
||||||
|
data.table::setorderv(parents_left, 'ID_merge')
|
||||||
|
data.table::setorderv(parents_right, 'ID_merge')
|
||||||
|
|
||||||
|
trees <- merge(trees, parents_left, by = 'ID_merge', all.x = TRUE)
|
||||||
|
trees[!is.na(i.id), c(paste0('parent_', i - 1), paste0('parent_feat_', i - 1))
|
||||||
|
:= list(i.id, i.feature)]
|
||||||
|
trees[, c('i.id', 'i.feature') := NULL]
|
||||||
|
|
||||||
|
trees <- merge(trees, parents_right, by = 'ID_merge', all.x = TRUE)
|
||||||
|
trees[!is.na(i.id), c(paste0('parent_', i - 1), paste0('parent_feat_', i - 1))
|
||||||
|
:= list(i.id, i.feature)]
|
||||||
|
trees[, c('i.id', 'i.feature') := NULL]
|
||||||
|
}
|
||||||
|
|
||||||
|
# Extract nodes with interactions
|
||||||
|
interaction_trees <- trees[!is.na(Split) & !is.na(parent_1),
|
||||||
|
c('Feature', paste0('parent_feat_', 1:(input_max_depth - 1))),
|
||||||
|
with = FALSE]
|
||||||
|
interaction_trees_split <- split(interaction_trees, 1:nrow(interaction_trees))
|
||||||
|
interaction_list <- lapply(interaction_trees_split, as.character)
|
||||||
|
|
||||||
|
# Remove NAs (no parent interaction)
|
||||||
|
interaction_list <- lapply(interaction_list, function(x) x[!is.na(x)])
|
||||||
|
|
||||||
|
# Remove non-interactions (same variable)
|
||||||
|
interaction_list <- lapply(interaction_list, unique) # remove same variables
|
||||||
|
interaction_length <- sapply(interaction_list, length)
|
||||||
|
interaction_list <- interaction_list[interaction_length > 1]
|
||||||
|
interaction_list <- unique(lapply(interaction_list, sort))
|
||||||
|
return(interaction_list)
|
||||||
|
}
|
||||||
|
|
||||||
|
# Generate sample data
|
||||||
|
x <- list()
|
||||||
|
for (i in 1:10) {
|
||||||
|
x[[i]] <- i * rnorm(1000, 10)
|
||||||
|
}
|
||||||
|
x <- as.data.table(x)
|
||||||
|
|
||||||
|
y <- -1 * x[, rowSums(.SD)] + x[['V1']] * x[['V2']] + x[['V3']] * x[['V4']] * x[['V5']]
|
||||||
|
+ rnorm(1000, 0.001) + 3 * sin(x[['V7']])
|
||||||
|
|
||||||
|
train <- as.matrix(x)
|
||||||
|
|
||||||
|
# Interaction constraint list (column names form)
|
||||||
|
interaction_list <- list(c('V1', 'V2'), c('V3', 'V4', 'V5'))
|
||||||
|
|
||||||
|
# Convert interaction constraint list into feature index form
|
||||||
|
cols2ids <- function(object, col_names) {
|
||||||
|
LUT <- seq_along(col_names) - 1
|
||||||
|
names(LUT) <- col_names
|
||||||
|
rapply(object, function(x) LUT[x], classes = "character", how = "replace")
|
||||||
|
}
|
||||||
|
interaction_list_fid <- cols2ids(interaction_list, colnames(train))
|
||||||
|
|
||||||
|
# Fit model with interaction constraints
|
||||||
|
bst <- xgboost(data = train, label = y, max_depth = 4,
|
||||||
|
eta = 0.1, nthread = 2, nrounds = 1000,
|
||||||
|
interaction_constraints = interaction_list_fid)
|
||||||
|
|
||||||
|
bst_tree <- xgb.model.dt.tree(colnames(train), bst)
|
||||||
|
bst_interactions <- treeInteractions(bst_tree, 4)
|
||||||
|
# interactions constrained to combinations of V1*V2 and V3*V4*V5
|
||||||
|
|
||||||
|
# Fit model without interaction constraints
|
||||||
|
bst2 <- xgboost(data = train, label = y, max_depth = 4,
|
||||||
|
eta = 0.1, nthread = 2, nrounds = 1000)
|
||||||
|
|
||||||
|
bst2_tree <- xgb.model.dt.tree(colnames(train), bst2)
|
||||||
|
bst2_interactions <- treeInteractions(bst2_tree, 4) # much more interactions
|
||||||
|
|
||||||
|
# Fit model with both interaction and monotonicity constraints
|
||||||
|
bst3 <- xgboost(data = train, label = y, max_depth = 4,
|
||||||
|
eta = 0.1, nthread = 2, nrounds = 1000,
|
||||||
|
interaction_constraints = interaction_list_fid,
|
||||||
|
monotone_constraints = c(-1, 0, 0, 0, 0, 0, 0, 0, 0, 0))
|
||||||
|
|
||||||
|
bst3_tree <- xgb.model.dt.tree(colnames(train), bst3)
|
||||||
|
bst3_interactions <- treeInteractions(bst3_tree, 4)
|
||||||
|
# interactions still constrained to combinations of V1*V2 and V3*V4*V5
|
||||||
|
|
||||||
|
# Show monotonic constraints still apply by checking scores after incrementing V1
|
||||||
|
x1 <- sort(unique(x[['V1']]))
|
||||||
|
for (i in 1:length(x1)){
|
||||||
|
testdata <- copy(x[, -c('V1')])
|
||||||
|
testdata[['V1']] <- x1[i]
|
||||||
|
testdata <- testdata[, paste0('V', 1:10), with = FALSE]
|
||||||
|
pred <- predict(bst3, as.matrix(testdata))
|
||||||
|
|
||||||
|
# Should not print out anything due to monotonic constraints
|
||||||
|
if (i > 1) if (any(pred > prev_pred)) print(i)
|
||||||
|
prev_pred <- pred
|
||||||
|
}
|
||||||
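For reference, the constraints can also be written directly in the zero-based column-index form that cols2ids() produces; a hedged illustration (nrounds kept small here for brevity):

bst_idx <- xgboost(data = train, label = y, max_depth = 4,
                   eta = 0.1, nthread = 2, nrounds = 10,
                   interaction_constraints = list(c(0, 1), c(2, 3, 4)))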
@@ -1,7 +1,6 @@
|
|||||||
data(mtcars)
|
data(mtcars)
|
||||||
head(mtcars)
|
head(mtcars)
|
||||||
bst = xgboost(data=as.matrix(mtcars[,-11]),label=mtcars[,11],
|
bst <- xgboost(data = as.matrix(mtcars[, -11]), label = mtcars[, 11],
|
||||||
objective='count:poisson',nrounds=5)
|
objective = 'count:poisson', nrounds = 5)
|
||||||
pred = predict(bst,as.matrix(mtcars[,-11]))
|
pred <- predict(bst, as.matrix(mtcars[, -11]))
|
||||||
sqrt(mean((pred-mtcars[,11])^2))
|
sqrt(mean((pred - mtcars[, 11]) ^ 2))
|
||||||
|
|
||||||
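As a hedged point of comparison for the RMSE above, the same metric for a constant prediction can be computed from base R alone:

# naive baseline: always predict the mean carburetor count
sqrt(mean((mean(mtcars[, 11]) - mtcars[, 11]) ^ 2))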
|
|||||||
@@ -1,23 +1,23 @@
|
|||||||
require(xgboost)
|
require(xgboost)
|
||||||
# load in the agaricus dataset
|
# load in the agaricus dataset
|
||||||
data(agaricus.train, package='xgboost')
|
data(agaricus.train, package = 'xgboost')
|
||||||
data(agaricus.test, package='xgboost')
|
data(agaricus.test, package = 'xgboost')
|
||||||
dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
|
dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
|
||||||
dtest <- xgb.DMatrix(agaricus.test$data, label = agaricus.test$label)
|
dtest <- xgb.DMatrix(agaricus.test$data, label = agaricus.test$label)
|
||||||
|
|
||||||
param <- list(max_depth=2, eta=1, silent=1, objective='binary:logistic')
|
param <- list(max_depth = 2, eta = 1, objective = 'binary:logistic')
|
||||||
watchlist <- list(eval = dtest, train = dtrain)
|
watchlist <- list(eval = dtest, train = dtrain)
|
||||||
nround = 2
|
nrounds <- 2
|
||||||
|
|
||||||
# training the model for two rounds
|
# training the model for two rounds
|
||||||
bst = xgb.train(param, dtrain, nround, nthread = 2, watchlist)
|
bst <- xgb.train(param, dtrain, nrounds, nthread = 2, watchlist)
|
||||||
cat('start testing prediction from first n trees\n')
|
cat('start testing prediction from first n trees\n')
|
||||||
labels <- getinfo(dtest,'label')
|
labels <- getinfo(dtest, 'label')
|
||||||
|
|
||||||
### predict using first 1 tree
|
### predict using first 1 tree
|
||||||
ypred1 = predict(bst, dtest, ntreelimit=1)
|
ypred1 <- predict(bst, dtest, ntreelimit = 1)
|
||||||
# by default, we predict using all the trees
|
# by default, we predict using all the trees
|
||||||
ypred2 = predict(bst, dtest)
|
ypred2 <- predict(bst, dtest)
|
||||||
|
|
||||||
cat('error of ypred1=', mean(as.numeric(ypred1>0.5)!=labels),'\n')
|
cat('error of ypred1=', mean(as.numeric(ypred1 > 0.5) != labels), '\n')
|
||||||
cat('error of ypred2=', mean(as.numeric(ypred2>0.5)!=labels),'\n')
|
cat('error of ypred2=', mean(as.numeric(ypred2 > 0.5) != labels), '\n')
|
||||||
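A hedged sanity check: since the model above was trained for two rounds, limiting prediction to two trees should reproduce the default full-model prediction:

all.equal(predict(bst, dtest, ntreelimit = 2), ypred2)  # expected TRUE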
|
|||||||
@@ -5,48 +5,51 @@ require(Matrix)
|
|||||||
set.seed(1982)
|
set.seed(1982)
|
||||||
|
|
||||||
# load in the agaricus dataset
|
# load in the agaricus dataset
|
||||||
data(agaricus.train, package='xgboost')
|
data(agaricus.train, package = 'xgboost')
|
||||||
data(agaricus.test, package='xgboost')
|
data(agaricus.test, package = 'xgboost')
|
||||||
dtrain <- xgb.DMatrix(data = agaricus.train$data, label = agaricus.train$label)
|
dtrain <- xgb.DMatrix(data = agaricus.train$data, label = agaricus.train$label)
|
||||||
dtest <- xgb.DMatrix(data = agaricus.test$data, label = agaricus.test$label)
|
dtest <- xgb.DMatrix(data = agaricus.test$data, label = agaricus.test$label)
|
||||||
|
|
||||||
param <- list(max_depth=2, eta=1, silent=1, objective='binary:logistic')
|
param <- list(max_depth = 2, eta = 1, objective = 'binary:logistic')
|
||||||
nround = 4
|
nrounds <- 4
|
||||||
|
|
||||||
# training the model for two rounds
|
# training the model for two rounds
|
||||||
bst = xgb.train(params = param, data = dtrain, nrounds = nround, nthread = 2)
|
bst <- xgb.train(params = param, data = dtrain, nrounds = nrounds, nthread = 2)
|
||||||
|
|
||||||
# Model accuracy without new features
|
# Model accuracy without new features
|
||||||
accuracy.before <- sum((predict(bst, agaricus.test$data) >= 0.5) == agaricus.test$label) / length(agaricus.test$label)
|
accuracy.before <- (sum((predict(bst, agaricus.test$data) >= 0.5) == agaricus.test$label)
|
||||||
|
/ length(agaricus.test$label))
|
||||||
|
|
||||||
# by default, we predict using all the trees
|
# by default, we predict using all the trees
|
||||||
|
pred_with_leaf <- predict(bst, dtest, predleaf = TRUE)
|
||||||
pred_with_leaf = predict(bst, dtest, predleaf = TRUE)
|
|
||||||
head(pred_with_leaf)
|
head(pred_with_leaf)
|
||||||
|
|
||||||
create.new.tree.features <- function(model, original.features){
|
create.new.tree.features <- function(model, original.features){
|
||||||
pred_with_leaf <- predict(model, original.features, predleaf = TRUE)
|
pred_with_leaf <- predict(model, original.features, predleaf = TRUE)
|
||||||
cols <- list()
|
cols <- list()
|
||||||
for(i in 1:model$niter){
|
for (i in 1:model$niter) {
|
||||||
# max is not the real max but it s not important for the purpose of adding features
|
# max is not the real max but it s not important for the purpose of adding features
|
||||||
leaf.id <- sort(unique(pred_with_leaf[,i]))
|
leaf.id <- sort(unique(pred_with_leaf[, i]))
|
||||||
cols[[i]] <- factor(x = pred_with_leaf[,i], level = leaf.id)
|
cols[[i]] <- factor(x = pred_with_leaf[, i], level = leaf.id)
|
||||||
}
|
}
|
||||||
cbind(original.features, sparse.model.matrix( ~ . -1, as.data.frame(cols)))
|
cbind(original.features, sparse.model.matrix(~ . - 1, as.data.frame(cols)))
|
||||||
}
|
}
|
||||||
|
|
||||||
# Convert previous features to one hot encoding
|
# Convert previous features to one hot encoding
|
||||||
new.features.train <- create.new.tree.features(bst, agaricus.train$data)
|
new.features.train <- create.new.tree.features(bst, agaricus.train$data)
|
||||||
new.features.test <- create.new.tree.features(bst, agaricus.test$data)
|
new.features.test <- create.new.tree.features(bst, agaricus.test$data)
|
||||||
|
colnames(new.features.test) <- colnames(new.features.train)
|
||||||
|
|
||||||
# learning with new features
|
# learning with new features
|
||||||
new.dtrain <- xgb.DMatrix(data = new.features.train, label = agaricus.train$label)
|
new.dtrain <- xgb.DMatrix(data = new.features.train, label = agaricus.train$label)
|
||||||
new.dtest <- xgb.DMatrix(data = new.features.test, label = agaricus.test$label)
|
new.dtest <- xgb.DMatrix(data = new.features.test, label = agaricus.test$label)
|
||||||
watchlist <- list(train = new.dtrain)
|
watchlist <- list(train = new.dtrain)
|
||||||
bst <- xgb.train(params = param, data = new.dtrain, nrounds = nround, nthread = 2)
|
bst <- xgb.train(params = param, data = new.dtrain, nrounds = nrounds, nthread = 2)
|
||||||
|
|
||||||
# Model accuracy with new features
|
# Model accuracy with new features
|
||||||
accuracy.after <- sum((predict(bst, new.dtest) >= 0.5) == agaricus.test$label) / length(agaricus.test$label)
|
accuracy.after <- (sum((predict(bst, new.dtest) >= 0.5) == agaricus.test$label)
|
||||||
|
/ length(agaricus.test$label))
|
||||||
|
|
||||||
# Here the accuracy was already good and is now perfect.
|
# Here the accuracy was already good and is now perfect.
|
||||||
cat(paste("The accuracy was", accuracy.before, "before adding leaf features and it is now", accuracy.after, "!\n"))
|
cat(paste("The accuracy was", accuracy.before, "before adding leaf features and it is now",
|
||||||
|
accuracy.after, "!\n"))
|
||||||
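A hedged look at the shape of the leaf-index matrix used above: one row per observation in dtest and one column per boosting round, each entry being the id of the leaf that observation ends up in:

dim(pred_with_leaf)         # nrow(dtest) x number of boosting rounds
head(pred_with_leaf[, 1])   # leaf ids reached in the first tree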
|
|||||||
@@ -1,14 +1,14 @@
|
|||||||
# running all scripts in demo folder
|
# running all scripts in demo folder
|
||||||
demo(basic_walkthrough)
|
demo(basic_walkthrough, package = 'xgboost')
|
||||||
demo(custom_objective)
|
demo(custom_objective, package = 'xgboost')
|
||||||
demo(boost_from_prediction)
|
demo(boost_from_prediction, package = 'xgboost')
|
||||||
demo(predict_first_ntree)
|
demo(predict_first_ntree, package = 'xgboost')
|
||||||
demo(generalized_linear_model)
|
demo(generalized_linear_model, package = 'xgboost')
|
||||||
demo(cross_validation)
|
demo(cross_validation, package = 'xgboost')
|
||||||
demo(create_sparse_matrix)
|
demo(create_sparse_matrix, package = 'xgboost')
|
||||||
demo(predict_leaf_indices)
|
demo(predict_leaf_indices, package = 'xgboost')
|
||||||
demo(early_stopping)
|
demo(early_stopping, package = 'xgboost')
|
||||||
demo(poisson_regression)
|
demo(poisson_regression, package = 'xgboost')
|
||||||
demo(caret_wrapper)
|
demo(caret_wrapper, package = 'xgboost')
|
||||||
demo(tweedie_regression)
|
demo(tweedie_regression, package = 'xgboost')
|
||||||
#demo(gpu_accelerated) # can only run when built with GPU support
|
#demo(gpu_accelerated, package = 'xgboost') # can only run when built with GPU support
|
||||||
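To browse the same list interactively, base R can enumerate the demos shipped with the package (a hedged aside, not part of the script):

demo(package = 'xgboost')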
|
|||||||
6
R-package/demo/tweedie_regression.R
Executable file → Normal file
@@ -8,12 +8,12 @@ data(AutoClaim)
|
|||||||
dt <- data.table(AutoClaim)
|
dt <- data.table(AutoClaim)
|
||||||
|
|
||||||
# exclude these columns from the model matrix
|
# exclude these columns from the model matrix
|
||||||
exclude <- c('POLICYNO', 'PLCYDATE', 'CLM_FREQ5', 'CLM_AMT5', 'CLM_FLAG', 'IN_YY')
|
exclude <- c('POLICYNO', 'PLCYDATE', 'CLM_FREQ5', 'CLM_AMT5', 'CLM_FLAG', 'IN_YY')
|
||||||
|
|
||||||
# retains the missing values
|
# retains the missing values
|
||||||
# NOTE: this dataset is comes ready out of the box
|
# NOTE: this dataset is comes ready out of the box
|
||||||
options(na.action = 'na.pass')
|
options(na.action = 'na.pass')
|
||||||
x <- sparse.model.matrix(~ . - 1, data = dt[, -exclude, with = F])
|
x <- sparse.model.matrix(~ . - 1, data = dt[, -exclude, with = FALSE])
|
||||||
options(na.action = 'na.omit')
|
options(na.action = 'na.omit')
|
||||||
|
|
||||||
# response
|
# response
|
||||||
@@ -46,4 +46,4 @@ var_imp <- xgb.importance(attr(x, 'Dimnames')[[2]], model = bst)
|
|||||||
|
|
||||||
preds <- predict(bst, d_train)
|
preds <- predict(bst, d_train)
|
||||||
|
|
||||||
rmse <- sqrt(sum(mean((y - preds)^2)))
|
rmse <- sqrt(sum(mean((y - preds) ^ 2)))
|
||||||
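The parameter list this demo relies on (defined outside the hunks shown here) would typically select the Tweedie objective; a hedged sketch with illustrative values:

params <- list(objective = 'reg:tweedie',
               eval_metric = 'rmse',
               tweedie_variance_power = 1.4,
               max_depth = 6, eta = 1)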
|
|||||||
96
R-package/inst/make-r-def.R
Normal file
@@ -0,0 +1,96 @@
|
|||||||
|
# [description]
|
||||||
|
# Create a definition file (.def) from a .dll file, using objdump. This
|
||||||
|
# is used by FindLibR.cmake when building the R package with MSVC.
|
||||||
|
#
|
||||||
|
# [usage]
|
||||||
|
#
|
||||||
|
# Rscript make-r-def.R something.dll something.def
|
||||||
|
#
|
||||||
|
# [references]
|
||||||
|
# * https://www.cs.colorado.edu/~main/cs1300/doc/mingwfaq.html
|
||||||
|
|
||||||
|
args <- commandArgs(trailingOnly = TRUE)
|
||||||
|
|
||||||
|
IN_DLL_FILE <- args[[1L]]
|
||||||
|
OUT_DEF_FILE <- args[[2L]]
|
||||||
|
DLL_BASE_NAME <- basename(IN_DLL_FILE)
|
||||||
|
|
||||||
|
message(sprintf("Creating '%s' from '%s'", OUT_DEF_FILE, IN_DLL_FILE))
|
||||||
|
|
||||||
|
# system() will not raise an R exception if the process called
|
||||||
|
# fails. Wrapping it here to get that behavior.
|
||||||
|
#
|
||||||
|
# system() introduces a lot of overhead, at least on Windows,
|
||||||
|
# so trying processx if it is available
|
||||||
|
.pipe_shell_command_to_stdout <- function(command, args, out_file) {
|
||||||
|
has_processx <- suppressMessages({
|
||||||
|
suppressWarnings({
|
||||||
|
require("processx") # nolint
|
||||||
|
})
|
||||||
|
})
|
||||||
|
if (has_processx) {
|
||||||
|
p <- processx::process$new(
|
||||||
|
command = command
|
||||||
|
, args = args
|
||||||
|
, stdout = out_file
|
||||||
|
, windows_verbatim_args = FALSE
|
||||||
|
)
|
||||||
|
invisible(p$wait())
|
||||||
|
} else {
|
||||||
|
message(paste0(
|
||||||
|
"Using system2() to run shell commands. Installing "
|
||||||
|
, "'processx' with install.packages('processx') might "
|
||||||
|
, "make this faster."
|
||||||
|
))
|
||||||
|
exit_code <- system2(
|
||||||
|
command = command
|
||||||
|
, args = shQuote(args)
|
||||||
|
, stdout = out_file
|
||||||
|
)
|
||||||
|
if (exit_code != 0L) {
|
||||||
|
stop(paste0("Command failed with exit code: ", exit_code))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return(invisible(NULL))
|
||||||
|
}
|
||||||
|
|
||||||
|
# use objdump to dump all the symbols
|
||||||
|
OBJDUMP_FILE <- "objdump-out.txt"
|
||||||
|
.pipe_shell_command_to_stdout(
|
||||||
|
command = "objdump"
|
||||||
|
, args = c("-p", IN_DLL_FILE)
|
||||||
|
, out_file = OBJDUMP_FILE
|
||||||
|
)
|
||||||
|
|
||||||
|
objdump_results <- readLines(OBJDUMP_FILE)
|
||||||
|
result <- file.remove(OBJDUMP_FILE)
|
||||||
|
|
||||||
|
# Only one table in the objdump results matters for our purposes,
|
||||||
|
# see https://www.cs.colorado.edu/~main/cs1300/doc/mingwfaq.html
|
||||||
|
start_index <- which(
|
||||||
|
grepl(
|
||||||
|
pattern = "[Ordinal/Name Pointer] Table"
|
||||||
|
, x = objdump_results
|
||||||
|
, fixed = TRUE
|
||||||
|
)
|
||||||
|
)
|
||||||
|
empty_lines <- which(objdump_results == "")
|
||||||
|
end_of_table <- empty_lines[empty_lines > start_index][1L]
|
||||||
|
|
||||||
|
# Read the contents of the table
|
||||||
|
exported_symbols <- objdump_results[(start_index + 1L):end_of_table]
|
||||||
|
exported_symbols <- gsub("\t", "", exported_symbols)
|
||||||
|
exported_symbols <- gsub(".*\\] ", "", exported_symbols)
|
||||||
|
exported_symbols <- gsub(" ", "", exported_symbols)
|
||||||
|
|
||||||
|
# Write R.def file
|
||||||
|
writeLines(
|
||||||
|
text = c(
|
||||||
|
paste0("LIBRARY \"", DLL_BASE_NAME, "\"")
|
||||||
|
, "EXPORTS"
|
||||||
|
, exported_symbols
|
||||||
|
)
|
||||||
|
, con = OUT_DEF_FILE
|
||||||
|
, sep = "\n"
|
||||||
|
)
|
||||||
|
message(sprintf("Successfully created '%s'", OUT_DEF_FILE))
|
||||||
62
R-package/man/a-compatibility-note-for-saveRDS-save.Rd
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
% Generated by roxygen2: do not edit by hand
|
||||||
|
% Please edit documentation in R/utils.R
|
||||||
|
\name{a-compatibility-note-for-saveRDS-save}
|
||||||
|
\alias{a-compatibility-note-for-saveRDS-save}
|
||||||
|
\title{Do not use \code{\link[base]{saveRDS}} or \code{\link[base]{save}} for long-term archival of
|
||||||
|
models. Instead, use \code{\link{xgb.save}} or \code{\link{xgb.save.raw}}.}
|
||||||
|
\description{
|
||||||
|
It is a common practice to use the built-in \code{\link[base]{saveRDS}} function (or
|
||||||
|
\code{\link[base]{save}}) to persist R objects to the disk. While it is possible to persist
|
||||||
|
\code{xgb.Booster} objects using \code{\link[base]{saveRDS}}, it is not advisable to do so if
|
||||||
|
the model is to be accessed in the future. If you train a model with the current version of
|
||||||
|
XGBoost and persist it with \code{\link[base]{saveRDS}}, the model is not guaranteed to be
|
||||||
|
accessible in later releases of XGBoost. To ensure that your model can be accessed in future
|
||||||
|
releases of XGBoost, use \code{\link{xgb.save}} or \code{\link{xgb.save.raw}} instead.
|
||||||
|
}
|
||||||
|
\details{
|
||||||
|
Use \code{\link{xgb.save}} to save the XGBoost model as a stand-alone file. You may opt into
|
||||||
|
the JSON format by specifying the JSON extension. To read the model back, use
|
||||||
|
\code{\link{xgb.load}}.
|
||||||
|
|
||||||
|
Use \code{\link{xgb.save.raw}} to save the XGBoost model as a sequence (vector) of raw bytes
|
||||||
|
in a future-proof manner. Future releases of XGBoost will be able to read the raw bytes and
|
||||||
|
re-construct the corresponding model. To read the model back, use \code{\link{xgb.load.raw}}.
|
||||||
|
The \code{\link{xgb.save.raw}} function is useful if you'd like to persist the XGBoost model
|
||||||
|
as part of another R object.
|
||||||
|
|
||||||
|
Note: Do not use \code{\link{xgb.serialize}} to store models long-term. It persists not only the
|
||||||
|
model but also internal configurations and parameters, and its format is not stable across
|
||||||
|
multiple XGBoost versions. Use \code{\link{xgb.serialize}} only for checkpointing.
|
||||||
|
|
||||||
|
For more details and explanation about model persistence and archival, consult the page
|
||||||
|
\url{https://xgboost.readthedocs.io/en/latest/tutorials/saving_model.html}.
|
||||||
|
}
|
||||||
|
\examples{
|
||||||
|
data(agaricus.train, package='xgboost')
|
||||||
|
bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label, max_depth = 2,
|
||||||
|
eta = 1, nthread = 2, nrounds = 2, objective = "binary:logistic")
|
||||||
|
|
||||||
|
# Save as a stand-alone file; load it with xgb.load()
|
||||||
|
xgb.save(bst, 'xgb.model')
|
||||||
|
bst2 <- xgb.load('xgb.model')
|
||||||
|
|
||||||
|
# Save as a stand-alone file (JSON); load it with xgb.load()
|
||||||
|
xgb.save(bst, 'xgb.model.json')
|
||||||
|
bst2 <- xgb.load('xgb.model.json')
|
||||||
|
|
||||||
|
# Save as a raw byte vector; load it with xgb.load.raw()
|
||||||
|
xgb_bytes <- xgb.save.raw(bst)
|
||||||
|
bst2 <- xgb.load.raw(xgb_bytes)
|
||||||
|
|
||||||
|
# Persist XGBoost model as part of another R object
|
||||||
|
obj <- list(xgb_model_bytes = xgb.save.raw(bst), description = "My first XGBoost model")
|
||||||
|
# Persist the R object. Here, saveRDS() is okay, since it doesn't persist
|
||||||
|
# xgb.Booster directly. What's being persisted is the future-proof byte representation
|
||||||
|
# as given by xgb.save.raw().
|
||||||
|
saveRDS(obj, 'my_object.rds')
|
||||||
|
# Read back the R object
|
||||||
|
obj2 <- readRDS('my_object.rds')
|
||||||
|
# Re-construct xgb.Booster object from the bytes
|
||||||
|
bst2 <- xgb.load.raw(obj2$xgb_model_bytes)
|
||||||
|
|
||||||
|
}
|
||||||
@@ -4,8 +4,10 @@
|
|||||||
\name{agaricus.test}
|
\name{agaricus.test}
|
||||||
\alias{agaricus.test}
|
\alias{agaricus.test}
|
||||||
\title{Test part from Mushroom Data Set}
|
\title{Test part from Mushroom Data Set}
|
||||||
\format{A list containing a label vector, and a dgCMatrix object with 1611
|
\format{
|
||||||
rows and 126 variables}
|
A list containing a label vector, and a dgCMatrix object with 1611
|
||||||
|
rows and 126 variables
|
||||||
|
}
|
||||||
\usage{
|
\usage{
|
||||||
data(agaricus.test)
|
data(agaricus.test)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,8 +4,10 @@
|
|||||||
\name{agaricus.train}
|
\name{agaricus.train}
|
||||||
\alias{agaricus.train}
|
\alias{agaricus.train}
|
||||||
\title{Training part from Mushroom Data Set}
|
\title{Training part from Mushroom Data Set}
|
||||||
\format{A list containing a label vector, and a dgCMatrix object with 6513
|
\format{
|
||||||
rows and 127 variables}
|
A list containing a label vector, and a dgCMatrix object with 6513
|
||||||
|
rows and 127 variables
|
||||||
|
}
|
||||||
\usage{
|
\usage{
|
||||||
data(agaricus.train)
|
data(agaricus.train)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -18,7 +18,7 @@ the boosting is completed.
|
|||||||
WARNING: side-effects!!! Be aware that these callback functions access and modify things in
|
WARNING: side-effects!!! Be aware that these callback functions access and modify things in
|
||||||
the environment from which they are called from, which is a fairly uncommon thing to do in R.
|
the environment from which they are called from, which is a fairly uncommon thing to do in R.
|
||||||
|
|
||||||
To write a custom callback closure, make sure you first understand the main concepts about R envoronments.
|
To write a custom callback closure, make sure you first understand the main concepts about R environments.
|
||||||
Check either R documentation on \code{\link[base]{environment}} or the
|
Check either R documentation on \code{\link[base]{environment}} or the
|
||||||
\href{http://adv-r.had.co.nz/Environments.html}{Environments chapter} from the "Advanced R"
|
\href{http://adv-r.had.co.nz/Environments.html}{Environments chapter} from the "Advanced R"
|
||||||
book by Hadley Wickham. Further, the best option is to read the code of some of the existing callbacks -
|
book by Hadley Wickham. Further, the best option is to read the code of some of the existing callbacks -
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ depending on the number of prediction outputs per data row. The order of predict
|
|||||||
to the order of rows in the original dataset. Note that when a custom \code{folds} list is
|
to the order of rows in the original dataset. Note that when a custom \code{folds} list is
|
||||||
provided in \code{xgb.cv}, the predictions would only be returned properly when this list is a
|
provided in \code{xgb.cv}, the predictions would only be returned properly when this list is a
|
||||||
non-overlapping list of k sets of indices, as in a standard k-fold CV. The predictions would not be
|
non-overlapping list of k sets of indices, as in a standard k-fold CV. The predictions would not be
|
||||||
meaningful when user-profided folds have overlapping indices as in, e.g., random sampling splits.
|
meaningful when user-provided folds have overlapping indices as in, e.g., random sampling splits.
|
||||||
When some of the indices in the training dataset are not included into user-provided \code{folds},
|
When some of the indices in the training dataset are not included into user-provided \code{folds},
|
||||||
their prediction value would be \code{NA}.
|
their prediction value would be \code{NA}.
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,8 +4,12 @@
|
|||||||
\alias{cb.early.stop}
|
\alias{cb.early.stop}
|
||||||
\title{Callback closure to activate the early stopping.}
|
\title{Callback closure to activate the early stopping.}
|
||||||
\usage{
|
\usage{
|
||||||
cb.early.stop(stopping_rounds, maximize = FALSE, metric_name = NULL,
|
cb.early.stop(
|
||||||
verbose = TRUE)
|
stopping_rounds,
|
||||||
|
maximize = FALSE,
|
||||||
|
metric_name = NULL,
|
||||||
|
verbose = TRUE
|
||||||
|
)
|
||||||
}
|
}
|
||||||
\arguments{
|
\arguments{
|
||||||
\item{stopping_rounds}{The number of rounds with no improvement in
|
\item{stopping_rounds}{The number of rounds with no improvement in
|
||||||
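A hedged sketch of activating this callback explicitly in xgb.train (in practice the early_stopping_rounds argument sets it up for you; the data and values below are illustrative):

bst <- xgb.train(params = list(max_depth = 2, eta = 1, objective = "binary:logistic"),
                 data = dtrain, nrounds = 50,
                 watchlist = list(eval = dtest, train = dtrain),
                 callbacks = list(cb.early.stop(stopping_rounds = 5, maximize = FALSE)))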
|
|||||||
@@ -2,7 +2,7 @@
|
|||||||
% Please edit documentation in R/callbacks.R
|
% Please edit documentation in R/callbacks.R
|
||||||
\name{cb.reset.parameters}
|
\name{cb.reset.parameters}
|
||||||
\alias{cb.reset.parameters}
|
\alias{cb.reset.parameters}
|
||||||
\title{Callback closure for restetting the booster's parameters at each iteration.}
|
\title{Callback closure for resetting the booster's parameters at each iteration.}
|
||||||
\usage{
|
\usage{
|
||||||
cb.reset.parameters(new_params)
|
cb.reset.parameters(new_params)
|
||||||
}
|
}
|
||||||
@@ -15,14 +15,14 @@ which returns a new parameter value by using the current iteration number
|
|||||||
and the total number of boosting rounds.}
|
and the total number of boosting rounds.}
|
||||||
}
|
}
|
||||||
\description{
|
\description{
|
||||||
Callback closure for restetting the booster's parameters at each iteration.
|
Callback closure for resetting the booster's parameters at each iteration.
|
||||||
}
|
}
|
||||||
\details{
|
\details{
|
||||||
This is a "pre-iteration" callback function used to reset booster's parameters
|
This is a "pre-iteration" callback function used to reset booster's parameters
|
||||||
at the beginning of each iteration.
|
at the beginning of each iteration.
|
||||||
|
|
||||||
Note that when training is resumed from some previous model, and a function is used to
|
Note that when training is resumed from some previous model, and a function is used to
|
||||||
reset a parameter value, the \code{nround} argument in this function would be the
|
reset a parameter value, the \code{nrounds} argument in this function would be the
|
||||||
the number of boosting rounds in the current training.
|
the number of boosting rounds in the current training.
|
||||||
|
|
||||||
Callback function expects the following values to be set in its calling frame:
|
Callback function expects the following values to be set in its calling frame:
|
||||||
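A hedged sketch of the callback in use, decaying eta over the boosting rounds (the data and schedule are illustrative; the vector length must match nrounds):

bst <- xgb.train(params = list(max_depth = 2, objective = "binary:logistic"),
                 data = dtrain, nrounds = 10, watchlist = list(train = dtrain),
                 callbacks = list(cb.reset.parameters(list(eta = seq(1, 0.1, length.out = 10)))))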
|
|||||||
@@ -17,7 +17,7 @@ and the second one is column names}
|
|||||||
}
|
}
|
||||||
\description{
|
\description{
|
||||||
Only column names are supported for \code{xgb.DMatrix}, thus setting of
|
Only column names are supported for \code{xgb.DMatrix}, thus setting of
|
||||||
row names would have no effect and returnten row names would be NULL.
|
row names would have no effect and returned row names would be NULL.
|
||||||
}
|
}
|
||||||
\details{
|
\details{
|
||||||
Generic \code{dimnames} methods are used by \code{colnames}.
|
Generic \code{dimnames} methods are used by \code{colnames}.
|
||||||
|
|||||||
@@ -5,9 +5,20 @@
|
|||||||
\alias{predict.xgb.Booster.handle}
|
\alias{predict.xgb.Booster.handle}
|
||||||
\title{Predict method for eXtreme Gradient Boosting model}
|
\title{Predict method for eXtreme Gradient Boosting model}
|
||||||
\usage{
|
\usage{
|
||||||
\method{predict}{xgb.Booster}(object, newdata, missing = NA,
|
\method{predict}{xgb.Booster}(
|
||||||
outputmargin = FALSE, ntreelimit = NULL, predleaf = FALSE,
|
object,
|
||||||
predcontrib = FALSE, approxcontrib = FALSE, reshape = FALSE, ...)
|
newdata,
|
||||||
|
missing = NA,
|
||||||
|
outputmargin = FALSE,
|
||||||
|
ntreelimit = NULL,
|
||||||
|
predleaf = FALSE,
|
||||||
|
predcontrib = FALSE,
|
||||||
|
approxcontrib = FALSE,
|
||||||
|
predinteraction = FALSE,
|
||||||
|
reshape = FALSE,
|
||||||
|
training = FALSE,
|
||||||
|
...
|
||||||
|
)
|
||||||
|
|
||||||
\method{predict}{xgb.Booster.handle}(object, ...)
|
\method{predict}{xgb.Booster.handle}(object, ...)
|
||||||
}
|
}
|
||||||
@@ -26,14 +37,20 @@ logistic regression would result in predictions for log-odds instead of probabil
|
|||||||
\item{ntreelimit}{limit the number of model's trees or boosting iterations used in prediction (see Details).
|
\item{ntreelimit}{limit the number of model's trees or boosting iterations used in prediction (see Details).
|
||||||
It will use all the trees by default (\code{NULL} value).}
|
It will use all the trees by default (\code{NULL} value).}
|
||||||
|
|
||||||
\item{predleaf}{whether predict leaf index instead.}
|
\item{predleaf}{whether predict leaf index.}
|
||||||
|
|
||||||
\item{predcontrib}{whether to return feature contributions to individual predictions instead (see Details).}
|
\item{predcontrib}{whether to return feature contributions to individual predictions (see Details).}
|
||||||
|
|
||||||
\item{approxcontrib}{whether to use a fast approximation for feature contributions (see Details).}
|
\item{approxcontrib}{whether to use a fast approximation for feature contributions (see Details).}
|
||||||
|
|
||||||
|
\item{predinteraction}{whether to return contributions of feature interactions to individual predictions (see Details).}
|
||||||
|
|
||||||
\item{reshape}{whether to reshape the vector of predictions to a matrix form when there are several
|
\item{reshape}{whether to reshape the vector of predictions to a matrix form when there are several
|
||||||
prediction outputs per case. This option has no effect when \code{predleaf = TRUE}.}
|
prediction outputs per case. This option has no effect when either of predleaf, predcontrib,
|
||||||
|
or predinteraction flags is TRUE.}
|
||||||
|
|
||||||
|
\item{training}{whether is the prediction result used for training. For dart booster,
|
||||||
|
training predicting will perform dropout.}
|
||||||
|
|
||||||
\item{...}{Parameters passed to \code{predict.xgb.Booster}}
|
\item{...}{Parameters passed to \code{predict.xgb.Booster}}
|
||||||
}
|
}
|
||||||
@@ -51,6 +68,14 @@ When \code{predcontrib = TRUE} and it is not a multiclass setting, the output is
|
|||||||
For a multiclass case, a list of \code{num_class} elements is returned, where each element is
|
For a multiclass case, a list of \code{num_class} elements is returned, where each element is
|
||||||
such a matrix. The contribution values are on the scale of untransformed margin
|
such a matrix. The contribution values are on the scale of untransformed margin
|
||||||
(e.g., for binary classification would mean that the contributions are log-odds deviations from bias).
|
(e.g., for binary classification would mean that the contributions are log-odds deviations from bias).
|
||||||
|
|
||||||
|
When \code{predinteraction = TRUE} and it is not a multiclass setting, the output is a 3d array with
|
||||||
|
dimensions \code{c(nrow, num_features + 1, num_features + 1)}. The off-diagonal (in the last two dimensions)
|
||||||
|
elements represent different features interaction contributions. The array is symmetric WRT the last
|
||||||
|
two dimensions. The "+ 1" columns corresponds to bias. Summing this array along the last dimension should
|
||||||
|
produce practically the same result as predict with \code{predcontrib = TRUE}.
|
||||||
|
For a multiclass case, a list of \code{num_class} elements is returned, where each element is
|
||||||
|
such an array.
|
||||||
}
|
}
|
||||||
\description{
|
\description{
|
||||||
Predicted values based on either xgboost model or model handle object.
|
Predicted values based on either xgboost model or model handle object.
|
||||||
@@ -76,6 +101,11 @@ values (Lundberg 2017) that sum to the difference between the expected output
|
|||||||
of the model and the current prediction (where the hessian weights are used to compute the expectations).
|
of the model and the current prediction (where the hessian weights are used to compute the expectations).
|
||||||
Setting \code{approxcontrib = TRUE} approximates these values following the idea explained
|
Setting \code{approxcontrib = TRUE} approximates these values following the idea explained
|
||||||
in \url{http://blog.datadive.net/interpreting-random-forests/}.
|
in \url{http://blog.datadive.net/interpreting-random-forests/}.
|
||||||
|
|
||||||
|
With \code{predinteraction = TRUE}, SHAP values of contributions of interaction of each pair of features
|
||||||
|
are computed. Note that this operation might be rather expensive in terms of compute and memory.
|
||||||
|
Since it quadratically depends on the number of features, it is recommended to perform selection
|
||||||
|
of the most important features first. See below about the format of the returned results.
|
||||||
}
|
}
|
||||||
\examples{
|
\examples{
|
||||||
## binary classification:
|
## binary classification:
|
||||||
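A hedged sketch of the predinteraction output described above, assuming the bst model and the agaricus test matrix from the surrounding examples:

shap_int <- predict(bst, agaricus.test$data, predinteraction = TRUE)
dim(shap_int)   # c(nrow, num_features + 1, num_features + 1)
# summing over the last dimension should roughly recover the predcontrib values
contrib <- predict(bst, agaricus.test$data, predcontrib = TRUE)
max(abs(apply(shap_int, c(1, 2), sum) - contrib))   # expected to be near zero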
|
|||||||
@@ -5,7 +5,7 @@
|
|||||||
\alias{slice.xgb.DMatrix}
|
\alias{slice.xgb.DMatrix}
|
||||||
\alias{[.xgb.DMatrix}
|
\alias{[.xgb.DMatrix}
|
||||||
\title{Get a new DMatrix containing the specified rows of
|
\title{Get a new DMatrix containing the specified rows of
|
||||||
orginal xgb.DMatrix object}
|
original xgb.DMatrix object}
|
||||||
\usage{
|
\usage{
|
||||||
slice(object, ...)
|
slice(object, ...)
|
||||||
|
|
||||||
@@ -24,7 +24,7 @@ slice(object, ...)
|
|||||||
}
|
}
|
||||||
\description{
|
\description{
|
||||||
Get a new DMatrix containing the specified rows of
|
Get a new DMatrix containing the specified rows of
|
||||||
orginal xgb.DMatrix object
|
original xgb.DMatrix object
|
||||||
}
|
}
|
||||||
\examples{
|
\examples{
|
||||||
data(agaricus.train, package='xgboost')
|
data(agaricus.train, package='xgboost')
|
||||||
|
|||||||
@@ -28,7 +28,7 @@ E.g., when an \code{xgb.Booster} model is saved as an R object and then is loade
|
|||||||
its handle (pointer) to an internal xgboost model would be invalid. The majority of xgboost methods
|
its handle (pointer) to an internal xgboost model would be invalid. The majority of xgboost methods
|
||||||
should still work for such a model object since those methods would be using
|
should still work for such a model object since those methods would be using
|
||||||
\code{xgb.Booster.complete} internally. However, one might find it to be more efficient to call the
|
\code{xgb.Booster.complete} internally. However, one might find it to be more efficient to call the
|
||||||
\code{xgb.Booster.complete} function explicitely once after loading a model as an R-object.
|
\code{xgb.Booster.complete} function explicitly once after loading a model as an R-object.
|
||||||
That would prevent further repeated implicit reconstruction of an internal booster model.
|
That would prevent further repeated implicit reconstruction of an internal booster model.
|
||||||
}
|
}
|
||||||
\examples{
|
\examples{
|
||||||
@@ -38,7 +38,10 @@ bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label, max_dep
|
|||||||
eta = 1, nthread = 2, nrounds = 2, objective = "binary:logistic")
|
eta = 1, nthread = 2, nrounds = 2, objective = "binary:logistic")
|
||||||
saveRDS(bst, "xgb.model.rds")
|
saveRDS(bst, "xgb.model.rds")
|
||||||
|
|
||||||
|
# Warning: The resulting RDS file is only compatible with the current XGBoost version.
|
||||||
|
# Refer to the section titled "a-compatibility-note-for-saveRDS-save".
|
||||||
bst1 <- readRDS("xgb.model.rds")
|
bst1 <- readRDS("xgb.model.rds")
|
||||||
|
if (file.exists("xgb.model.rds")) file.remove("xgb.model.rds")
|
||||||
# the handle is invalid:
|
# the handle is invalid:
|
||||||
print(bst1$handle)
|
print(bst1$handle)
|
||||||
|
|
||||||
|
|||||||
@@ -31,4 +31,5 @@ train <- agaricus.train
|
|||||||
dtrain <- xgb.DMatrix(train$data, label=train$label)
|
dtrain <- xgb.DMatrix(train$data, label=train$label)
|
||||||
xgb.DMatrix.save(dtrain, 'xgb.DMatrix.data')
|
xgb.DMatrix.save(dtrain, 'xgb.DMatrix.data')
|
||||||
dtrain <- xgb.DMatrix('xgb.DMatrix.data')
|
dtrain <- xgb.DMatrix('xgb.DMatrix.data')
|
||||||
|
if (file.exists('xgb.DMatrix.data')) file.remove('xgb.DMatrix.data')
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -20,4 +20,5 @@ train <- agaricus.train
|
|||||||
dtrain <- xgb.DMatrix(train$data, label=train$label)
|
dtrain <- xgb.DMatrix(train$data, label=train$label)
|
||||||
xgb.DMatrix.save(dtrain, 'xgb.DMatrix.data')
|
xgb.DMatrix.save(dtrain, 'xgb.DMatrix.data')
|
||||||
dtrain <- xgb.DMatrix('xgb.DMatrix.data')
|
dtrain <- xgb.DMatrix('xgb.DMatrix.data')
|
||||||
|
if (file.exists('xgb.DMatrix.data')) file.remove('xgb.DMatrix.data')
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -55,7 +55,7 @@ than for \code{xgb.Booster}, since only just a handle (pointer) would need to be
|
|||||||
That would only matter if attributes need to be set many times.
|
That would only matter if attributes need to be set many times.
|
||||||
Note, however, that when feeding a handle of an \code{xgb.Booster} object to the attribute setters,
|
Note, however, that when feeding a handle of an \code{xgb.Booster} object to the attribute setters,
|
||||||
the raw model cache of an \code{xgb.Booster} object would not be automatically updated,
|
the raw model cache of an \code{xgb.Booster} object would not be automatically updated,
|
||||||
and it would be user's responsibility to call \code{xgb.save.raw} to update it.
|
and it would be user's responsibility to call \code{xgb.serialize} to update it.
|
||||||
|
|
||||||
The \code{xgb.attributes<-} setter either updates the existing or adds one or several attributes,
|
The \code{xgb.attributes<-} setter either updates the existing or adds one or several attributes,
|
||||||
but it doesn't delete the other existing attributes.
|
but it doesn't delete the other existing attributes.
|
||||||
@@ -73,6 +73,7 @@ xgb.attributes(bst) <- list(a = 123, b = "abc")
|
|||||||
|
|
||||||
xgb.save(bst, 'xgb.model')
|
xgb.save(bst, 'xgb.model')
|
||||||
bst1 <- xgb.load('xgb.model')
|
bst1 <- xgb.load('xgb.model')
|
||||||
|
if (file.exists('xgb.model')) file.remove('xgb.model')
|
||||||
print(xgb.attr(bst1, "my_attribute"))
|
print(xgb.attr(bst1, "my_attribute"))
|
||||||
print(xgb.attributes(bst1))
|
print(xgb.attributes(bst1))
|
||||||
|
|
||||||
|
|||||||
28
R-package/man/xgb.config.Rd
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
% Generated by roxygen2: do not edit by hand
|
||||||
|
% Please edit documentation in R/xgb.Booster.R
|
||||||
|
\name{xgb.config}
|
||||||
|
\alias{xgb.config}
|
||||||
|
\alias{xgb.config<-}
|
||||||
|
\title{Accessors for model parameters as JSON string.}
|
||||||
|
\usage{
|
||||||
|
xgb.config(object)
|
||||||
|
|
||||||
|
xgb.config(object) <- value
|
||||||
|
}
|
||||||
|
\arguments{
|
||||||
|
\item{object}{Object of class \code{xgb.Booster}}
|
||||||
|
|
||||||
|
\item{value}{A JSON string.}
|
||||||
|
}
|
||||||
|
\description{
|
||||||
|
Accessors for model parameters as JSON string.
|
||||||
|
}
|
||||||
|
\examples{
|
||||||
|
data(agaricus.train, package='xgboost')
|
||||||
|
train <- agaricus.train
|
||||||
|
|
||||||
|
bst <- xgboost(data = train$data, label = train$label, max_depth = 2,
|
||||||
|
eta = 1, nthread = 2, nrounds = 2, objective = "binary:logistic")
|
||||||
|
config <- xgb.config(bst)
|
||||||
|
|
||||||
|
}
|
||||||
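The setter form documented above round-trips the same JSON string; a hedged one-liner continuing the example:

xgb.config(bst) <- config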
@@ -63,9 +63,9 @@ dtrain <- xgb.DMatrix(data = agaricus.train$data, label = agaricus.train$label)
|
|||||||
dtest <- xgb.DMatrix(data = agaricus.test$data, label = agaricus.test$label)
|
dtest <- xgb.DMatrix(data = agaricus.test$data, label = agaricus.test$label)
|
||||||
|
|
||||||
param <- list(max_depth=2, eta=1, silent=1, objective='binary:logistic')
|
param <- list(max_depth=2, eta=1, silent=1, objective='binary:logistic')
|
||||||
nround = 4
|
nrounds = 4
|
||||||
|
|
||||||
bst = xgb.train(params = param, data = dtrain, nrounds = nround, nthread = 2)
|
bst = xgb.train(params = param, data = dtrain, nrounds = nrounds, nthread = 2)
|
||||||
|
|
||||||
# Model accuracy without new features
|
# Model accuracy without new features
|
||||||
accuracy.before <- sum((predict(bst, agaricus.test$data) >= 0.5) == agaricus.test$label) /
|
accuracy.before <- sum((predict(bst, agaricus.test$data) >= 0.5) == agaricus.test$label) /
|
||||||
@@ -79,7 +79,7 @@ new.features.test <- xgb.create.features(model = bst, agaricus.test$data)
|
|||||||
new.dtrain <- xgb.DMatrix(data = new.features.train, label = agaricus.train$label)
|
new.dtrain <- xgb.DMatrix(data = new.features.train, label = agaricus.train$label)
|
||||||
new.dtest <- xgb.DMatrix(data = new.features.test, label = agaricus.test$label)
|
new.dtest <- xgb.DMatrix(data = new.features.test, label = agaricus.test$label)
|
||||||
watchlist <- list(train = new.dtrain)
|
watchlist <- list(train = new.dtrain)
|
||||||
bst <- xgb.train(params = param, data = new.dtrain, nrounds = nround, nthread = 2)
|
bst <- xgb.train(params = param, data = new.dtrain, nrounds = nrounds, nthread = 2)
|
||||||
|
|
||||||
# Model accuracy with new features
|
# Model accuracy with new features
|
||||||
accuracy.after <- sum((predict(bst, new.dtest) >= 0.5) == agaricus.test$label) /
|
accuracy.after <- sum((predict(bst, new.dtest) >= 0.5) == agaricus.test$label) /
|
||||||
@@ -87,6 +87,6 @@ accuracy.after <- sum((predict(bst, new.dtest) >= 0.5) == agaricus.test$label) /
|
|||||||
|
|
||||||
# Here the accuracy was already good and is now perfect.
|
# Here the accuracy was already good and is now perfect.
|
||||||
cat(paste("The accuracy was", accuracy.before, "before adding leaf features and it is now",
|
cat(paste("The accuracy was", accuracy.before, "before adding leaf features and it is now",
|
||||||
accuracy.after, "!\\n"))
|
accuracy.after, "!\n"))
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,19 +4,39 @@
|
|||||||
\alias{xgb.cv}
|
\alias{xgb.cv}
|
||||||
\title{Cross Validation}
|
\title{Cross Validation}
|
||||||
\usage{
|
\usage{
|
||||||
xgb.cv(params = list(), data, nrounds, nfold, label = NULL, missing = NA,
|
xgb.cv(
|
||||||
prediction = FALSE, showsd = TRUE, metrics = list(), obj = NULL,
|
params = list(),
|
||||||
feval = NULL, stratified = TRUE, folds = NULL, verbose = TRUE,
|
data,
|
||||||
print_every_n = 1L, early_stopping_rounds = NULL, maximize = NULL,
|
nrounds,
|
||||||
callbacks = list(), ...)
|
nfold,
|
||||||
|
label = NULL,
|
||||||
|
missing = NA,
|
||||||
|
prediction = FALSE,
|
||||||
|
showsd = TRUE,
|
||||||
|
metrics = list(),
|
||||||
|
obj = NULL,
|
||||||
|
feval = NULL,
|
||||||
|
stratified = TRUE,
|
||||||
|
folds = NULL,
|
||||||
|
train_folds = NULL,
|
||||||
|
verbose = TRUE,
|
||||||
|
print_every_n = 1L,
|
||||||
|
early_stopping_rounds = NULL,
|
||||||
|
maximize = NULL,
|
||||||
|
callbacks = list(),
|
||||||
|
...
|
||||||
|
)
|
||||||
}
|
}
|
||||||
\arguments{
|
\arguments{
|
||||||
\item{params}{the list of parameters. Commonly used ones are:
|
\item{params}{the list of parameters. The complete list of parameters is
|
||||||
|
available in the \href{http://xgboost.readthedocs.io/en/latest/parameter.html}{online documentation}. Below
|
||||||
|
is a shorter summary:
|
||||||
\itemize{
|
\itemize{
|
||||||
\item \code{objective} objective function, common ones are
|
\item \code{objective} objective function, common ones are
|
||||||
\itemize{
|
\itemize{
|
||||||
\item \code{reg:linear} linear regression
|
\item \code{reg:squarederror} Regression with squared loss.
|
||||||
\item \code{binary:logistic} logistic regression for classification
|
\item \code{binary:logistic} logistic regression for classification.
|
||||||
|
\item See \code{\link[=xgb.train]{xgb.train}()} for complete list of objectives.
|
||||||
}
|
}
|
||||||
\item \code{eta} step size of each boosting step
|
\item \code{eta} step size of each boosting step
|
||||||
\item \code{max_depth} maximum depth of the tree
|
\item \code{max_depth} maximum depth of the tree
|
||||||
@@ -58,7 +78,7 @@ from each CV model. This parameter engages the \code{\link{cb.cv.predict}} callb
|
|||||||
\item{obj}{customized objective function. Returns gradient and second order
|
\item{obj}{customized objective function. Returns gradient and second order
|
||||||
gradient with given prediction and dtrain.}
|
gradient with given prediction and dtrain.}
|
||||||
|
|
||||||
\item{feval}{custimized evaluation function. Returns
|
\item{feval}{customized evaluation function. Returns
|
||||||
\code{list(metric='metric-name', value='metric-value')} with given
|
\code{list(metric='metric-name', value='metric-value')} with given
|
||||||
prediction and dtrain.}
|
prediction and dtrain.}
|
||||||
|
|
||||||
@@ -69,6 +89,9 @@ by the values of outcome labels.}
|
|||||||
(each element must be a vector of test fold's indices). When folds are supplied,
|
(each element must be a vector of test fold's indices). When folds are supplied,
|
||||||
the \code{nfold} and \code{stratified} parameters are ignored.}
|
the \code{nfold} and \code{stratified} parameters are ignored.}
|
||||||
|
|
||||||
|
\item{train_folds}{\code{list} list specifying which indicies to use for training. If \code{NULL}
|
||||||
|
(the default) all indices not specified in \code{folds} will be used for training.}
|
||||||
|
|
||||||
\item{verbose}{\code{boolean}, print the statistics during the process}
|
\item{verbose}{\code{boolean}, print the statistics during the process}
|
||||||
|
|
||||||
\item{print_every_n}{Print each n-th iteration evaluation messages when \code{verbose>0}.
|
\item{print_every_n}{Print each n-th iteration evaluation messages when \code{verbose>0}.
|
||||||
@@ -100,7 +123,7 @@ An object of class \code{xgb.cv.synchronous} with the following elements:
 capture parameters changed by the \code{\link{cb.reset.parameters}} callback.
 \item \code{callbacks} callback functions that were either automatically assigned or
 explicitly passed.
-\item \code{evaluation_log} evaluation history storead as a \code{data.table} with the
+\item \code{evaluation_log} evaluation history stored as a \code{data.table} with the
 first column corresponding to iteration number and the rest corresponding to the
 CV-based evaluation means and standard deviations for the training and test CV-sets.
 It is created by the \code{\link{cb.evaluation.log}} callback.
@@ -115,7 +138,7 @@ An object of class \code{xgb.cv.synchronous} with the following elements:
 (only available with early stopping).
 \item \code{pred} CV prediction values available when \code{prediction} is set.
 It is either vector or matrix (see \code{\link{cb.cv.predict}}).
-\item \code{models} a liost of the CV folds' models. It is only available with the explicit
+\item \code{models} a list of the CV folds' models. It is only available with the explicit
 setting of the \code{cb.cv.predict(save_models = TRUE)} callback.
 }
 }
@@ -4,8 +4,14 @@
 \alias{xgb.dump}
 \title{Dump an xgboost model in text format.}
 \usage{
-xgb.dump(model, fname = NULL, fmap = "", with_stats = FALSE,
-  dump_format = c("text", "json"), ...)
+xgb.dump(
+  model,
+  fname = NULL,
+  fmap = "",
+  with_stats = FALSE,
+  dump_format = c("text", "json"),
+  ...
+)
 }
 \arguments{
 \item{model}{the model object.}
@@ -44,8 +50,8 @@ test <- agaricus.test
 bst <- xgboost(data = train$data, label = train$label, max_depth = 2,
                eta = 1, nthread = 2, nrounds = 2, objective = "binary:logistic")
 # save the model in file 'xgb.model.dump'
-dump.path = file.path(tempdir(), 'model.dump')
-xgb.dump(bst, dump.path, with_stats = TRUE)
+dump_path = file.path(tempdir(), 'model.dump')
+xgb.dump(bst, dump_path, with_stats = TRUE)

 # print the model without saving it to a file
 print(xgb.dump(bst, with_stats = TRUE))
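Continuing from the bst model trained in the example above, the dump_format argument also accepts "json"; a small illustrative sketch:

# dump the same model as JSON text instead of writing it to a file
json_lines <- xgb.dump(bst, dump_format = "json")  # character vector of JSON text
writeLines(head(json_lines))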
@@ -12,7 +12,7 @@ using the \code{cb.gblinear.history()} callback.}

 \item{class_index}{zero-based class index to extract the coefficients for only that
 specific class in a multinomial multiclass model. When it is NULL, all the
-coeffients are returned. Has no effect in non-multiclass models.}
+coefficients are returned. Has no effect in non-multiclass models.}
 }
 \value{
 For an \code{xgb.train} result, a matrix (either dense or sparse) with the columns
@@ -4,8 +4,14 @@
 \alias{xgb.importance}
 \title{Importance of features in a model.}
 \usage{
-xgb.importance(feature_names = NULL, model = NULL, trees = NULL,
-  data = NULL, label = NULL, target = NULL)
+xgb.importance(
+  feature_names = NULL,
+  model = NULL,
+  trees = NULL,
+  data = NULL,
+  label = NULL,
+  target = NULL
+)
 }
 \arguments{
 \item{feature_names}{character vector of feature names. If the model already
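A minimal sketch of computing feature importance with the signature shown above; the training settings are illustrative:

library(xgboost)
data(agaricus.train, package = 'xgboost')
bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label,
               max_depth = 2, eta = 1, nthread = 2, nrounds = 2,
               objective = "binary:logistic")
imp <- xgb.importance(model = bst)  # data.table with Gain, Cover, Frequency per feature
head(imp)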
@@ -33,6 +33,7 @@ bst <- xgboost(data = train$data, label = train$label, max_depth = 2,
 eta = 1, nthread = 2, nrounds = 2,objective = "binary:logistic")
 xgb.save(bst, 'xgb.model')
 bst <- xgb.load('xgb.model')
+if (file.exists('xgb.model')) file.remove('xgb.model')
 pred <- predict(bst, test$data)
 }
 \seealso{
R-package/man/xgb.load.raw.Rd (new file, 14 lines)
@@ -0,0 +1,14 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/xgb.load.raw.R
+\name{xgb.load.raw}
+\alias{xgb.load.raw}
+\title{Load serialised xgboost model from R's raw vector}
+\usage{
+xgb.load.raw(buffer)
+}
+\arguments{
+\item{buffer}{the buffer returned by xgb.save.raw}
+}
+\description{
+User can generate raw memory buffer by calling xgb.save.raw
+}
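A minimal round-trip sketch for the xgb.save.raw / xgb.load.raw pair documented here; the model settings are illustrative:

library(xgboost)
data(agaricus.train, package = 'xgboost')
data(agaricus.test, package = 'xgboost')
bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label,
               max_depth = 2, eta = 1, nthread = 2, nrounds = 2,
               objective = "binary:logistic")
buf <- xgb.save.raw(bst)   # serialize the model to an R raw vector
bst2 <- xgb.load.raw(buf)  # restore it without touching the file system
pred <- predict(bst2, agaricus.test$data)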
@@ -4,8 +4,14 @@
 \alias{xgb.model.dt.tree}
 \title{Parse a boosted tree model text dump}
 \usage{
-xgb.model.dt.tree(feature_names = NULL, model = NULL, text = NULL,
-  trees = NULL, use_int_id = FALSE, ...)
+xgb.model.dt.tree(
+  feature_names = NULL,
+  model = NULL,
+  text = NULL,
+  trees = NULL,
+  use_int_id = FALSE,
+  ...
+)
 }
 \arguments{
 \item{feature_names}{character vector of feature names. If the model already
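A short sketch of parsing a trained booster into a per-node data.table with the signature above (settings illustrative):

library(xgboost)
data(agaricus.train, package = 'xgboost')
bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label,
               max_depth = 2, eta = 1, nthread = 2, nrounds = 2,
               objective = "binary:logistic")
dt <- xgb.model.dt.tree(model = bst)  # one row per tree node: splits, gain, cover
head(dt)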
@@ -5,11 +5,17 @@
 \alias{xgb.plot.deepness}
 \title{Plot model trees deepness}
 \usage{
-xgb.ggplot.deepness(model = NULL, which = c("2x1", "max.depth", "med.depth",
-  "med.weight"))
+xgb.ggplot.deepness(
+  model = NULL,
+  which = c("2x1", "max.depth", "med.depth", "med.weight")
+)

-xgb.plot.deepness(model = NULL, which = c("2x1", "max.depth", "med.depth",
-  "med.weight"), plot = TRUE, ...)
+xgb.plot.deepness(
+  model = NULL,
+  which = c("2x1", "max.depth", "med.depth", "med.weight"),
+  plot = TRUE,
+  ...
+)
 }
 \arguments{
 \item{model}{either an \code{xgb.Booster} model generated by the \code{xgb.train} function
@@ -50,7 +56,7 @@ per tree with respect to tree number are created. And \code{which="med.weight"}
 a tree's median absolute leaf weight changes through the iterations.

 This function was inspired by the blog post
-\url{http://aysent.github.io/2015/11/08/random-forest-leaf-visualization.html}.
+\url{https://github.com/aysent/random-forest-leaf-visualization}.
 }
 \examples{

@@ -5,12 +5,25 @@
 \alias{xgb.plot.importance}
 \title{Plot feature importance as a bar graph}
 \usage{
-xgb.ggplot.importance(importance_matrix = NULL, top_n = NULL,
-  measure = NULL, rel_to_first = FALSE, n_clusters = c(1:10), ...)
+xgb.ggplot.importance(
+  importance_matrix = NULL,
+  top_n = NULL,
+  measure = NULL,
+  rel_to_first = FALSE,
+  n_clusters = c(1:10),
+  ...
+)

-xgb.plot.importance(importance_matrix = NULL, top_n = NULL,
-  measure = NULL, rel_to_first = FALSE, left_margin = 10, cex = NULL,
-  plot = TRUE, ...)
+xgb.plot.importance(
+  importance_matrix = NULL,
+  top_n = NULL,
+  measure = NULL,
+  rel_to_first = FALSE,
+  left_margin = 10,
+  cex = NULL,
+  plot = TRUE,
+  ...
+)
 }
 \arguments{
 \item{importance_matrix}{a \code{data.table} returned by \code{\link{xgb.importance}}.}
@@ -59,8 +72,8 @@ For linear models, \code{rel_to_first = FALSE} would show actual values of the c
 Setting \code{rel_to_first = TRUE} allows to see the picture from the perspective of
 "what is feature's importance contribution relative to the most important feature?"

-The ggplot-backend method also performs 1-D custering of the importance values,
-with bar colors coresponding to different clusters that have somewhat similar importance values.
+The ggplot-backend method also performs 1-D clustering of the importance values,
+with bar colors corresponding to different clusters that have somewhat similar importance values.
 }
 \examples{
 data(agaricus.train)
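A minimal sketch tying xgb.importance to the plotting front-end above; all values are illustrative:

library(xgboost)
data(agaricus.train, package = 'xgboost')
bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label,
               max_depth = 3, eta = 1, nthread = 2, nrounds = 2,
               objective = "binary:logistic")
imp <- xgb.importance(model = bst)
xgb.plot.importance(imp, top_n = 10, measure = "Gain")  # base-graphics bar chart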
@@ -4,8 +4,15 @@
 \alias{xgb.plot.multi.trees}
 \title{Project all trees on one tree and plot it}
 \usage{
-xgb.plot.multi.trees(model, feature_names = NULL, features_keep = 5,
-  plot_width = NULL, plot_height = NULL, render = TRUE, ...)
+xgb.plot.multi.trees(
+  model,
+  feature_names = NULL,
+  features_keep = 5,
+  plot_width = NULL,
+  plot_height = NULL,
+  render = TRUE,
+  ...
+)
 }
 \arguments{
 \item{model}{produced by the \code{xgb.train} function.}
@@ -4,13 +4,33 @@
 \alias{xgb.plot.shap}
 \title{SHAP contribution dependency plots}
 \usage{
-xgb.plot.shap(data, shap_contrib = NULL, features = NULL, top_n = 1,
-  model = NULL, trees = NULL, target_class = NULL,
-  approxcontrib = FALSE, subsample = NULL, n_col = 1, col = rgb(0, 0, 1,
-  0.2), pch = ".", discrete_n_uniq = 5, discrete_jitter = 0.01,
-  ylab = "SHAP", plot_NA = TRUE, col_NA = rgb(0.7, 0, 1, 0.6),
-  pch_NA = ".", pos_NA = 1.07, plot_loess = TRUE, col_loess = 2,
-  span_loess = 0.5, which = c("1d", "2d"), plot = TRUE, ...)
+xgb.plot.shap(
+  data,
+  shap_contrib = NULL,
+  features = NULL,
+  top_n = 1,
+  model = NULL,
+  trees = NULL,
+  target_class = NULL,
+  approxcontrib = FALSE,
+  subsample = NULL,
+  n_col = 1,
+  col = rgb(0, 0, 1, 0.2),
+  pch = ".",
+  discrete_n_uniq = 5,
+  discrete_jitter = 0.01,
+  ylab = "SHAP",
+  plot_NA = TRUE,
+  col_NA = rgb(0.7, 0, 1, 0.6),
+  pch_NA = ".",
+  pos_NA = 1.07,
+  plot_loess = TRUE,
+  col_loess = 2,
+  span_loess = 0.5,
+  which = c("1d", "2d"),
+  plot = TRUE,
+  ...
+)
 }
 \arguments{
 \item{data}{data as a \code{matrix} or \code{dgCMatrix}.}
@@ -63,7 +83,7 @@ more than 5 distinct values.}

 \item{col_loess}{a color to use for the loess curves.}

-\item{span_loess}{the \code{span} paramerer in \code{\link[stats]{loess}}'s call.}
+\item{span_loess}{the \code{span} parameter in \code{\link[stats]{loess}}'s call.}

 \item{which}{whether to do univariate or bivariate plotting. NOTE: only 1D is implemented so far.}

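A minimal sketch of SHAP dependency plots for the two highest-contribution features, assuming the signature above; settings are illustrative:

library(xgboost)
data(agaricus.train, package = 'xgboost')
bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label,
               max_depth = 3, eta = 1, nthread = 2, nrounds = 10,
               objective = "binary:logistic")
# one dependency plot per selected feature, laid out in two columns
xgb.plot.shap(agaricus.train$data, model = bst, top_n = 2, n_col = 2)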
@@ -4,9 +4,16 @@
 \alias{xgb.plot.tree}
 \title{Plot a boosted tree model}
 \usage{
-xgb.plot.tree(feature_names = NULL, model = NULL, trees = NULL,
-  plot_width = NULL, plot_height = NULL, render = TRUE,
-  show_node_id = FALSE, ...)
+xgb.plot.tree(
+  feature_names = NULL,
+  model = NULL,
+  trees = NULL,
+  plot_width = NULL,
+  plot_height = NULL,
+  render = TRUE,
+  show_node_id = FALSE,
+  ...
+)
 }
 \arguments{
 \item{feature_names}{names of each feature as a \code{character} vector.}
@@ -22,7 +22,11 @@ of \code{\link{xgb.train}}.

 Note: a model can also be saved as an R-object (e.g., by using \code{\link[base]{readRDS}}
 or \code{\link[base]{save}}). However, it would then only be compatible with R, and
-corresponding R-methods would need to be used to load it.
+corresponding R-methods would need to be used to load it. Moreover, persisting the model with
+\code{\link[base]{readRDS}} or \code{\link[base]{save}} will cause compatibility problems in
+future versions of XGBoost. Consult \code{\link{a-compatibility-note-for-saveRDS-save}} to learn
+how to persist models in a future-proof way, i.e. to make the model accessible in future
+releases of XGBoost.
 }
 \examples{
 data(agaricus.train, package='xgboost')
@@ -33,6 +37,7 @@ bst <- xgboost(data = train$data, label = train$label, max_depth = 2,
 eta = 1, nthread = 2, nrounds = 2,objective = "binary:logistic")
 xgb.save(bst, 'xgb.model')
 bst <- xgb.load('xgb.model')
+if (file.exists('xgb.model')) file.remove('xgb.model')
 pred <- predict(bst, test$data)
 }
 \seealso{
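A short sketch contrasting the two persistence routes discussed in the note above; file paths are illustrative:

library(xgboost)
data(agaricus.train, package = 'xgboost')
bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label,
               max_depth = 2, eta = 1, nthread = 2, nrounds = 2,
               objective = "binary:logistic")
fname <- file.path(tempdir(), 'xgb.model')
xgb.save(bst, fname)        # portable across XGBoost versions and bindings
bst2 <- xgb.load(fname)
# saveRDS(bst, 'bst.rds')   # R-only, and may break in future XGBoost releases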
@@ -3,7 +3,7 @@
 \name{xgb.save.raw}
 \alias{xgb.save.raw}
 \title{Save xgboost model to R's raw vector,
-user can call xgb.load to load the model back from raw vector}
+user can call xgb.load.raw to load the model back from raw vector}
 \usage{
 xgb.save.raw(model)
 }
@@ -21,7 +21,7 @@ test <- agaricus.test
 bst <- xgboost(data = train$data, label = train$label, max_depth = 2,
 eta = 1, nthread = 2, nrounds = 2,objective = "binary:logistic")
 raw <- xgb.save.raw(bst)
-bst <- xgb.load(raw)
+bst <- xgb.load.raw(raw)
 pred <- predict(bst, test$data)

 }
R-package/man/xgb.serialize.Rd (new file, 29 lines)
@@ -0,0 +1,29 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/xgb.serialize.R
+\name{xgb.serialize}
+\alias{xgb.serialize}
+\title{Serialize the booster instance into R's raw vector. The serialization method differs
+from \code{\link{xgb.save.raw}} as the latter one saves only the model but not
+parameters. This serialization format is not stable across different xgboost versions.}
+\usage{
+xgb.serialize(booster)
+}
+\arguments{
+\item{booster}{the booster instance}
+}
+\description{
+Serialize the booster instance into R's raw vector. The serialization method differs
+from \code{\link{xgb.save.raw}} as the latter one saves only the model but not
+parameters. This serialization format is not stable across different xgboost versions.
+}
+\examples{
+data(agaricus.train, package='xgboost')
+data(agaricus.test, package='xgboost')
+train <- agaricus.train
+test <- agaricus.test
+bst <- xgboost(data = train$data, label = train$label, max_depth = 2,
+eta = 1, nthread = 2, nrounds = 2,objective = "binary:logistic")
+raw <- xgb.serialize(bst)
+bst <- xgb.unserialize(raw)
+
+}
@@ -5,20 +5,46 @@
 \alias{xgboost}
 \title{eXtreme Gradient Boosting Training}
 \usage{
-xgb.train(params = list(), data, nrounds, watchlist = list(), obj = NULL,
-  feval = NULL, verbose = 1, print_every_n = 1L,
-  early_stopping_rounds = NULL, maximize = NULL, save_period = NULL,
-  save_name = "xgboost.model", xgb_model = NULL, callbacks = list(), ...)
+xgb.train(
+  params = list(),
+  data,
+  nrounds,
+  watchlist = list(),
+  obj = NULL,
+  feval = NULL,
+  verbose = 1,
+  print_every_n = 1L,
+  early_stopping_rounds = NULL,
+  maximize = NULL,
+  save_period = NULL,
+  save_name = "xgboost.model",
+  xgb_model = NULL,
+  callbacks = list(),
+  ...
+)

-xgboost(data = NULL, label = NULL, missing = NA, weight = NULL,
-  params = list(), nrounds, verbose = 1, print_every_n = 1L,
-  early_stopping_rounds = NULL, maximize = NULL, save_period = NULL,
-  save_name = "xgboost.model", xgb_model = NULL, callbacks = list(), ...)
+xgboost(
+  data = NULL,
+  label = NULL,
+  missing = NA,
+  weight = NULL,
+  params = list(),
+  nrounds,
+  verbose = 1,
+  print_every_n = 1L,
+  early_stopping_rounds = NULL,
+  maximize = NULL,
+  save_period = NULL,
+  save_name = "xgboost.model",
+  xgb_model = NULL,
+  callbacks = list(),
+  ...
+)
 }
 \arguments{
-\item{params}{the list of parameters.
-The complete list of parameters is available at \url{http://xgboost.readthedocs.io/en/latest/parameter.html}.
-Below is a shorter summary:
+\item{params}{the list of parameters. The complete list of parameters is
+available in the \href{http://xgboost.readthedocs.io/en/latest/parameter.html}{online documentation}. Below
+is a shorter summary:

 1. General Parameters

@@ -35,10 +61,11 @@ xgboost(data = NULL, label = NULL, missing = NA, weight = NULL,
 \item \code{gamma} minimum loss reduction required to make a further partition on a leaf node of the tree. the larger, the more conservative the algorithm will be.
 \item \code{max_depth} maximum depth of a tree. Default: 6
 \item \code{min_child_weight} minimum sum of instance weight (hessian) needed in a child. If the tree partition step results in a leaf node with the sum of instance weight less than min_child_weight, then the building process will give up further partitioning. In linear regression mode, this simply corresponds to minimum number of instances needed to be in each node. The larger, the more conservative the algorithm will be. Default: 1
-\item \code{subsample} subsample ratio of the training instance. Setting it to 0.5 means that xgboost randomly collected half of the data instances to grow trees and this will prevent overfitting. It makes computation shorter (because less data to analyse). It is advised to use this parameter with \code{eta} and increase \code{nround}. Default: 1
+\item \code{subsample} subsample ratio of the training instance. Setting it to 0.5 means that xgboost randomly collected half of the data instances to grow trees and this will prevent overfitting. It makes computation shorter (because less data to analyse). It is advised to use this parameter with \code{eta} and increase \code{nrounds}. Default: 1
 \item \code{colsample_bytree} subsample ratio of columns when constructing each tree. Default: 1
 \item \code{num_parallel_tree} Experimental parameter. number of trees to grow per round. Useful to test Random Forest through Xgboost (set \code{colsample_bytree < 1}, \code{subsample < 1} and \code{round = 1}) accordingly. Default: 1
 \item \code{monotone_constraints} A numerical vector consists of \code{1}, \code{0} and \code{-1} with its length equals to the number of features in the training data. \code{1} is increasing, \code{-1} is decreasing and \code{0} is no constraint.
+\item \code{interaction_constraints} A list of vectors specifying feature indices of permitted interactions. Each item of the list represents one permitted interaction where specified features are allowed to interact with each other. Feature index values should start from \code{0} (\code{0} references the first column). Leave argument unspecified for no interaction constraints.
 }

 2.2. Parameter for Linear Booster
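A small sketch of the two constraint parameters listed above, on a synthetic two-feature regression problem; the data, the tree_method = "hist" choice and all other settings here are illustrative assumptions:

library(xgboost)
set.seed(1)
x <- matrix(runif(200), ncol = 2)
y <- 2 * x[, 1] - x[, 2] + rnorm(100, sd = 0.1)
bst <- xgboost(data = x, label = y, nrounds = 10, max_depth = 2, eta = 0.3,
               objective = "reg:squarederror", tree_method = "hist",
               monotone_constraints = c(1, -1),          # increasing in x1, decreasing in x2
               interaction_constraints = list(c(0, 1)))  # features 0 and 1 may interact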
@@ -54,14 +81,24 @@ xgboost(data = NULL, label = NULL, missing = NA, weight = NULL,
 \itemize{
 \item \code{objective} specify the learning task and the corresponding learning objective, users can pass a self-defined function to it. The default objective options are below:
 \itemize{
-\item \code{reg:linear} linear regression (Default).
+\item \code{reg:squarederror} Regression with squared loss (Default).
+\item \code{reg:squaredlogerror}: regression with squared log loss \eqn{1/2 * (log(pred + 1) - log(label + 1))^2}. All inputs are required to be greater than -1. Also, see metric rmsle for possible issue with this objective.
 \item \code{reg:logistic} logistic regression.
+\item \code{reg:pseudohubererror}: regression with Pseudo Huber loss, a twice differentiable alternative to absolute loss.
 \item \code{binary:logistic} logistic regression for binary classification. Output probability.
 \item \code{binary:logitraw} logistic regression for binary classification, output score before logistic transformation.
-\item \code{num_class} set the number of classes. To use only with multiclass objectives.
+\item \code{binary:hinge}: hinge loss for binary classification. This makes predictions of 0 or 1, rather than producing probabilities.
+\item \code{count:poisson}: poisson regression for count data, output mean of poisson distribution. \code{max_delta_step} is set to 0.7 by default in poisson regression (used to safeguard optimization).
+\item \code{survival:cox}: Cox regression for right censored survival time data (negative values are considered right censored). Note that predictions are returned on the hazard ratio scale (i.e., as HR = exp(marginal_prediction) in the proportional hazard function \code{h(t) = h0(t) * HR}).
+\item \code{survival:aft}: Accelerated failure time model for censored survival time data. See \href{https://xgboost.readthedocs.io/en/latest/tutorials/aft_survival_analysis.html}{Survival Analysis with Accelerated Failure Time} for details.
+\item \code{aft_loss_distribution}: Probability Density Function used by \code{survival:aft} and \code{aft-nloglik} metric.
 \item \code{multi:softmax} set xgboost to do multiclass classification using the softmax objective. Class is represented by a number and should be from 0 to \code{num_class - 1}.
 \item \code{multi:softprob} same as softmax, but prediction outputs a vector of ndata * nclass elements, which can be further reshaped to ndata, nclass matrix. The result contains predicted probabilities of each data point belonging to each class.
 \item \code{rank:pairwise} set xgboost to do ranking task by minimizing the pairwise loss.
+\item \code{rank:ndcg}: Use LambdaMART to perform list-wise ranking where \href{https://en.wikipedia.org/wiki/Discounted_cumulative_gain}{Normalized Discounted Cumulative Gain (NDCG)} is maximized.
+\item \code{rank:map}: Use LambdaMART to perform list-wise ranking where \href{https://en.wikipedia.org/wiki/Evaluation_measures_(information_retrieval)#Mean_average_precision}{Mean Average Precision (MAP)} is maximized.
+\item \code{reg:gamma}: gamma regression with log-link. Output is a mean of gamma distribution. It might be useful, e.g., for modeling insurance claims severity, or for any outcome that might be \href{https://en.wikipedia.org/wiki/Gamma_distribution#Applications}{gamma-distributed}.
+\item \code{reg:tweedie}: Tweedie regression with log-link. It might be useful, e.g., for modeling total loss in insurance, or for any outcome that might be \href{https://en.wikipedia.org/wiki/Tweedie_distribution#Applications}{Tweedie-distributed}.
 }
 \item \code{base_score} the initial prediction score of all instances, global bias. Default: 0.5
 \item \code{eval_metric} evaluation metrics for validation data. Users can pass a self-defined function to it. Default: metric will be assigned according to objective(rmse for regression, and error for classification, mean average precision for ranking). List is provided in detail section.
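As a quick illustration of the multi:softprob reshaping described above, a minimal sketch on the built-in iris data (settings illustrative):

library(xgboost)
x <- as.matrix(iris[, 1:4])
y <- as.integer(iris$Species) - 1             # classes must run from 0 to num_class - 1
bst <- xgboost(data = x, label = y, nrounds = 10, max_depth = 3, eta = 0.3,
               objective = "multi:softprob", num_class = 3, verbose = 0)
pred <- predict(bst, x)                       # vector of length nrow(x) * num_class
prob <- matrix(pred, ncol = 3, byrow = TRUE)  # one row of class probabilities per observation
head(prob)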
@@ -84,7 +121,7 @@ the performance of each round's model on mat1 and mat2.}
 \item{obj}{customized objective function. Returns gradient and second order
 gradient with given prediction and dtrain.}

-\item{feval}{custimized evaluation function. Returns
+\item{feval}{customized evaluation function. Returns
 \code{list(metric='metric-name', value='metric-value')} with given
 prediction and dtrain.}

@@ -138,14 +175,14 @@ An object of class \code{xgb.Booster} with the following elements:
 \item \code{handle} a handle (pointer) to the xgboost model in memory.
 \item \code{raw} a cached memory dump of the xgboost model saved as R's \code{raw} type.
 \item \code{niter} number of boosting iterations.
-\item \code{evaluation_log} evaluation history storead as a \code{data.table} with the
+\item \code{evaluation_log} evaluation history stored as a \code{data.table} with the
 first column corresponding to iteration number and the rest corresponding to evaluation
 metrics' values. It is created by the \code{\link{cb.evaluation.log}} callback.
 \item \code{call} a function call.
 \item \code{params} parameters that were passed to the xgboost library. Note that it does not
 capture parameters changed by the \code{\link{cb.reset.parameters}} callback.
 \item \code{callbacks} callback functions that were either automatically assigned or
-explicitely passed.
+explicitly passed.
 \item \code{best_iteration} iteration number with the best evaluation metric value
 (only available with early stopping).
 \item \code{best_ntreelimit} the \code{ntreelimit} value corresponding to the best iteration,
@@ -154,7 +191,7 @@ An object of class \code{xgb.Booster} with the following elements:
 \item \code{best_score} the best evaluation metric value during early stopping.
 (only available with early stopping).
 \item \code{feature_names} names of the training dataset features
-(only when comun names were defined in training data).
+(only when column names were defined in training data).
 \item \code{nfeatures} number of features in training data.
 }
 }
@@ -176,7 +213,7 @@ The evaluation metric is chosen automatically by Xgboost (according to the objec
 when the \code{eval_metric} parameter is not provided.
 User may set one or several \code{eval_metric} parameters.
 Note that when using a customized metric, only this single metric can be used.
-The folloiwing is the list of built-in metrics for which Xgboost provides optimized implementation:
+The following is the list of built-in metrics for which Xgboost provides optimized implementation:
 \itemize{
 \item \code{rmse} root mean square error. \url{http://en.wikipedia.org/wiki/Root_mean_square_error}
 \item \code{logloss} negative log-likelihood. \url{http://en.wikipedia.org/wiki/Log-likelihood}
@@ -208,7 +245,7 @@ dtest <- xgb.DMatrix(agaricus.test$data, label = agaricus.test$label)
 watchlist <- list(train = dtrain, eval = dtest)

 ## A simple xgb.train example:
-param <- list(max_depth = 2, eta = 1, silent = 1, nthread = 2,
+param <- list(max_depth = 2, eta = 1, verbose = 0, nthread = 2,
               objective = "binary:logistic", eval_metric = "auc")
 bst <- xgb.train(param, dtrain, nrounds = 2, watchlist)

@@ -229,12 +266,12 @@ evalerror <- function(preds, dtrain) {

 # These functions could be used by passing them either:
 # as 'objective' and 'eval_metric' parameters in the params list:
-param <- list(max_depth = 2, eta = 1, silent = 1, nthread = 2,
+param <- list(max_depth = 2, eta = 1, verbose = 0, nthread = 2,
               objective = logregobj, eval_metric = evalerror)
 bst <- xgb.train(param, dtrain, nrounds = 2, watchlist)

 # or through the ... arguments:
-param <- list(max_depth = 2, eta = 1, silent = 1, nthread = 2)
+param <- list(max_depth = 2, eta = 1, verbose = 0, nthread = 2)
 bst <- xgb.train(param, dtrain, nrounds = 2, watchlist,
                  objective = logregobj, eval_metric = evalerror)

@@ -244,7 +281,7 @@ bst <- xgb.train(param, dtrain, nrounds = 2, watchlist,


 ## An xgb.train example of using variable learning rates at each iteration:
-param <- list(max_depth = 2, eta = 1, silent = 1, nthread = 2,
+param <- list(max_depth = 2, eta = 1, verbose = 0, nthread = 2,
               objective = "binary:logistic", eval_metric = "auc")
 my_etas <- list(eta = c(0.5, 0.1))
 bst <- xgb.train(param, dtrain, nrounds = 2, watchlist,
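The example hunks above reference logregobj and evalerror without showing their bodies; a minimal sketch of such a custom objective and evaluation pair, written here as an illustration rather than quoted from the package:

logregobj <- function(preds, dtrain) {
  labels <- getinfo(dtrain, "label")
  preds <- 1 / (1 + exp(-preds))   # raw margin -> probability
  grad <- preds - labels           # first-order gradient of the log loss
  hess <- preds * (1 - preds)      # second-order gradient
  list(grad = grad, hess = hess)
}

evalerror <- function(preds, dtrain) {
  labels <- getinfo(dtrain, "label")
  err <- mean(as.numeric(preds > 0) != labels)
  list(metric = "error", value = err)
}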
R-package/man/xgb.unserialize.Rd (new file, 14 lines)
@@ -0,0 +1,14 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/xgb.unserialize.R
+\name{xgb.unserialize}
+\alias{xgb.unserialize}
+\title{Load the instance back from \code{\link{xgb.serialize}}}
+\usage{
+xgb.unserialize(buffer)
+}
+\arguments{
+\item{buffer}{the buffer containing booster instance saved by \code{\link{xgb.serialize}}}
+}
+\description{
+Load the instance back from \code{\link{xgb.serialize}}
+}
@@ -3,7 +3,7 @@ PKGROOT=../../
 ENABLE_STD_THREAD=1
 # _*_ mode: Makefile; _*_

-CXX_STD = CXX11
+CXX_STD = CXX14

 XGB_RFLAGS = -DXGBOOST_STRICT_R_MODE=1 -DDMLC_LOG_BEFORE_THROW=0\
 -DDMLC_ENABLE_STD_THREAD=$(ENABLE_STD_THREAD) -DDMLC_DISABLE_STDIN=1\
@@ -17,8 +17,8 @@ endif
 $(foreach v, $(XGB_RFLAGS), $(warning $(v)))

 PKG_CPPFLAGS= -I$(PKGROOT)/include -I$(PKGROOT)/dmlc-core/include -I$(PKGROOT)/rabit/include -I$(PKGROOT) $(XGB_RFLAGS)
-PKG_CXXFLAGS= @OPENMP_CXXFLAGS@ $(SHLIB_PTHREAD_FLAGS)
-PKG_LIBS = @OPENMP_CXXFLAGS@ $(SHLIB_PTHREAD_FLAGS)
+PKG_CXXFLAGS= @OPENMP_CXXFLAGS@ @ENDIAN_FLAG@ -pthread
+PKG_LIBS = @OPENMP_CXXFLAGS@ @OPENMP_LIB@ @ENDIAN_FLAG@ @BACKTRACE_LIB@ -pthread
 OBJECTS= ./xgboost_R.o ./xgboost_custom.o ./xgboost_assert.o ./init.o\
 $(PKGROOT)/amalgamation/xgboost-all0.o $(PKGROOT)/amalgamation/dmlc-minimum0.o\
 $(PKGROOT)/rabit/src/engine_empty.o $(PKGROOT)/rabit/src/c_api.o
Some files were not shown because too many files have changed in this diff.