Mirror of https://github.com/grafana/grafana.git (synced 2026-01-07 01:44:00 +08:00)

Compare commits: 2876 commits
.bra.toml (Normal file, 17 lines)
@@ -0,0 +1,17 @@
[run]
init_cmds = [
  ["go", "build", "-o", "./bin/grafana-server"],
  ["./bin/grafana-server"]
]
watch_all = true
watch_dirs = [
  "$WORKDIR/pkg",
  "$WORKDIR/public/views",
  "$WORKDIR/conf",
]
watch_exts = [".go", "conf/*"]
build_delay = 1500
cmds = [
  ["go", "build", "-o", "./bin/grafana-server"],
  ["./bin/grafana-server"]
]
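The config above drives a file-watching build loop. A minimal usage sketch, assuming the `bra` watcher from github.com/Unknwon/bra (the tool this `.bra.toml` format belongs to; the install command is an assumption, not part of this diff):

```bash
# Assumption: a working Go toolchain and this repository checked out as the workdir.
go get github.com/Unknwon/bra   # install the watcher

# From the repository root: runs init_cmds once, then re-runs cmds whenever
# files under pkg/, public/views/ or conf/ change (after the 1500 ms build_delay).
bra run
```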
.gitignore (vendored, 32 lines changed)
@@ -1,6 +1,30 @@
node_modules
coverage/
.aws-config.json
dist
web.config
config.js
*.sublime-workspace
awsconfig
/dist
/tmp

docs/AWS_S3_BUCKET
docs/GIT_BRANCH
docs/VERSION
docs/GITCOMMIT
docs/changed-files
docs/changed-files

# locally required config files
public/css/*.min.css

# Editor junk
*.sublime-workspace
*.swp
.idea/
*.iml

/data/*
/bin/*

conf/custom.ini
fig.yml
profile.cov
.hooks/pre-commit (Executable file, 7 lines)
@@ -0,0 +1,7 @@
#!/usr/bin/env bash

test -z "$(gofmt -s -l . | grep -v Godeps/_workspace/src/ | tee /dev/stderr)"
if [ $? -gt 0 ]; then
  echo "Some files aren't formatted, please run 'go fmt ./pkg/...' to format your source code before committing"
  exit 1
fi
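The hook lives in `.hooks/` rather than `.git/hooks/`, so it is opt-in. A minimal sketch of wiring it up; the symlink approach is an assumption (any copy into `.git/hooks/` works):

```bash
# From the repository root; assumes no existing .git/hooks/pre-commit you want to keep.
ln -s ../../.hooks/pre-commit .git/hooks/pre-commit

# The hook then runs `gofmt -s -l` (ignoring Godeps/_workspace/src/) on every commit
# and aborts with the message above if any Go file is unformatted.
```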
.jscs.json (Normal file, 13 lines)
@@ -0,0 +1,13 @@
{
  "disallowImplicitTypeConversion": ["string"],
  "disallowKeywords": ["with"],
  "disallowMultipleLineBreaks": true,
  "disallowMixedSpacesAndTabs": true,
  "disallowTrailingWhitespace": true,
  "requireSpacesInFunctionExpression": {
    "beforeOpeningCurlyBrace": true
  },
  "disallowSpacesInsideArrayBrackets": true,
  "disallowSpacesInsideParentheses": true,
  "validateIndentation": 2
}
.jsfmtrc (Normal file, 21 lines)
@@ -0,0 +1,21 @@
{
  "preset" : "default",

  "lineBreak" : {
    "before" : {
      "VariableDeclarationWithoutInit" : 0,
    },

    "after": {
      "AssignmentOperator": -1,
      "ArgumentListArrayExpression": ">=1"
    }
  },

  "whiteSpace" : {
    "before" : {
    },
    "after" : {
    }
  }
}
@@ -18,12 +18,12 @@
  "noempty": true,
  "undef": true,
  "boss": true,
  "trailing": false,
  "trailing": true,
  "laxbreak": true,
  "laxcomma": true,
  "sub": true,
  "unused": true,

  "maxdepth": 5,
  "maxlen": 140,

  "globals": {
@@ -1,5 +0,0 @@
language: node_js
node_js:
- "0.10"
before_script:
- npm install -g grunt-cli
CHANGELOG.md (Normal file, 566 lines)
@@ -0,0 +1,566 @@
|
||||
# 2.1.0 (2015-08-04)
|
||||
|
||||
**Data sources**
|
||||
- [Issue #1525](https://github.com/grafana/grafana/issues/1525). InfluxDB: Full support for InfluxDB 0.9 with new adapted query editor
|
||||
- [Issue #2191](https://github.com/grafana/grafana/issues/2191). KairosDB: Grafana now ships with a KairosDB data source plugin, thx @masaori335
|
||||
- [Issue #1177](https://github.com/grafana/grafana/issues/1177). OpenTSDB: Limit tags by metric, the OpenTSDB config option tsd.core.meta.enable_realtime_ts must be enabled for the OpenTSDB lookup api (see the snippet below this list)
|
||||
- [Issue #1250](https://github.com/grafana/grafana/issues/1250). OpenTSDB: Support for template variable values lookup queries
|
||||
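For the OpenTSDB tag-lookup entry above, a minimal sketch of enabling the required option; the config file path and restart command are assumptions that depend on how OpenTSDB is installed:

```bash
# Enable real-time timeseries metadata so the lookup API used by Grafana has data.
echo "tsd.core.meta.enable_realtime_ts = true" | sudo tee -a /etc/opentsdb/opentsdb.conf
sudo service opentsdb restart   # restart the TSD so the setting takes effect
```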
|
||||
**New dashboard features**
|
||||
- [Issue #1144](https://github.com/grafana/grafana/issues/1144). Templating: You can now select multiple template variable values at the same time.
|
||||
- [Issue #1922](https://github.com/grafana/grafana/issues/1922). Templating: Specify multiple variable values via URL params (see the example below this list).
|
||||
- [Issue #1888](https://github.com/grafana/grafana/issues/1888). Templating: Repeat panel or row for each selected template variable value
|
||||
- [Issue #1944](https://github.com/grafana/grafana/issues/1944). Dashboard: Custom Navigation links & dynamic links to related dashboards
|
||||
- [Issue #590](https://github.com/grafana/grafana/issues/590). Graph: Define series color using regex rule
|
||||
- [Issue #2162](https://github.com/grafana/grafana/issues/2162). Graph: New series style override, negative-y transform and stack groups
|
||||
- [Issue #2096](https://github.com/grafana/grafana/issues/2096). Dashboard list panel: Now supports search by multiple tags
|
||||
- [Issue #2203](https://github.com/grafana/grafana/issues/2203). Singlestat: Now supports string values
|
||||
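For the multi-value and URL-parameter templating entries above, a rough sketch of what such a link can look like; the host, dashboard slug, and variable name are made up for illustration:

```bash
# Repeating var-<name> selects several values for that template variable.
DASHBOARD_URL="http://grafana.example.com/dashboard/db/my-dashboard"
xdg-open "${DASHBOARD_URL}?var-host=server1&var-host=server2"   # use `open` on macOS
```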
|
||||
**User or Organization admin**
|
||||
- [Issue #1899](https://github.com/grafana/grafana/issues/1899). Organization: You can now update the organization user role directly (without removing and re-adding the organization user).
|
||||
- [Issue #2088](https://github.com/grafana/grafana/issues/2088). Roles: New user role `Read Only Editor` that replaces the old `Viewer` role behavior
|
||||
|
||||
**Backend**
|
||||
- [Issue #2218](https://github.com/grafana/grafana/issues/2218). Auth: You can now authenticate against the API with username / password using basic auth (see the example below this list)
|
||||
- [Issue #2095](https://github.com/grafana/grafana/issues/2095). Search: Search now supports filtering by multiple dashboard tags
|
||||
- [Issue #1905](https://github.com/grafana/grafana/issues/1905). Github OAuth: You can now configure a Github team membership requirement, thx @dewski
|
||||
- [Issue #2052](https://github.com/grafana/grafana/issues/2052). Github OAuth: You can now configure a Github organization requirement, thx @indrekj
|
||||
- [Issue #1891](https://github.com/grafana/grafana/issues/1891). Security: New config option to disable the use of gravatar for profile images
|
||||
- [Issue #1921](https://github.com/grafana/grafana/issues/1921). Auth: Support for user authentication via reverse proxy header (like X-Authenticated-User, or X-WEBAUTH-USER)
|
||||
- [Issue #960](https://github.com/grafana/grafana/issues/960). Search: Backend can now index a folder with json files, will be available in search (saving back to folder is not supported, this feature is meant for static generated json dashboards)
|
||||
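For the basic auth entry above, a minimal sketch of calling the HTTP API with a username and password; host, credentials, and the endpoint are illustrative only:

```bash
# Username/password via HTTP basic auth instead of an API key.
curl -u admin:admin http://localhost:3000/api/org
```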
|
||||
**Breaking changes**
|
||||
- [Issue #1826](https://github.com/grafana/grafana/issues/1826). Users with role 'Viewer' are now prohibited from entering edit mode (and doing other transient dashboard edits). A new role `Read Only Editor` will replace the old Viewer behavior
|
||||
- [Issue #1928](https://github.com/grafana/grafana/issues/1928). HTTP API: GET /api/dashboards/db/:slug response changed property `model` to `dashboard` to match the POST request naming
|
||||
- Backend render URL changed from `/render/dashboard/solo` to `/render/dashboard-solo/` (in order to have consistent dashboard url `/dashboard/:type/:slug`)
|
||||
- Search HTTP API response has changed (simplified), tags list moved to a separate HTTP resource URI
|
||||
- Datasource HTTP API breaking change: adding a data source is now POST /api/datasources/, updating one is now PUT /api/datasources/:id (see the sketch below this list)
|
||||
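A rough sketch of the renamed endpoints mentioned in the breaking changes above; the credentials, JSON bodies, dashboard slug, and panel id are illustrative, not taken from the changelog:

```bash
# Add a data source.
curl -u admin:admin -H "Content-Type: application/json" \
  -X POST http://localhost:3000/api/datasources/ \
  -d '{"name":"graphite","type":"graphite","url":"http://localhost:8080","access":"proxy"}'

# Update the data source with id 1.
curl -u admin:admin -H "Content-Type: application/json" \
  -X PUT http://localhost:3000/api/datasources/1 \
  -d '{"id":1,"name":"graphite","type":"graphite","url":"http://localhost:8080","access":"proxy"}'

# Render a single panel PNG via the renamed solo route.
curl -u admin:admin "http://localhost:3000/render/dashboard-solo/db/my-dashboard?panelId=1" -o panel.png
```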
|
||||
**Fixes**
|
||||
- [Issue #2185](https://github.com/grafana/grafana/issues/2185). Graph: fixed PNG rendering of panels with legend table to the right
|
||||
- [Issue #2163](https://github.com/grafana/grafana/issues/2163). Backend: Load dashboards with capital letters in the dashboard url slug (url id)
|
||||
|
||||
# 2.0.3 (unreleased - 2.0.x branch)
|
||||
|
||||
**Fixes**
|
||||
- [Issue #1872](https://github.com/grafana/grafana/issues/1872). Firefox/IE issue, invisible text in dashboard search fixed
|
||||
- [Issue #1857](https://github.com/grafana/grafana/issues/1857). /api/login/ping Fix for issue when behind reverse proxy and subpath
|
||||
- [Issue #1863](https://github.com/grafana/grafana/issues/1863). MySQL: Dashboard.data column type changed to mediumtext (sql migration added)
|
||||
|
||||
# 2.0.2 (2015-04-22)
|
||||
|
||||
**Fixes**
|
||||
- [Issue #1832](https://github.com/grafana/grafana/issues/1832). Graph Panel + Legend Table mode: Many series caused a zero-height graph; now the legend will never reduce the height of the graph below 50% of row height.
|
||||
- [Issue #1846](https://github.com/grafana/grafana/issues/1846). Snapshots: Fixed issue with snapshotting dashboards with an interval template variable
|
||||
- [Issue #1848](https://github.com/grafana/grafana/issues/1848). Panel timeshift: You can now use panel timeshift without a relative time override
|
||||
|
||||
# 2.0.1 (2015-04-20)
|
||||
|
||||
**Fixes**
|
||||
- [Issue #1784](https://github.com/grafana/grafana/issues/1784). Data source proxy: Fixed issue with using data source proxy when grafana is behind nginx suburl
|
||||
- [Issue #1749](https://github.com/grafana/grafana/issues/1749). Graph Panel: Table legends are now visible when rendered to PNG
|
||||
- [Issue #1786](https://github.com/grafana/grafana/issues/1786). Graph Panel: Legend in table mode now aligns, graph area is reduced depending on how many series
|
||||
- [Issue #1734](https://github.com/grafana/grafana/issues/1734). Support for unicode / international characters in dashboard title (improved slugify)
|
||||
- [Issue #1782](https://github.com/grafana/grafana/issues/1782). Github OAuth: Now works with Github for Enterprise, thanks @williamjoy
|
||||
- [Issue #1780](https://github.com/grafana/grafana/issues/1780). Dashboard snapshot: Should not require login to view snapshot, Fixes #1780
|
||||
|
||||
# 2.0.0-Beta3 (2015-04-12)
|
||||
|
||||
**RPM / DEB Package changes (to follow the FHS)**
|
||||
- binary name changed to grafana-server
|
||||
- does not install to `/opt/grafana` any more, installs to `/usr/share/grafana`
|
||||
- binary to `/usr/sbin/grafana-server`
|
||||
- init.d script improvements, renamed to `/etc/init.d/grafana-server`
|
||||
- added default file with environment variables,
|
||||
- `/etc/default/grafana-server` (deb/ubuntu)
|
||||
- `/etc/sysconfig/grafana-server` (centos/redhat)
|
||||
|
||||
- added systemd service file, tested on debian jessie and centos7
|
||||
- config file in same location `/etc/grafana/grafana.ini` (now complete config file but with every setting commented out)
|
||||
- data directory (where the sqlite3 file is stored) is now by default `/var/lib/grafana`
|
||||
- no symlinking current to versions anymore
|
||||
- For more info see [Issue #1758](https://github.com/grafana/grafana/issues/1758).
|
||||
|
||||
**Config breaking change (setting rename)**
|
||||
- `[log] root_path` has changed to `[paths] logs`
|
||||
|
||||
# 2.0.0-Beta2 (...)
|
||||
|
||||
**Enhancements**
|
||||
- [Issue #1701](https://github.com/grafana/grafana/issues/1701). Share modal: Override UI theme via URL param for Share link, rendered panel, or embedded panel
|
||||
- [Issue #1660](https://github.com/grafana/grafana/issues/1660). OAuth: Specify allowed email address domains for Google or GitHub OAuth logins
|
||||
|
||||
**Fixes**
|
||||
- [Issue #1649](https://github.com/grafana/grafana/issues/1649). HTTP API: Grafana /render calls now work with API keys
|
||||
- [Issue #1667](https://github.com/grafana/grafana/issues/1667). Datasource proxy & session timeout fix (caused 401 Unauthorized errors after a while)
|
||||
- [Issue #1707](https://github.com/grafana/grafana/issues/1707). Unsaved changes: Do not show for snapshots, scripted and file based dashboards
|
||||
- [Issue #1703](https://github.com/grafana/grafana/issues/1703). Unsaved changes: Do not show for users with role `Viewer`
|
||||
- [Issue #1675](https://github.com/grafana/grafana/issues/1675). Data source proxy: Fixed issue with Gzip enabled and data source proxy
|
||||
- [Issue #1681](https://github.com/grafana/grafana/issues/1681). MySQL session: fixed problem using mysql as session store
|
||||
- [Issue #1671](https://github.com/grafana/grafana/issues/1671). Data sources: Fixed issue with changing default data source (should not require full page load to take effect, now fixed)
|
||||
- [Issue #1685](https://github.com/grafana/grafana/issues/1685). Search: Dashboard results should be sorted alphabetically
|
||||
- [Issue #1673](https://github.com/grafana/grafana/issues/1673). Basic auth: Fixed issue when using a basic auth proxy in front of Grafana
|
||||
|
||||
# 2.0.0-Beta1 (2015-03-30)
|
||||
|
||||
**New features**
|
||||
- [Issue #1623](https://github.com/grafana/grafana/issues/1623). Share Dashboard: Dashboard snapshot sharing (dash and data snapshot), save to local or save to public snapshot dashboard snapshots.raintank.io site
|
||||
- [Issue #1622](https://github.com/grafana/grafana/issues/1622). Share Panel: The share modal now has an embed option, gives you an iframe that you can use to embed a single graph on another web site
|
||||
- [Issue #718](https://github.com/grafana/grafana/issues/718). Dashboard: When saving a dashboard and another user has made changes in between, the user is prompted with a warning asking whether they really want to overwrite the other's changes
|
||||
- [Issue #1331](https://github.com/grafana/grafana/issues/1331). Graph & Singlestat: New axis/unit format selector and more units (kbytes, Joule, Watt, eV), and new design for graph axis & grid tab and single stat options tab views
|
||||
- [Issue #1242](https://github.com/grafana/grafana/issues/1242). Timepicker: New option in timepicker (under dashboard settings) to change ``now`` to be for example ``now-1m``, useful when you want to ignore the last minute because it contains incomplete data
|
||||
- [Issue #171](https://github.com/grafana/grafana/issues/171). Panel: Different time periods, panels can override dashboard relative time and/or add a time shift
|
||||
- [Issue #1488](https://github.com/grafana/grafana/issues/1488). Dashboard: Clone dashboard / Save as
|
||||
- [Issue #1458](https://github.com/grafana/grafana/issues/1458). User: persisted user option for dark or light theme (no longer an option on a dashboard)
|
||||
- [Issue #452](https://github.com/grafana/grafana/issues/452). Graph: Adds logarithmic scale option for base 10, base 16 and base 1024
|
||||
|
||||
**Enhancements**
|
||||
- [Issue #1366](https://github.com/grafana/grafana/issues/1366). Graph & Singlestat: Support for additional units, Fahrenheit (°F) and Celsius (°C), Humidity (%H), kW, watt-hour (Wh), kilowatt-hour (kWh), velocities (m/s, km/h, mpg, knot)
|
||||
- [Issue #978](https://github.com/grafana/grafana/issues/978). Graph: Shared tooltip improvement, can now support metrics of different resolution/intervals
|
||||
- [Issue #1297](https://github.com/grafana/grafana/issues/1297). Graphite: Added cumulative and minimumBelow graphite functions
|
||||
- [Issue #1296](https://github.com/grafana/grafana/issues/1296). InfluxDB: Auto escape column names with special characters. Thanks @steven-aerts
|
||||
- [Issue #1321](https://github.com/grafana/grafana/issues/1321). SingleStatPanel: You can now use template variables in pre & postfix
|
||||
- [Issue #599](https://github.com/grafana/grafana/issues/599). Graph: Added right y axis label setting and graph support
|
||||
- [Issue #1253](https://github.com/grafana/grafana/issues/1253). Graph & Singlestat: Users can now set decimal precision for legend and tooltips (override auto precision)
|
||||
- [Issue #1255](https://github.com/grafana/grafana/issues/1255). Templating: Dashboard will now wait to load until all template variables that have refresh on load set or are initialized via url to be fully loaded and so all variables are in valid state before panels start issuing metric requests.
|
||||
- [Issue #1344](https://github.com/grafana/grafana/issues/1344). OpenTSDB: Alias patterns (reference tag values), syntax is: $tag_tagname or [[tag_tagname]]
|
||||
|
||||
**Fixes**
|
||||
- [Issue #1298](https://github.com/grafana/grafana/issues/1298). InfluxDB: Fix handling of empty array in templating variable query
|
||||
- [Issue #1309](https://github.com/grafana/grafana/issues/1309). Graph: Fixed issue when using zero as a grid threshold
|
||||
- [Issue #1345](https://github.com/grafana/grafana/issues/1345). UI: Fixed position of confirm modal when scrolled down
|
||||
- [Issue #1372](https://github.com/grafana/grafana/issues/1372). Graphite: Fix for nested complex queries, where a query references a query that references another query (ie the #[A-Z] syntax)
|
||||
- [Issue #1363](https://github.com/grafana/grafana/issues/1363). Templating: Fix to allow custom template variables to contain white space, now only splits on ','
|
||||
- [Issue #1359](https://github.com/grafana/grafana/issues/1359). Graph: Fix for all series tooltip showing series with all null values when ``Hide Empty`` option is enabled
|
||||
- [Issue #1497](https://github.com/grafana/grafana/issues/1497). Dashboard: Fixed memory leak when switching dashboards
|
||||
|
||||
**Changes**
|
||||
- Dashboard title change & save will no longer create a new dashboard, it will just change the title.
|
||||
|
||||
**OpenTSDB breaking change**
|
||||
- [Issue #1438](https://github.com/grafana/grafana/issues/1438). OpenTSDB: Automatic downsample interval passed to OpenTSDB (depends on timespan and graph width)
|
||||
- NOTICE: Downsampling is now enabled by default, so if you have not picked a downsample aggregator in your metric query, do so, or your graphs will be misleading
|
||||
- This will make Grafana a lot quicker for OpenTSDB users when viewing large time spans without having to change the downsample interval manually.
|
||||
|
||||
**Tech**
|
||||
- [Issue #1311](https://github.com/grafana/grafana/issues/1311). Tech: Updated Font-Awesome from 3.2 to 4.2
|
||||
|
||||
# 1.9.1 (2014-12-29)
|
||||
|
||||
**Enhancements**
|
||||
- [Issue #1028](https://github.com/grafana/grafana/issues/1028). Graph: New legend option ``hideEmpty`` to hide series with only null values from legend
|
||||
- [Issue #1242](https://github.com/grafana/grafana/issues/1242). OpenTSDB: Downsample query field now supports interval template variable
|
||||
- [Issue #1126](https://github.com/grafana/grafana/issues/1126). InfluxDB: Support more than 10 series name segments when using alias ``$number`` patterns
|
||||
|
||||
**Fixes**
|
||||
- [Issue #1251](https://github.com/grafana/grafana/issues/1251). Graph: Fix for y axis with scaled units (GiB etc) that caused rounding, for example 400 GiB instead of 378 GiB
|
||||
- [Issue #1199](https://github.com/grafana/grafana/issues/1199). Graph: fix for series tooltip when one series is hidden/disabled
|
||||
- [Issue #1207](https://github.com/grafana/grafana/issues/1207). Graphite: movingAverage / movingMedian parameter type improvement, now handles both int and interval parameters
|
||||
|
||||
# 1.9.0 (2014-12-02)
|
||||
|
||||
**Enhancements**
|
||||
- [Issue #1130](https://github.com/grafana/grafana/issues/1130). SinglestatPanel: Added null point handling, and value to text mapping
|
||||
|
||||
|
||||
**Fixes**
|
||||
- [Issue #1087](https://github.com/grafana/grafana/issues/1087). Panel: Fixed IE9 crash due to angular drag drop
|
||||
- [Issue #1093](https://github.com/grafana/grafana/issues/1093). SingleStatPanel: Fixed position for drilldown link tooltip when dashboard requires scrolling
|
||||
- [Issue #1095](https://github.com/grafana/grafana/issues/1095). DrilldownLink: template variables in params property were not interpolated
|
||||
- [Issue #1114](https://github.com/grafana/grafana/issues/1114). Graphite: Lexer fix, allow equal sign (=) in metric paths
|
||||
- [Issue #1136](https://github.com/grafana/grafana/issues/1136). Graph: Fix to legend value Max and negative values
|
||||
- [Issue #1150](https://github.com/grafana/grafana/issues/1150). SinglestatPanel: Fixed absolute drilldown link issue
|
||||
- [Issue #1123](https://github.com/grafana/grafana/issues/1123). Firefox: Workaround for a Firefox bug that caused input text fields to not be selectable and not have a placeable cursor
|
||||
- [Issue #1108](https://github.com/grafana/grafana/issues/1108). Graph: Fix for tooltip series order when series draw order was changed with zindex property
|
||||
|
||||
# 1.9.0-rc1 (2014-11-17)
|
||||
|
||||
**UI Improvements**
|
||||
- [Issue #770](https://github.com/grafana/grafana/issues/770). UI: Panel dropdown menu replaced with a new panel menu
|
||||
|
||||
**Graph**
|
||||
- [Issue #877](https://github.com/grafana/grafana/issues/877). Graph: Smart auto decimal precision when using scaled unit formats
|
||||
- [Issue #850](https://github.com/grafana/grafana/issues/850). Graph: Shared tooltip that shows multiple series & crosshair line, thx @toni-moreno
|
||||
- [Issue #940](https://github.com/grafana/grafana/issues/940). Graph: New series style override option "Fill below to", useful to visualize max & min as a shadow for the mean
|
||||
- [Issue #1030](https://github.com/grafana/grafana/issues/1030). Graph: Legend table display/look changed, now includes column headers for min/max/avg, and full width (unless on right side)
|
||||
- [Issue #861](https://github.com/grafana/grafana/issues/861). Graph: Export graph time series data as csv file
|
||||
|
||||
**New Panels**
|
||||
- [Issue #951](https://github.com/grafana/grafana/issues/951). SingleStat: New singlestat panel
|
||||
|
||||
**Misc**
|
||||
- [Issue #864](https://github.com/grafana/grafana/issues/864). Panel: Share panel feature, get a link to panel with the current time range
|
||||
- [Issue #938](https://github.com/grafana/grafana/issues/938). Panel: Plugin panels now reside outside of app/panels directory
|
||||
- [Issue #952](https://github.com/grafana/grafana/issues/952). Help: Shortcut "?" to open help modal with list of all shortcuts
|
||||
- [Issue #991](https://github.com/grafana/grafana/issues/991). ScriptedDashboard: datasource services are now available in scripted dashboards, you can query datasource for metric keys, generate dashboards, and even save them in a scripted dashboard (see scripted_gen_and_save.js for example)
|
||||
- [Issue #1041](https://github.com/grafana/grafana/issues/1041). Panel: All panels can now have links to other dashboards or absolute links, these links are available in the panel menu.
|
||||
|
||||
**Changes**
|
||||
- [Issue #1007](https://github.com/grafana/grafana/issues/1007). Graph: Series hide/show toggle changed to be default exclusive, so clicking on a series name will show only that series. (SHIFT or meta)+click will toggle hide/show.
|
||||
|
||||
**OpenTSDB**
|
||||
- [Issue #930](https://github.com/grafana/grafana/issues/930). OpenTSDB: Adding counter max and counter reset value to open tsdb query editor, thx @rsimiciuc
|
||||
- [Issue #917](https://github.com/grafana/grafana/issues/917). OpenTSDB: Templating support for OpenTSDB series name and tags, thx @mchataigner
|
||||
|
||||
**InfluxDB**
|
||||
- [Issue #714](https://github.com/grafana/grafana/issues/714). InfluxDB: Support for sub second resolution graphs
|
||||
|
||||
**Fixes**
|
||||
- [Issue #925](https://github.com/grafana/grafana/issues/925). Graph: bar width calculation fix for some edge cases (bars would render on top of each other)
|
||||
- [Issue #505](https://github.com/grafana/grafana/issues/505). Graph: fix for second y axis tick unit labels wrapping on the next line
|
||||
- [Issue #987](https://github.com/grafana/grafana/issues/987). Dashboard: Collapsed rows became invisible when hide controls was enabled
|
||||
|
||||
=======
|
||||
# 1.8.1 (2014-09-30)
|
||||
|
||||
**Fixes**
|
||||
- [Issue #855](https://github.com/grafana/grafana/issues/855). Graph: Fix for scroll issue in graph edit mode when dropdown goes below screen
|
||||
- [Issue #847](https://github.com/grafana/grafana/issues/847). Graph: Fix for series draw order not being the same after hiding/unhiding series
|
||||
- [Issue #851](https://github.com/grafana/grafana/issues/851). Annotations: Fix for annotations not reloaded when switching between 2 dashboards with annotations
|
||||
- [Issue #846](https://github.com/grafana/grafana/issues/846). Edit panes: Issue when opening the row or json editor while scrolled down the page; you were unable to scroll and could not see the editor
|
||||
- [Issue #840](https://github.com/grafana/grafana/issues/840). Import: Fixes to import from json file and import from graphite. The issue was lingering state from the previous dashboard.
|
||||
- [Issue #859](https://github.com/grafana/grafana/issues/859). InfluxDB: Fix for bug when saving dashboard where title is the same as slugified url id
|
||||
- [Issue #852](https://github.com/grafana/grafana/issues/852). White theme: Fixes for hidden series legend text and disabled annotations color
|
||||
|
||||
# 1.8.0 (2014-09-22)
|
||||
|
||||
Read this [blog post](http://grafana.org/blog/2014/09/11/grafana-1-8-0-rc1-released.html) for an overview of all improvements.
|
||||
|
||||
**Fixes**
|
||||
- [Issue #802](https://github.com/grafana/grafana/issues/802). Annotations: Fix when using InfluxDB datasource
|
||||
- [Issue #795](https://github.com/grafana/grafana/issues/795). Chrome: Fix for display issue in chrome beta & chrome canary when entering edit mode
|
||||
- [Issue #818](https://github.com/grafana/grafana/issues/818). Graph: Added percent y-axis format
|
||||
- [Issue #828](https://github.com/grafana/grafana/issues/828). Elasticsearch: saving a new dashboard with title equal to slugified url would cause it to be deleted.
|
||||
- [Issue #830](https://github.com/grafana/grafana/issues/830). Annotations: Fix for elasticsearch annotations and mapping nested fields
|
||||
|
||||
# 1.8.0-RC1 (2014-09-12)
|
||||
|
||||
**UI polish / changes**
|
||||
- [Issue #725](https://github.com/grafana/grafana/issues/725). UI: All modal editors are removed and replaced by an edit pane under menu. The look of editors is also updated and polished. Search dropdown is also shown as pane under menu and has seen some UI polish.
|
||||
|
||||
**Filtering/Templating feature overhaul**
|
||||
- Filtering renamed to Templating, and filter items to variables
|
||||
- Filter editing has gotten its own edit pane with much improved UI and options
|
||||
- [Issue #296](https://github.com/grafana/grafana/issues/296). Templating: Can now retrieve variable values from a non-default data source
|
||||
- [Issue #219](https://github.com/grafana/grafana/issues/219). Templating: Template variable value selection is now a typeahead autocomplete dropdown
|
||||
- [Issue #760](https://github.com/grafana/grafana/issues/760). Templating: Extend template variable syntax to include $variable syntax replacement
|
||||
- [Issue #234](https://github.com/grafana/grafana/issues/234). Templating: Interval variable type for time intervals summarize/group by parameter, included "auto" option, and auto step counts option.
|
||||
- [Issue #262](https://github.com/grafana/grafana/issues/262). Templating: Ability to use template variables for function parameters via custom variable type, can be used as parameter for movingAverage or scaleToSeconds for example
|
||||
- [Issue #312](https://github.com/grafana/grafana/issues/312). Templating: Can now use template variables in panel titles
|
||||
- [Issue #613](https://github.com/grafana/grafana/issues/613). Templating: Full support for InfluxDB, filter by part of series names, extract series substrings, nested queries, multiple where clauses!
|
||||
- Template variables can now be initialized from the URL with var-my_varname=value; this is a breaking change, before it was just my_varname (see the sketch below this list).
|
||||
- Templating and url state sync has some issues that are not solved for this release, see [Issue #772](https://github.com/grafana/grafana/issues/772) for more details.
|
||||
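A before/after sketch of the `var-` prefix breaking change above; the host, dashboard, and variable names are made up for illustration:

```bash
# Pre-1.8 form (no longer honored):
#   http://grafana.example.com/#/dashboard/db/my-dashboard?my_varname=value
# 1.8 form, with the var- prefix:
xdg-open "http://grafana.example.com/#/dashboard/db/my-dashboard?var-my_varname=value"   # `open` on macOS
```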
|
||||
**InfluxDB Breaking changes**
|
||||
- To better support templating, fill(0) and a group by time low limit, some changes have been made to the editor and query model schema
|
||||
- Currently some of these changes are breaking
|
||||
- If you used custom condition filter you need to open the graph in edit mode, the editor will update the schema, and the queries should work again
|
||||
- If you used a raw query you need to remove the time filter and replace it with $timeFilter (this is done automatically when you switch from query editor to raw query, but old raw queries need to be updated)
|
||||
- If you used group by and later removed the group by, the graph could break; open it in the editor and it should be corrected
|
||||
- InfluxDB annotation queries that used [[timeFilter]] should be updated to use $timeFilter syntax instead
|
||||
- Might write an upgrade tool to update dashboards automatically, but right now master (1.8) includes the above breaking changes
|
||||
|
||||
**InfluxDB query editor enhancements**
|
||||
- [Issue #756](https://github.com/grafana/grafana/issues/756). InfluxDB: Add option for fill(0) and fill(null), integrated help in editor for why this option is important when stacking series
|
||||
- [Issue #743](https://github.com/grafana/grafana/issues/743). InfluxDB: A group by time option for all queries in graph panel that supports a low limit for auto group by time, very important for stacking and fill(0)
|
||||
- The above two enhancements solve the problems associated with stacked bars and lines when points are missing; these issues are solved:
|
||||
- [Issue #673](https://github.com/grafana/grafana/issues/673). InfluxDB: stacked bars missing intermediate data points, unless lines also enabled
|
||||
- [Issue #674](https://github.com/grafana/grafana/issues/674). InfluxDB: stacked chart ignoring series without latest values
|
||||
- [Issue #534](https://github.com/grafana/grafana/issues/534). InfluxDB: No order in stacked bars mode
|
||||
|
||||
**New features and improvements**
|
||||
- [Issue #117](https://github.com/grafana/grafana/issues/117). Graphite: Graphite query builder can now handle functions that take multiple series as arguments!
|
||||
- [Issue #281](https://github.com/grafana/grafana/issues/281). Graphite: Metric node/segment selection is now a textbox with autocomplete dropdown, allow for custom glob expression for single node segment without entering text editor mode.
|
||||
- [Issue #304](https://github.com/grafana/grafana/issues/304). Dashboard: View dashboard json, edit/update any panel using json editor, makes it possible to quickly copy a graph from one dashboard to another.
|
||||
- [Issue #578](https://github.com/grafana/grafana/issues/578). Dashboard: Row option to display row title even when the row is visible
|
||||
- [Issue #672](https://github.com/grafana/grafana/issues/672). Dashboard: panel fullscreen & edit state is present in url, can now link to graph in edit & fullscreen mode.
|
||||
- [Issue #709](https://github.com/grafana/grafana/issues/709). Dashboard: Small UI look polish to search results, made dashboard title links larger
|
||||
- [Issue #425](https://github.com/grafana/grafana/issues/425). Graph: New section in 'Display Styles' tab to override any display setting on per series bases (mix and match lines, bars, points, fill, stack, line width etc)
|
||||
- [Issue #634](https://github.com/grafana/grafana/issues/634). Dashboard: Dashboard tags now in different colors (from fixed palette) determined by tag name.
|
||||
- [Issue #685](https://github.com/grafana/grafana/issues/685). Dashboard: New config.js option to change/remove window title prefix.
|
||||
- [Issue #781](https://github.com/grafana/grafana/issues/781). Dashboard: Title URL is now slugified for greater URL readability, works with both ES & InfluxDB storage, is backward compatible
|
||||
- [Issue #785](https://github.com/grafana/grafana/issues/785). Elasticsearch: Support for full elasticsearch lucene search grammar when searching for dashboards, better async search
|
||||
- [Issue #787](https://github.com/grafana/grafana/issues/787). Dashboard: time range can now be read from URL parameters, will override dashboard saved time range
|
||||
|
||||
**Fixes**
|
||||
- [Issue #696](https://github.com/grafana/grafana/issues/696). Graph: Fix for y-axis format 'none' when values are in scientific notation (ex 2.3e-13)
|
||||
- [Issue #733](https://github.com/grafana/grafana/issues/733). Graph: Fix for tooltip current value decimal precision when 'none' axis format was selected
|
||||
- [Issue #697](https://github.com/grafana/grafana/issues/697). Graphite: Fix for Glob syntax in graphite queries ([1-9] and ?) that made the query editor / parser bail and fallback to a text box.
|
||||
- [Issue #702](https://github.com/grafana/grafana/issues/702). Graphite: Fix for nonNegativeDerivative function, now possible to not include optional first parameter maxValue
|
||||
- [Issue #277](https://github.com/grafana/grafana/issues/277). Dashboard: Fix for timepicker date & tooltip when UTC timezone selected.
|
||||
- [Issue #699](https://github.com/grafana/grafana/issues/699). Dashboard: Fix for bug when adding rows from dashboard settings dialog.
|
||||
- [Issue #723](https://github.com/grafana/grafana/issues/723). Dashboard: Fix for hide controls setting not used/initialized on dashboard load
|
||||
- [Issue #724](https://github.com/grafana/grafana/issues/724). Dashboard: Fix for zoom out causing right hand "to" range to be set in the future.
|
||||
|
||||
**Tech**
|
||||
- Upgraded from angularjs 1.1.5 to 1.3 beta 17;
|
||||
- Switch from underscore to lodash
|
||||
- helpers to easily unit test angularjs controllers and services
|
||||
- Test coverage through coveralls
|
||||
- Upgrade from jquery 1.8.0 to 2.1.1 (**Removes support for IE7 & IE8**)
|
||||
|
||||
# 1.7.1 (unreleased)
|
||||
|
||||
**Fixes**
|
||||
- [Issue #691](https://github.com/grafana/grafana/issues/691). Dashboard: Tooltip fixes, sometimes they would not show, and sometimes they would get stuck.
|
||||
- [Issue #695](https://github.com/grafana/grafana/issues/695). Dashboard: Tooltip on goto home menu icon would get stuck after clicking on it
|
||||
|
||||
# 1.7.0 (2014-08-11)
|
||||
|
||||
**Fixes**
|
||||
- [Issue #652](https://github.com/grafana/grafana/issues/652). Timepicker: Entering custom date range impossible when refresh is low (now is constantly reset)
|
||||
- [Issue #450](https://github.com/grafana/grafana/issues/450). Graph: Tooltip does not disappear sometimes and would get stuck
|
||||
- [Issue #655](https://github.com/grafana/grafana/issues/655). General: Auto refresh not initiated / started after dashboard loading
|
||||
- [Issue #657](https://github.com/grafana/grafana/issues/657). General: Fix for refresh icon in IE browsers
|
||||
- [Issue #661](https://github.com/grafana/grafana/issues/661). Annotations: Elasticsearch querystring with filter template replacements was not interpolated
|
||||
- [Issue #660](https://github.com/grafana/grafana/issues/660). OpenTSDB: fix opentsdb queries that returned more than one series
|
||||
|
||||
**Change**
|
||||
- [Issue #681](https://github.com/grafana/grafana/issues/681). Dashboard: The panel error bar has been replaced with a small error indicator, this indicator does not change panel height and is a lot less intrusive. Hover over it for short details, click on it for more details.
|
||||
|
||||
# 1.7.0-rc1 (2014-08-05)
|
||||
|
||||
**New features or improvements**
|
||||
- [Issue #581](https://github.com/grafana/grafana/issues/581). InfluxDB: Add continuous query in series results (series typeahead).
|
||||
- [Issue #584](https://github.com/grafana/grafana/issues/584). InfluxDB: Support for alias & alias patterns when using raw query mode
|
||||
- [Issue #394](https://github.com/grafana/grafana/issues/394). InfluxDB: Annotation support
|
||||
- [Issue #633](https://github.com/grafana/grafana/issues/633). InfluxDB: InfluxDB can now act as a datastore for dashboards
|
||||
- [Issue #610](https://github.com/grafana/grafana/issues/610). InfluxDB: Support for InfluxDB v0.8 list series response schema (series typeahead)
|
||||
- [Issue #525](https://github.com/grafana/grafana/issues/525). InfluxDB: Enhanced series aliasing (legend names) with pattern replacements
|
||||
- [Issue #266](https://github.com/grafana/grafana/issues/266). Graphite: New option cacheTimeout to override graphite default memcache timeout
|
||||
- [Issue #606](https://github.com/grafana/grafana/issues/606). General: New global option in config.js to specify admin password (useful to hinder users from accidentally making changes)
|
||||
- [Issue #201](https://github.com/grafana/grafana/issues/201). Annotations: Elasticsearch datasource support for events
|
||||
- [Issue #344](https://github.com/grafana/grafana/issues/344). Annotations: Annotations can now be fetched from non default datasources
|
||||
- [Issue #631](https://github.com/grafana/grafana/issues/631). Search: max_results config.js option & scroll in search results (To show more or all dashboards)
|
||||
- [Issue #511](https://github.com/grafana/grafana/issues/511). Text panel: Allow [[..]] filter notation in all text panels (markdown/html/text)
|
||||
- [Issue #136](https://github.com/grafana/grafana/issues/136). Graph: New legend display option "Align as table"
|
||||
- [Issue #556](https://github.com/grafana/grafana/issues/556). Graph: New legend display option "Right side", will show legend to the right of the graph
|
||||
- [Issue #604](https://github.com/grafana/grafana/issues/604). Graph: New axis format, 'bps' (SI unit in steps of 1000) useful for network gear metrics
|
||||
- [Issue #626](https://github.com/grafana/grafana/issues/626). Graph: Downscale y axis to more precise unit, value of 0.1 for seconds format will be formatted as 100 ms. Thanks @kamaradclimber
|
||||
- [Issue #618](https://github.com/grafana/grafana/issues/618). OpenTSDB: Series alias option to override metric name returned from opentsdb. Thanks @heldr
|
||||
|
||||
**Documentation**
|
||||
- [Issue #635](https://github.com/grafana/grafana/issues/635). Docs for features and changes in v1.7, new troubleshooting guide, new Getting started guide, improved install & config guide.
|
||||
|
||||
|
||||
**Changes**
|
||||
- [Issue #536](https://github.com/grafana/grafana/issues/536). Graphite: Use unix epoch for Graphite from/to for absolute time ranges
|
||||
- [Issue #641](https://github.com/grafana/grafana/issues/641). General: Dashboard save temp copy feature settings moved from dashboard to config.js, default is enabled, and TTL set to 30 days
|
||||
- [Issue #532](https://github.com/grafana/grafana/issues/532). Schema: Dashboard schema changes, "Unsaved changes" should not appear for schema changes. All changes are backward compatible with old schema.
|
||||
|
||||
**Fixes**
|
||||
- [Issue #545](https://github.com/grafana/grafana/issues/545). Graph: Fix formatting negative values (axis formats, legend values)
|
||||
- [Issue #460](https://github.com/grafana/grafana/issues/460). Graph: fix for max legend value when max value is zero
|
||||
- [Issue #628](https://github.com/grafana/grafana/issues/628). Filtering: Fix for nested filters, changing a child filter could result in infinite recursion in some cases
|
||||
- [Issue #528](https://github.com/grafana/grafana/issues/528). Graphite: Fix for graphite expressions parser failure when metric expressions starts with curly brace segment
|
||||
|
||||
# 1.6.1 (2014-06-24)
|
||||
|
||||
**New features or improvements**
|
||||
- [Issue #360](https://github.com/grafana/grafana/issues/360). Ability to set y min/max for right y-axis (RR #519)
|
||||
|
||||
**Fixes**
|
||||
|
||||
- [Issue #500](https://github.com/grafana/grafana/issues/500). Fixes regex InfluxDB queries introduced in 1.6.0
|
||||
- [Issue #506](https://github.com/grafana/grafana/issues/506). Bug when using % sign in legends (aliases), fixed by removing url decoding of metric names
|
||||
- [Issue #522](https://github.com/grafana/grafana/issues/522). Series names and column name typeahead cache fix
|
||||
- [Issue #504](https://github.com/grafana/grafana/issues/504). Fixed influxdb issue with raw query that caused wrong value column detection
|
||||
- [Issue #526](https://github.com/grafana/grafana/issues/526). Default property that marks which datasource is default in config.js is now optional
|
||||
- [Issue #342](https://github.com/grafana/grafana/issues/342). Auto-refresh caused 2 refreshes (and hence multiple queries) each time (at least in Firefox)
|
||||
|
||||
# 1.6.0 (2014-06-16)
|
||||
|
||||
#### New features or improvements
|
||||
- [Issue #427](https://github.com/grafana/grafana/issues/427). New Y-axis formatter for metric values that represent seconds, Thanks @jippi
|
||||
- [Issue #390](https://github.com/grafana/grafana/issues/390). Allow special characters in series names (InfluxDB datasource), Thanks @majst01
|
||||
- [Issue #428](https://github.com/grafana/grafana/issues/428). Refactoring of filterSrv, Thanks @Tetha
|
||||
- [Issue #445](https://github.com/grafana/grafana/issues/445). New config for playlist feature. Set playlist_timespan to set default playlist interval, Thanks @rmca
|
||||
- [Issue #461](https://github.com/grafana/grafana/issues/461). New graphite function definition added isNonNull, Thanks @tmonk42
|
||||
- [Issue #455](https://github.com/grafana/grafana/issues/455). New InfluxDB function difference add to function dropdown
|
||||
- [Issue #459](https://github.com/grafana/grafana/issues/459). Added parameter to keepLastValue graphite function definition (default 100)
|
||||
- [Issue #418](https://github.com/grafana/grafana/issues/418). Stale browser cache issues when upgrading Grafana; addressed by the revisioned asset pipeline noted further down this list, which also improves load performance
|
||||
- [Issue #327](https://github.com/grafana/grafana/issues/327). Partial support for url encoded metrics when using Graphite datasource. Thanks @axe-felix
|
||||
- [Issue #473](https://github.com/grafana/grafana/issues/473). Improvement to InfluxDB query editor and function/value column selection
|
||||
- [Issue #375](https://github.com/grafana/grafana/issues/375). Initial support for filtering (templated queries) for InfluxDB. Thanks @mavimo
|
||||
- [Issue #475](https://github.com/grafana/grafana/issues/475). Row editing and adding new panel is now a lot quicker and easier with the new row menu
|
||||
- [Issue #211](https://github.com/grafana/grafana/issues/211). New datasource! Initial support for OpenTSDB, Thanks @mpage
|
||||
- [Issue #492](https://github.com/grafana/grafana/issues/492). Improvement and polish to the OpenTSDB query editor
|
||||
- [Issue #441](https://github.com/grafana/grafana/issues/441). Influxdb group by support, Thanks @piis3
|
||||
- improved asset (css/js) build pipeline, added revision to css and js; this removes the browser cache issues noted in Issue #418 above
|
||||
|
||||
|
||||
#### Changes
|
||||
- [Issue #475](https://github.com/grafana/grafana/issues/475). Add panel icon and Row edit button is replaced by the Row edit menu
|
||||
- New graphs now have a default empty query
|
||||
- Add Row button now creates a row with default height of 250px (no longer opens dashboard settings modal)
|
||||
- Clean up of config.sample.js, graphiteUrl removed (still works, but deprecated, will be removed in the future)
|
||||
Use datasources config instead. panel_names removed from config.js. Use plugins.panels to add custom panels
|
||||
- Graphite panel is now renamed graph (Existing dashboards will still work)
|
||||
|
||||
#### Fixes
|
||||
- [Issue #126](https://github.com/grafana/grafana/issues/126). Graphite query lexer change, can now handle regex parameters for aliasSub function
|
||||
- [Issue #447](https://github.com/grafana/grafana/issues/447). Filter option loading when having multiple nested filters now works better. Options are now reloaded correctly and there are no multiple renders/refreshes in between.
|
||||
- [Issue #412](https://github.com/grafana/grafana/issues/412). After a filter option is changed and a nested template param is reloaded, if the current value exists after the options are reloaded the current selected value is kept.
|
||||
- [Issue #460](https://github.com/grafana/grafana/issues/460). Legend Current value did not display when value was zero
|
||||
- [Issue #328](https://github.com/grafana/grafana/issues/328). Fix to series toggling bug that caused annotations to be hidden when toggling/hiding series.
|
||||
- [Issue #293](https://github.com/grafana/grafana/issues/293). Fix for graphite function selection menu that some times draws outside screen. It now displays upward
|
||||
- [Issue #350](https://github.com/grafana/grafana/issues/350). Fix for exclusive series toggling (hold down CTRL, SHIFT or META key) and left click a series for exclusive toggling
|
||||
- [Issue #472](https://github.com/grafana/grafana/issues/472). CTRL does not work on MAC OSX but SHIFT or META should (depending on browser)
|
||||
|
||||
# 1.5.4 (2014-05-13)
|
||||
### New features and improvements
|
||||
- InfluxDB enhancement: support for multiple hosts (with retries) and raw queries ([Issue #318](https://github.com/grafana/grafana/issues/318), thx @toddboom)
|
||||
- Added rounding of Graphite from and to time range filters for very short absolute ranges ([Issue #320](https://github.com/grafana/grafana/issues/320))
|
||||
- Increased resolution for graphite datapoints (maxDataPoints), now equal to panel pixel width. ([Issue #5](https://github.com/grafana/grafana/issues/5))
|
||||
- Improvement to influxdb query editor, can now add where clause and alias ([Issue #331](https://github.com/grafana/grafana/issues/331), thanks @mavimo)
|
||||
- New config setting for graphite datasource to control if json render request is POST or GET ([Issue #345](https://github.com/grafana/grafana/issues/345))
|
||||
- Unsaved changes warning feature ([Issue #324](https://github.com/grafana/grafana/issues/324))
|
||||
- Improvement to series toggling, CTRL+MouseClick on series name will now hide all others ([Issue #350](https://github.com/grafana/grafana/issues/350))
|
||||
|
||||
### Changes
|
||||
- Graph default setting for Y-Min changed from zero to auto scaling (will not affect existing dashboards). ([Issue #386](https://github.com/grafana/grafana/issues/386)) - thx @kamaradclimber
|
||||
|
||||
### Fixes
|
||||
- Fixes to filters and "All" option. It now never uses "*" as value, but all options in a {node1, node2, node3} expression ([Issue #228](https://github.com/grafana/grafana/issues/228), #359)
|
||||
- Fix for InfluxDB query generation with columns containing dots or dashes ([Issue #369](https://github.com/grafana/grafana/issues/369), #348) - Thanks to @jbripley
|
||||
|
||||
|
||||
# 1.5.3 (2014-04-17)
|
||||
- Add support for async scripted dashboards ([Issue #274](https://github.com/grafana/grafana/issues/274))
|
||||
- Text panel now accepts html (for links to other dashboards, etc) ([Issue #236](https://github.com/grafana/grafana/issues/236))
|
||||
- Fix for Text panel, now changes take effect directly ([Issue #251](https://github.com/grafana/grafana/issues/251))
|
||||
- Fix when adding functions without params that did not cause graph to update ([Issue #267](https://github.com/grafana/grafana/issues/267))
|
||||
- Graphite errors are now much easier to see and troubleshoot with the new inspector ([Issue #265](https://github.com/grafana/grafana/issues/265))
|
||||
- Use influxdb aliases to distinguish between multiple columns ([Issue #283](https://github.com/grafana/grafana/issues/283))
|
||||
- Correction to ms axis formatter, now formats days correctly. ([Issue #189](https://github.com/grafana/grafana/issues/189))
|
||||
- Css fix for Firefox and using top menu dropdowns in panel fullscreen / edit mode ([Issue #106](https://github.com/grafana/grafana/issues/106))
|
||||
- Browser page title is now Grafana - {{dashboard title}} ([Issue #294](https://github.com/grafana/grafana/issues/294))
|
||||
- Disable auto refresh zooming in (every time you change to an absolute time range), refresh will be restored when you change time range back to relative ([Issue #282](https://github.com/grafana/grafana/issues/282))
|
||||
- More graphite functions
|
||||
|
||||
# 1.5.2 (2014-03-24)
|
||||
### New Features and improvements
|
||||
- Support for second optional params for functions like aliasByNode ([Issue #167](https://github.com/grafana/grafana/issues/167)). Read the wiki on the [Function Editor](https://github.com/torkelo/grafana/wiki/Graphite-Function-Editor) for more info.
|
||||
- More functions added to InfluxDB query editor ([Issue #218](https://github.com/grafana/grafana/issues/218))
|
||||
- Filters can now be used inside other filters (templated segments) ([Issue #128](https://github.com/grafana/grafana/issues/128))
|
||||
- More graphite functions added
|
||||
|
||||
### Fixes
|
||||
- Float arguments now work for functions like scale ([Issue #223](https://github.com/grafana/grafana/issues/223))
|
||||
- Fix for graphite function editor, the graph & target were not updated after adding a function and leaving default params as is #191
|
||||
|
||||
The zip files now contain a sub folder with project name and version prefix. ([Issue #209](https://github.com/grafana/grafana/issues/209))
|
||||
|
||||
# 1.5.1 (2014-03-10)
|
||||
### Fixes
|
||||
- maxDataPoints must be an integer #184 (thanks @frejsoya for fixing this)
|
||||
|
||||
For people who find Grafana slow for large time spans or high resolution metrics: this is most likely due to graphite returning a large number of datapoints. The maxDataPoints parameter solves this issue. For maxDataPoints to work you need to run the latest graphite-web (some builds of 0.9.12 do not include this feature).
|
||||
|
||||
Read this for more info:
|
||||
[Performance for large time spans](https://github.com/torkelo/grafana/wiki/Performance-for-large-time-spans)
|
||||
|
||||
# 1.5.0 (2014-03-09)
|
||||
### New Features and improvements
|
||||
- New function editor [video demo](http://youtu.be/I90WHRwE1ZM) ([Issue #178](https://github.com/grafana/grafana/issues/178))
|
||||
- Links to function documentation from function editor ([Issue #3](https://github.com/grafana/grafana/issues/3))
|
||||
- Reorder functions ([Issue #130](https://github.com/grafana/grafana/issues/130))
|
||||
- [Initial support for InfluxDB](https://github.com/torkelo/grafana/wiki/InfluxDB) as metric datasource (#103), need feedback!
|
||||
- [Dashboard playlist](https://github.com/torkelo/grafana/wiki/Dashboard-playlist) ([Issue #36](https://github.com/grafana/grafana/issues/36))
|
||||
- When adding aliasByNode smartly set node number ([Issue #175](https://github.com/grafana/grafana/issues/175))
|
||||
- Support graphite identifiers with embedded colons ([Issue #173](https://github.com/grafana/grafana/issues/173))
|
||||
- Typeahead & autocomplete when adding new function ([Issue #164](https://github.com/grafana/grafana/issues/164))
|
||||
- More graphite function definitions
|
||||
- Make "ms" axis format include hour, day, weeks, month and year ([Issue #149](https://github.com/grafana/grafana/issues/149))
|
||||
- Microsecond axis format ([Issue #146](https://github.com/grafana/grafana/issues/146))
|
||||
- Specify template parameters in URL ([Issue #123](https://github.com/grafana/grafana/issues/123))
|
||||
|
||||
### Fixes
|
||||
- Basic Auth fix ([Issue #152](https://github.com/grafana/grafana/issues/152))
|
||||
- Fix to annotations with graphite source & null values ([Issue #138](https://github.com/grafana/grafana/issues/138))
|
||||
|
||||
# 1.4.0 (2014-02-21)
|
||||
### New Features
|
||||
- #44 Annotations! Required a lot of work to get right. Read wiki article for more info. Supported annotations data sources are graphite metrics and graphite events. Support for more will be added in the future!
|
||||
- #35 Support for multiple graphite servers! (Read wiki article for more)
|
||||
- #116 Back to dashboard link in top menu to easily exit full screen / edit mode.
|
||||
- #114, #97 Legend values now use the same y axes formatter
|
||||
- #77 Improvements and polish to the light theme
|
||||
|
||||
### Changes
|
||||
- #98 Stack is no longer by default turned on in graph display settings.
|
||||
- Hide controls (Ctrl+h) now hides the sub menu row (where filtering, and annotations are). So if you had filtering enabled and hide controls enabled you will not see the filtering sub menu.
|
||||
|
||||
### Fixes:
|
||||
- #94 Fix for bug that caused dashboard settings to sometimes not contain timepicker tab.
|
||||
- #110 Graph with many many metrics caused legend to push down graph editor below screen. You can now scroll in edit mode & full screen mode for graphs with lots of series & legends.
|
||||
- #104 Improvement to graphite target editor, select wildcard now gives you a "select metric" link for the next node.
|
||||
- #105 Added zero as a possible node value in groupByAlias function
|
||||
|
||||
# 1.3.0 (2014-02-13)
|
||||
### New features or improvements
|
||||
- #86 Dashboard tags and search (see wiki article for details)
|
||||
- #54 Enhancement to filter / template. "Include All" improvement
|
||||
- #82 Dashboard search result sorted in alphabetical order
|
||||
|
||||
### Fixes
|
||||
- #91 Custom date selector is one day behind
|
||||
- #89 Filter / template does not work after switching dashboard
|
||||
- #88 Closed / Minimized row css bug
|
||||
- #85 Added all parameters to summarize function
|
||||
- #83 Stack as percent should now work a lot better!
|
||||
|
||||
# 1.2.0 (2014-02-10)
|
||||
### New features
|
||||
- #70 Grid Thresholds (warning and error regions or lines in graph)
|
||||
- #72 Added an example of a scripted dashboard and a short wiki article documenting scripted dashboards.
|
||||
|
||||
### Fixes
|
||||
- #81 Grid min/max values are ignored bug
|
||||
- #80 "stacked as percent" graphs should always use "max" value of 100 bug
|
||||
- #73 Left Y format change did not work
|
||||
- #42 Fixes to grid min/max auto scaling
|
||||
- #69 Fixes to lexer/parser for metrics segments like "10-20".
|
||||
- #67 Allow decimal input for scale function
|
||||
- #68 Bug when trying to open dashboard while in edit mode
|
||||
|
||||
# 1.1.0 (2014-02-06)
|
||||
### New features:
|
||||
|
||||
- #22 Support for native graphite png renderer, does not support click and select zoom yet
|
||||
- #60 Support for legend values (cactiStyle, min, max, current, total, avg). The options for these are found in the new "Axes & Grid" tab for now.
|
||||
- #62 There is now a "New" button in the search/open dashboard view to quickly open a clean empty dashboard.
|
||||
- #55 Basic auth is now supported for elastic search as well
|
||||
- some new function definitions added (will focus more on this for next release).
|
||||
|
||||
### Fixes
|
||||
- #45 zero values from graphite were handled as null.
|
||||
- #63 Kibana / Grafana on same host would use same localStorage keys, now fixed
|
||||
- #46 Impossible to edit graph without a name fixed.
|
||||
- #24 fix for dashboard search when elastic search is configured to disable _all field.
|
||||
- #38 Improvement to lexer / parser to support pure numeric literals in metric segments
|
||||
|
||||
Thanks to everyone who contributed fixes and provided feedback :+1:
|
||||
|
||||
# 1.0.4 (2014-01-24)
|
||||
- [Issue #28](https://github.com/grafana/grafana/issues/28) - Relative time range caused 500 graphite error in some cases (thx rsommer for the fix)
|
||||
|
||||
# 1.0.3 (2014-01-23)
|
||||
- #9 Add Y-axis format for milliseconds
|
||||
- #16 Add support for Basic Auth (use http://username:password@yourgraphitedomain.com)
|
||||
- #13 Relative time ranges now uses relative time ranges when issuing graphite query
|
||||
|
||||
# 1.0.2 (2014-01-21)
|
||||
- [Issue #12](https://github.com/grafana/grafana/issues/12), should now work ok without ElasticSearch
|
||||
|
||||
# 1.0.1 (2014-01-21)
|
||||
- Resize fix
|
||||
- Improvements to drag & drop
|
||||
- Added a few graphite function definitions
|
||||
- Fixed duplicate panel bug
|
||||
- Updated default dashboard with welcome message and randomWalk graph
|
||||
|
||||
# 1.0.0 (2014-01-19)
|
||||
|
||||
First public release
|
||||
CONTRIBUTING.md (new file)
@@ -0,0 +1,14 @@
If you have an idea for an improvement or have found a bug, do not hesitate to open an issue.
And if you have time, clone this repo and submit a pull request to help me make Grafana the
kickass metrics & devops dashboard we all dream about!

Prerequisites:
- Node.js (for jshint, grunt & the development server)

Clone the repository, then:

    npm install
    grunt server   (starts the development web server in the src folder)
    grunt          (runs jshint and the less -> css compilation)

Please remember to run grunt before opening a pull request, to verify that your code passes all the jshint validations.
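For convenience, here is a minimal end-to-end sketch of the contributor workflow described in CONTRIBUTING.md above. The clone URL is taken from the issue links in the changelog; everything else is the set of commands the file itself lists.

```bash
# Sketch of the contributor workflow from CONTRIBUTING.md (not an official build script).
git clone https://github.com/grafana/grafana.git
cd grafana
npm install      # installs jshint, grunt and the development-server dependencies
grunt server     # serves the app from the src folder while you develop
grunt            # runs jshint and the less -> css compilation; run this before opening a PR
```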
Godeps/Godeps.json (generated, new file)
@@ -0,0 +1,111 @@
|
||||
{
|
||||
"ImportPath": "github.com/grafana/grafana",
|
||||
"GoVersion": "go1.4.2",
|
||||
"Packages": [
|
||||
"./pkg/..."
|
||||
],
|
||||
"Deps": [
|
||||
{
|
||||
"ImportPath": "github.com/BurntSushi/toml",
|
||||
"Comment": "v0.1.0-21-g056c9bc",
|
||||
"Rev": "056c9bc7be7190eaa7715723883caffa5f8fa3e4"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/Unknwon/com",
|
||||
"Rev": "d9bcf409c8a368d06c9b347705c381e7c12d54df"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/Unknwon/macaron",
|
||||
"Rev": "93de4f3fad97bf246b838f828e2348f46f21f20a"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/davecgh/go-spew/spew",
|
||||
"Rev": "2df174808ee097f90d259e432cc04442cf60be21"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/go-ldap/ldap",
|
||||
"Comment": "v1-19-g83e6542",
|
||||
"Rev": "83e65426fd1c06626e88aa8a085e5bfed0208e29"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/go-sql-driver/mysql",
|
||||
"Comment": "v1.2-26-g9543750",
|
||||
"Rev": "9543750295406ef070f7de8ae9c43ccddd44e15e"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/go-xorm/core",
|
||||
"Rev": "be6e7ac47dc57bd0ada25322fa526944f66ccaa6"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/go-xorm/xorm",
|
||||
"Comment": "v0.4.2-58-ge2889e5",
|
||||
"Rev": "e2889e5517600b82905f1d2ba8b70deb71823ffe"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/gosimple/slug",
|
||||
"Rev": "8d258463b4459f161f51d6a357edacd3eef9d663"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/jtolds/gls",
|
||||
"Rev": "f1ac7f4f24f50328e6bc838ca4437d1612a0243c"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/lib/pq",
|
||||
"Comment": "go1.0-cutoff-13-g19eeca3",
|
||||
"Rev": "19eeca3e30d2577b1761db471ec130810e67f532"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/macaron-contrib/binding",
|
||||
"Rev": "0fbe4b9707e6eb556ef843e5471592f55ce0a5e7"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/macaron-contrib/session",
|
||||
"Rev": "31e841d95c7302b9ac456c830ea2d6dfcef4f84a"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/mattn/go-sqlite3",
|
||||
"Rev": "e28cd440fabdd39b9520344bc26829f61db40ece"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/rainycape/unidecode",
|
||||
"Rev": "836ef0a715aedf08a12d595ed73ec8ed5b288cac"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/smartystreets/goconvey/convey",
|
||||
"Comment": "1.5.0-356-gfbc0a1c",
|
||||
"Rev": "fbc0a1c888f9f96263f9a559d1769905245f1123"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/streadway/amqp",
|
||||
"Rev": "150b7f24d6ad507e6026c13d85ce1f1391ac7400"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/net/context",
|
||||
"Rev": "972f0c5fbe4ae29e666c3f78c3ed42ae7a448b0a"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/oauth2",
|
||||
"Rev": "c58fcf0ffc1c772aa2e1ee4894bc19f2649263b2"
|
||||
},
|
||||
{
|
||||
"ImportPath": "gopkg.in/asn1-ber.v1",
|
||||
"Comment": "v1",
|
||||
"Rev": "9eae18c3681ae3d3c677ac2b80a8fe57de45fc09"
|
||||
},
|
||||
{
|
||||
"ImportPath": "gopkg.in/bufio.v1",
|
||||
"Comment": "v1",
|
||||
"Rev": "567b2bfa514e796916c4747494d6ff5132a1dfce"
|
||||
},
|
||||
{
|
||||
"ImportPath": "gopkg.in/ini.v1",
|
||||
"Comment": "v0-16-g1772191",
|
||||
"Rev": "177219109c97e7920c933e21c9b25f874357b237"
|
||||
},
|
||||
{
|
||||
"ImportPath": "gopkg.in/redis.v2",
|
||||
"Comment": "v2.3.2",
|
||||
"Rev": "e6179049628164864e6e84e973cfb56335748dea"
|
||||
}
|
||||
]
|
||||
}
|
||||
Godeps/Readme (generated, new file)
@@ -0,0 +1,5 @@
This directory tree is generated automatically by godep.

Please do not edit.

See https://github.com/tools/godep for more information.
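As a non-authoritative aside, a tree like this is normally produced and consumed with the godep tool linked above. The commands below are assumptions based on that project, not something stated in this diff; the `./pkg/...` pattern mirrors the Packages field in Godeps.json.

```bash
# Assumed godep workflow (sketch only; commands come from the godep project, not from this diff).
go get github.com/tools/godep   # install the godep tool
godep restore                   # check out each dependency at the Rev pinned in Godeps/Godeps.json
godep save ./pkg/...            # regenerate Godeps.json and Godeps/_workspace after changing dependencies
```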
Godeps/_workspace/.gitignore (generated, vendored, new file)
@@ -0,0 +1,2 @@
/pkg
/bin
Godeps/_workspace/src/github.com/BurntSushi/toml/.gitignore (generated, vendored, new file)
@@ -0,0 +1,5 @@
TAGS
tags
.*.swp
tomlcheck/tomlcheck
toml.test
Godeps/_workspace/src/github.com/BurntSushi/toml/.travis.yml (generated, vendored, new file)
@@ -0,0 +1,12 @@
language: go
go:
  - 1.1
  - 1.2
  - tip
install:
  - go install ./...
  - go get github.com/BurntSushi/toml-test
script:
  - export PATH="$PATH:$HOME/gopath/bin"
  - make test
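The CI steps above can also be run locally. This is only a sketch collecting the same commands; it assumes a working Go 1.x toolchain, and the `$HOME/gopath/bin` path is Travis-specific (locally it would typically be `$GOPATH/bin`).

```bash
# Local equivalent of the .travis.yml steps above (sketch; path assumptions noted in the lead-in).
go install ./...
go get github.com/BurntSushi/toml-test
export PATH="$PATH:$HOME/gopath/bin"
make test
```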
Godeps/_workspace/src/github.com/BurntSushi/toml/COMPATIBLE (generated, vendored, new file)
@@ -0,0 +1,3 @@
Compatible with TOML version
[v0.2.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.2.0.md)
Godeps/_workspace/src/github.com/BurntSushi/toml/COPYING (generated, vendored, new file)
@@ -0,0 +1,14 @@
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
Version 2, December 2004

Copyright (C) 2004 Sam Hocevar <sam@hocevar.net>

Everyone is permitted to copy and distribute verbatim or modified
copies of this license document, and changing it is allowed as long
as the name is changed.

DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION

0. You just DO WHAT THE FUCK YOU WANT TO.
Godeps/_workspace/src/github.com/BurntSushi/toml/Makefile (generated, vendored, new file)
@@ -0,0 +1,19 @@
install:
	go install ./...

test: install
	go test -v
	toml-test toml-test-decoder
	toml-test -encoder toml-test-encoder

fmt:
	gofmt -w *.go */*.go
	colcheck *.go */*.go

tags:
	find ./ -name '*.go' -print0 | xargs -0 gotags > TAGS

push:
	git push origin master
	git push github master
Godeps/_workspace/src/github.com/BurntSushi/toml/README.md (generated, vendored, new file)
@@ -0,0 +1,220 @@
|
||||
## TOML parser and encoder for Go with reflection
|
||||
|
||||
TOML stands for Tom's Obvious, Minimal Language. This Go package provides a
|
||||
reflection interface similar to Go's standard library `json` and `xml`
|
||||
packages. This package also supports the `encoding.TextUnmarshaler` and
|
||||
`encoding.TextMarshaler` interfaces so that you can define custom data
|
||||
representations. (There is an example of this below.)
|
||||
|
||||
Spec: https://github.com/mojombo/toml
|
||||
|
||||
Compatible with TOML version
|
||||
[v0.2.0](https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.2.0.md)
|
||||
|
||||
Documentation: http://godoc.org/github.com/BurntSushi/toml
|
||||
|
||||
Installation:
|
||||
|
||||
```bash
|
||||
go get github.com/BurntSushi/toml
|
||||
```
|
||||
|
||||
Try the toml validator:
|
||||
|
||||
```bash
|
||||
go get github.com/BurntSushi/toml/cmd/tomlv
|
||||
tomlv some-toml-file.toml
|
||||
```
|
||||
|
||||
[](https://travis-ci.org/BurntSushi/toml)
|
||||
|
||||
|
||||
### Testing
|
||||
|
||||
This package passes all tests in
|
||||
[toml-test](https://github.com/BurntSushi/toml-test) for both the decoder
|
||||
and the encoder.
|
||||
|
||||
### Examples
|
||||
|
||||
This package works similarly to how the Go standard library handles `XML`
|
||||
and `JSON`. Namely, data is loaded into Go values via reflection.
|
||||
|
||||
For the simplest example, consider some TOML file as just a list of keys
|
||||
and values:
|
||||
|
||||
```toml
|
||||
Age = 25
|
||||
Cats = [ "Cauchy", "Plato" ]
|
||||
Pi = 3.14
|
||||
Perfection = [ 6, 28, 496, 8128 ]
|
||||
DOB = 1987-07-05T05:45:00Z
|
||||
```
|
||||
|
||||
Which could be defined in Go as:
|
||||
|
||||
```go
|
||||
type Config struct {
|
||||
Age int
|
||||
Cats []string
|
||||
Pi float64
|
||||
Perfection []int
|
||||
DOB time.Time // requires `import time`
|
||||
}
|
||||
```
|
||||
|
||||
And then decoded with:
|
||||
|
||||
```go
|
||||
var conf Config
|
||||
if _, err := toml.Decode(tomlData, &conf); err != nil {
|
||||
// handle error
|
||||
}
|
||||
```
|
||||
|
||||
You can also use struct tags if your struct field name doesn't map to a TOML
|
||||
key value directly:
|
||||
|
||||
```toml
|
||||
some_key_NAME = "wat"
|
||||
```
|
||||
|
||||
```go
|
||||
type TOML struct {
|
||||
ObscureKey string `toml:"some_key_NAME"`
|
||||
}
|
||||
```
|
||||
|
||||
### Using the `encoding.TextUnmarshaler` interface
|
||||
|
||||
Here's an example that automatically parses duration strings into
|
||||
`time.Duration` values:
|
||||
|
||||
```toml
|
||||
[[song]]
|
||||
name = "Thunder Road"
|
||||
duration = "4m49s"
|
||||
|
||||
[[song]]
|
||||
name = "Stairway to Heaven"
|
||||
duration = "8m03s"
|
||||
```
|
||||
|
||||
Which can be decoded with:
|
||||
|
||||
```go
|
||||
type song struct {
|
||||
Name string
|
||||
Duration duration
|
||||
}
|
||||
type songs struct {
|
||||
Song []song
|
||||
}
|
||||
var favorites songs
|
||||
if _, err := toml.Decode(blob, &favorites); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
for _, s := range favorites.Song {
|
||||
fmt.Printf("%s (%s)\n", s.Name, s.Duration)
|
||||
}
|
||||
```
|
||||
|
||||
And you'll also need a `duration` type that satisfies the
|
||||
`encoding.TextUnmarshaler` interface:
|
||||
|
||||
```go
|
||||
type duration struct {
|
||||
time.Duration
|
||||
}
|
||||
|
||||
func (d *duration) UnmarshalText(text []byte) error {
|
||||
var err error
|
||||
d.Duration, err = time.ParseDuration(string(text))
|
||||
return err
|
||||
}
|
||||
```
|
||||
|
||||
### More complex usage
|
||||
|
||||
Here's an example of how to load the example from the official spec page:
|
||||
|
||||
```toml
|
||||
# This is a TOML document. Boom.
|
||||
|
||||
title = "TOML Example"
|
||||
|
||||
[owner]
|
||||
name = "Tom Preston-Werner"
|
||||
organization = "GitHub"
|
||||
bio = "GitHub Cofounder & CEO\nLikes tater tots and beer."
|
||||
dob = 1979-05-27T07:32:00Z # First class dates? Why not?
|
||||
|
||||
[database]
|
||||
server = "192.168.1.1"
|
||||
ports = [ 8001, 8001, 8002 ]
|
||||
connection_max = 5000
|
||||
enabled = true
|
||||
|
||||
[servers]
|
||||
|
||||
# You can indent as you please. Tabs or spaces. TOML don't care.
|
||||
[servers.alpha]
|
||||
ip = "10.0.0.1"
|
||||
dc = "eqdc10"
|
||||
|
||||
[servers.beta]
|
||||
ip = "10.0.0.2"
|
||||
dc = "eqdc10"
|
||||
|
||||
[clients]
|
||||
data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it
|
||||
|
||||
# Line breaks are OK when inside arrays
|
||||
hosts = [
|
||||
"alpha",
|
||||
"omega"
|
||||
]
|
||||
```
|
||||
|
||||
And the corresponding Go types are:
|
||||
|
||||
```go
|
||||
type tomlConfig struct {
|
||||
Title string
|
||||
Owner ownerInfo
|
||||
DB database `toml:"database"`
|
||||
Servers map[string]server
|
||||
Clients clients
|
||||
}
|
||||
|
||||
type ownerInfo struct {
|
||||
Name string
|
||||
Org string `toml:"organization"`
|
||||
Bio string
|
||||
DOB time.Time
|
||||
}
|
||||
|
||||
type database struct {
|
||||
Server string
|
||||
Ports []int
|
||||
ConnMax int `toml:"connection_max"`
|
||||
Enabled bool
|
||||
}
|
||||
|
||||
type server struct {
|
||||
IP string
|
||||
DC string
|
||||
}
|
||||
|
||||
type clients struct {
|
||||
Data [][]interface{}
|
||||
Hosts []string
|
||||
}
|
||||
```
|
||||
|
||||
Note that a case insensitive match will be tried if an exact match can't be
|
||||
found.
|
||||
|
||||
A working example of the above can be found in `_examples/example.{go,toml}`.
|
||||
|
||||
Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/COPYING (generated, vendored, new file)
@@ -0,0 +1,14 @@
|
||||
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
|
||||
Version 2, December 2004
|
||||
|
||||
Copyright (C) 2004 Sam Hocevar <sam@hocevar.net>
|
||||
|
||||
Everyone is permitted to copy and distribute verbatim or modified
|
||||
copies of this license document, and changing it is allowed as long
|
||||
as the name is changed.
|
||||
|
||||
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
|
||||
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
|
||||
|
||||
0. You just DO WHAT THE FUCK YOU WANT TO.
|
||||
|
||||
Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/README.md (generated, vendored, new file)
@@ -0,0 +1,14 @@
|
||||
# Implements the TOML test suite interface
|
||||
|
||||
This is an implementation of the interface expected by
|
||||
[toml-test](https://github.com/BurntSushi/toml-test) for my
|
||||
[toml parser written in Go](https://github.com/BurntSushi/toml).
|
||||
In particular, it maps TOML data on `stdin` to a JSON format on `stdout`.
|
||||
|
||||
|
||||
Compatible with TOML version
|
||||
[v0.2.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.2.0.md)
|
||||
|
||||
Compatible with `toml-test` version
|
||||
[v0.2.0](https://github.com/BurntSushi/toml-test/tree/v0.2.0)
|
||||
|
||||
Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/main.go (generated, vendored, new file)
@@ -0,0 +1,90 @@
|
||||
// Command toml-test-decoder satisfies the toml-test interface for testing
|
||||
// TOML decoders. Namely, it accepts TOML on stdin and outputs JSON on stdout.
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"flag"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"path"
|
||||
"time"
|
||||
|
||||
"github.com/BurntSushi/toml"
|
||||
)
|
||||
|
||||
func init() {
|
||||
log.SetFlags(0)
|
||||
|
||||
flag.Usage = usage
|
||||
flag.Parse()
|
||||
}
|
||||
|
||||
func usage() {
|
||||
log.Printf("Usage: %s < toml-file\n", path.Base(os.Args[0]))
|
||||
flag.PrintDefaults()
|
||||
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
func main() {
|
||||
if flag.NArg() != 0 {
|
||||
flag.Usage()
|
||||
}
|
||||
|
||||
var tmp interface{}
|
||||
if _, err := toml.DecodeReader(os.Stdin, &tmp); err != nil {
|
||||
log.Fatalf("Error decoding TOML: %s", err)
|
||||
}
|
||||
|
||||
typedTmp := translate(tmp)
|
||||
if err := json.NewEncoder(os.Stdout).Encode(typedTmp); err != nil {
|
||||
log.Fatalf("Error encoding JSON: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
func translate(tomlData interface{}) interface{} {
|
||||
switch orig := tomlData.(type) {
|
||||
case map[string]interface{}:
|
||||
typed := make(map[string]interface{}, len(orig))
|
||||
for k, v := range orig {
|
||||
typed[k] = translate(v)
|
||||
}
|
||||
return typed
|
||||
case []map[string]interface{}:
|
||||
typed := make([]map[string]interface{}, len(orig))
|
||||
for i, v := range orig {
|
||||
typed[i] = translate(v).(map[string]interface{})
|
||||
}
|
||||
return typed
|
||||
case []interface{}:
|
||||
typed := make([]interface{}, len(orig))
|
||||
for i, v := range orig {
|
||||
typed[i] = translate(v)
|
||||
}
|
||||
|
||||
// We don't really need to tag arrays, but let's be future proof.
|
||||
// (If TOML ever supports tuples, we'll need this.)
|
||||
return tag("array", typed)
|
||||
case time.Time:
|
||||
return tag("datetime", orig.Format("2006-01-02T15:04:05Z"))
|
||||
case bool:
|
||||
return tag("bool", fmt.Sprintf("%v", orig))
|
||||
case int64:
|
||||
return tag("integer", fmt.Sprintf("%d", orig))
|
||||
case float64:
|
||||
return tag("float", fmt.Sprintf("%v", orig))
|
||||
case string:
|
||||
return tag("string", orig)
|
||||
}
|
||||
|
||||
panic(fmt.Sprintf("Unknown type: %T", tomlData))
|
||||
}
|
||||
|
||||
func tag(typeName string, data interface{}) map[string]interface{} {
|
||||
return map[string]interface{}{
|
||||
"type": typeName,
|
||||
"value": data,
|
||||
}
|
||||
}
|
||||
Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/COPYING (generated, vendored, new file)
@@ -0,0 +1,14 @@
|
||||
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
|
||||
Version 2, December 2004
|
||||
|
||||
Copyright (C) 2004 Sam Hocevar <sam@hocevar.net>
|
||||
|
||||
Everyone is permitted to copy and distribute verbatim or modified
|
||||
copies of this license document, and changing it is allowed as long
|
||||
as the name is changed.
|
||||
|
||||
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
|
||||
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
|
||||
|
||||
0. You just DO WHAT THE FUCK YOU WANT TO.
|
||||
|
||||
Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/README.md (generated, vendored, new file)
@@ -0,0 +1,14 @@
|
||||
# Implements the TOML test suite interface for TOML encoders
|
||||
|
||||
This is an implementation of the interface expected by
|
||||
[toml-test](https://github.com/BurntSushi/toml-test) for the
|
||||
[TOML encoder](https://github.com/BurntSushi/toml).
|
||||
In particular, it maps JSON data on `stdin` to a TOML format on `stdout`.
|
||||
|
||||
|
||||
Compatible with TOML version
|
||||
[v0.2.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.2.0.md)
|
||||
|
||||
Compatible with `toml-test` version
|
||||
[v0.2.0](https://github.com/BurntSushi/toml-test/tree/v0.2.0)
|
||||
|
||||
Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/main.go (generated, vendored, new file)
@@ -0,0 +1,131 @@
|
||||
// Command toml-test-encoder satisfies the toml-test interface for testing
|
||||
// TOML encoders. Namely, it accepts JSON on stdin and outputs TOML on stdout.
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"flag"
|
||||
"log"
|
||||
"os"
|
||||
"path"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/BurntSushi/toml"
|
||||
)
|
||||
|
||||
func init() {
|
||||
log.SetFlags(0)
|
||||
|
||||
flag.Usage = usage
|
||||
flag.Parse()
|
||||
}
|
||||
|
||||
func usage() {
|
||||
log.Printf("Usage: %s < json-file\n", path.Base(os.Args[0]))
|
||||
flag.PrintDefaults()
|
||||
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
func main() {
|
||||
if flag.NArg() != 0 {
|
||||
flag.Usage()
|
||||
}
|
||||
|
||||
var tmp interface{}
|
||||
if err := json.NewDecoder(os.Stdin).Decode(&tmp); err != nil {
|
||||
log.Fatalf("Error decoding JSON: %s", err)
|
||||
}
|
||||
|
||||
tomlData := translate(tmp)
|
||||
if err := toml.NewEncoder(os.Stdout).Encode(tomlData); err != nil {
|
||||
log.Fatalf("Error encoding TOML: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
func translate(typedJson interface{}) interface{} {
|
||||
switch v := typedJson.(type) {
|
||||
case map[string]interface{}:
|
||||
if len(v) == 2 && in("type", v) && in("value", v) {
|
||||
return untag(v)
|
||||
}
|
||||
m := make(map[string]interface{}, len(v))
|
||||
for k, v2 := range v {
|
||||
m[k] = translate(v2)
|
||||
}
|
||||
return m
|
||||
case []interface{}:
|
||||
tabArray := make([]map[string]interface{}, len(v))
|
||||
for i := range v {
|
||||
if m, ok := translate(v[i]).(map[string]interface{}); ok {
|
||||
tabArray[i] = m
|
||||
} else {
|
||||
log.Fatalf("JSON arrays may only contain objects. This " +
|
||||
"corresponds to only tables being allowed in " +
|
||||
"TOML table arrays.")
|
||||
}
|
||||
}
|
||||
return tabArray
|
||||
}
|
||||
log.Fatalf("Unrecognized JSON format '%T'.", typedJson)
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
func untag(typed map[string]interface{}) interface{} {
|
||||
t := typed["type"].(string)
|
||||
v := typed["value"]
|
||||
switch t {
|
||||
case "string":
|
||||
return v.(string)
|
||||
case "integer":
|
||||
v := v.(string)
|
||||
n, err := strconv.Atoi(v)
|
||||
if err != nil {
|
||||
log.Fatalf("Could not parse '%s' as integer: %s", v, err)
|
||||
}
|
||||
return n
|
||||
case "float":
|
||||
v := v.(string)
|
||||
f, err := strconv.ParseFloat(v, 64)
|
||||
if err != nil {
|
||||
log.Fatalf("Could not parse '%s' as float64: %s", v, err)
|
||||
}
|
||||
return f
|
||||
case "datetime":
|
||||
v := v.(string)
|
||||
t, err := time.Parse("2006-01-02T15:04:05Z", v)
|
||||
if err != nil {
|
||||
log.Fatalf("Could not parse '%s' as a datetime: %s", v, err)
|
||||
}
|
||||
return t
|
||||
case "bool":
|
||||
v := v.(string)
|
||||
switch v {
|
||||
case "true":
|
||||
return true
|
||||
case "false":
|
||||
return false
|
||||
}
|
||||
log.Fatalf("Could not parse '%s' as a boolean.", v)
|
||||
case "array":
|
||||
v := v.([]interface{})
|
||||
array := make([]interface{}, len(v))
|
||||
for i := range v {
|
||||
if m, ok := v[i].(map[string]interface{}); ok {
|
||||
array[i] = untag(m)
|
||||
} else {
|
||||
log.Fatalf("Arrays may only contain other arrays or "+
|
||||
"primitive values, but found a '%T'.", m)
|
||||
}
|
||||
}
|
||||
return array
|
||||
}
|
||||
log.Fatalf("Unrecognized tag type '%s'.", t)
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
func in(key string, m map[string]interface{}) bool {
|
||||
_, ok := m[key]
|
||||
return ok
|
||||
}
|
||||
Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/COPYING (generated, vendored, new file)
@@ -0,0 +1,14 @@
|
||||
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
|
||||
Version 2, December 2004
|
||||
|
||||
Copyright (C) 2004 Sam Hocevar <sam@hocevar.net>
|
||||
|
||||
Everyone is permitted to copy and distribute verbatim or modified
|
||||
copies of this license document, and changing it is allowed as long
|
||||
as the name is changed.
|
||||
|
||||
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
|
||||
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
|
||||
|
||||
0. You just DO WHAT THE FUCK YOU WANT TO.
|
||||
|
||||
Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/README.md (generated, vendored, new file)
@@ -0,0 +1,22 @@
|
||||
# TOML Validator
|
||||
|
||||
If Go is installed, it's simple to try it out:
|
||||
|
||||
```bash
|
||||
go get github.com/BurntSushi/toml/cmd/tomlv
|
||||
tomlv some-toml-file.toml
|
||||
```
|
||||
|
||||
You can see the types of every key in a TOML file with:
|
||||
|
||||
```bash
|
||||
tomlv -types some-toml-file.toml
|
||||
```
|
||||
|
||||
At the moment, only one error message is reported at a time. Error messages
|
||||
include line numbers. No output means that the files given are valid TOML, or
|
||||
there is a bug in `tomlv`.
|
||||
|
||||
Compatible with TOML version
|
||||
[v0.1.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.1.0.md)
|
||||
|
||||
Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/main.go (generated, vendored, new file)
@@ -0,0 +1,61 @@
|
||||
// Command tomlv validates TOML documents and prints each key's type.
|
||||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"path"
|
||||
"strings"
|
||||
"text/tabwriter"
|
||||
|
||||
"github.com/BurntSushi/toml"
|
||||
)
|
||||
|
||||
var (
|
||||
flagTypes = false
|
||||
)
|
||||
|
||||
func init() {
|
||||
log.SetFlags(0)
|
||||
|
||||
flag.BoolVar(&flagTypes, "types", flagTypes,
|
||||
"When set, the types of every defined key will be shown.")
|
||||
|
||||
flag.Usage = usage
|
||||
flag.Parse()
|
||||
}
|
||||
|
||||
func usage() {
|
||||
log.Printf("Usage: %s toml-file [ toml-file ... ]\n",
|
||||
path.Base(os.Args[0]))
|
||||
flag.PrintDefaults()
|
||||
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
func main() {
|
||||
if flag.NArg() < 1 {
|
||||
flag.Usage()
|
||||
}
|
||||
for _, f := range flag.Args() {
|
||||
var tmp interface{}
|
||||
md, err := toml.DecodeFile(f, &tmp)
|
||||
if err != nil {
|
||||
log.Fatalf("Error in '%s': %s", f, err)
|
||||
}
|
||||
if flagTypes {
|
||||
printTypes(md)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func printTypes(md toml.MetaData) {
|
||||
tabw := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0)
|
||||
for _, key := range md.Keys() {
|
||||
fmt.Fprintf(tabw, "%s%s\t%s\n",
|
||||
strings.Repeat(" ", len(key)-1), key, md.Type(key...))
|
||||
}
|
||||
tabw.Flush()
|
||||
}
|
||||
Godeps/_workspace/src/github.com/BurntSushi/toml/decode.go (generated, vendored, new file)
@@ -0,0 +1,492 @@
|
||||
package toml
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"math"
|
||||
"reflect"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
var e = fmt.Errorf
|
||||
|
||||
// Unmarshaler is the interface implemented by objects that can unmarshal a
|
||||
// TOML description of themselves.
|
||||
type Unmarshaler interface {
|
||||
UnmarshalTOML(interface{}) error
|
||||
}
|
||||
|
||||
// Unmarshal decodes the contents of `p` in TOML format into a pointer `v`.
|
||||
func Unmarshal(p []byte, v interface{}) error {
|
||||
_, err := Decode(string(p), v)
|
||||
return err
|
||||
}
|
||||
|
||||
// Primitive is a TOML value that hasn't been decoded into a Go value.
|
||||
// When using the various `Decode*` functions, the type `Primitive` may
|
||||
// be given to any value, and its decoding will be delayed.
|
||||
//
|
||||
// A `Primitive` value can be decoded using the `PrimitiveDecode` function.
|
||||
//
|
||||
// The underlying representation of a `Primitive` value is subject to change.
|
||||
// Do not rely on it.
|
||||
//
|
||||
// N.B. Primitive values are still parsed, so using them will only avoid
|
||||
// the overhead of reflection. They can be useful when you don't know the
|
||||
// exact type of TOML data until run time.
|
||||
type Primitive struct {
|
||||
undecoded interface{}
|
||||
context Key
|
||||
}
|
||||
|
||||
// DEPRECATED!
|
||||
//
|
||||
// Use MetaData.PrimitiveDecode instead.
|
||||
func PrimitiveDecode(primValue Primitive, v interface{}) error {
|
||||
md := MetaData{decoded: make(map[string]bool)}
|
||||
return md.unify(primValue.undecoded, rvalue(v))
|
||||
}
|
||||
|
||||
// PrimitiveDecode is just like the other `Decode*` functions, except it
|
||||
// decodes a TOML value that has already been parsed. Valid primitive values
|
||||
// can *only* be obtained from values filled by the decoder functions,
|
||||
// including this method. (i.e., `v` may contain more `Primitive`
|
||||
// values.)
|
||||
//
|
||||
// Meta data for primitive values is included in the meta data returned by
|
||||
// the `Decode*` functions with one exception: keys returned by the Undecoded
|
||||
// method will only reflect keys that were decoded. Namely, any keys hidden
|
||||
// behind a Primitive will be considered undecoded. Executing this method will
|
||||
// update the undecoded keys in the meta data. (See the example.)
|
||||
func (md *MetaData) PrimitiveDecode(primValue Primitive, v interface{}) error {
|
||||
md.context = primValue.context
|
||||
defer func() { md.context = nil }()
|
||||
return md.unify(primValue.undecoded, rvalue(v))
|
||||
}
|
||||
|
||||
// Decode will decode the contents of `data` in TOML format into a pointer
|
||||
// `v`.
|
||||
//
|
||||
// TOML hashes correspond to Go structs or maps. (Dealer's choice. They can be
|
||||
// used interchangeably.)
|
||||
//
|
||||
// TOML arrays of tables correspond to either a slice of structs or a slice
|
||||
// of maps.
|
||||
//
|
||||
// TOML datetimes correspond to Go `time.Time` values.
|
||||
//
|
||||
// All other TOML types (float, string, int, bool and array) correspond
|
||||
// to the obvious Go types.
|
||||
//
|
||||
// An exception to the above rules is if a type implements the
|
||||
// encoding.TextUnmarshaler interface. In this case, any primitive TOML value
|
||||
// (floats, strings, integers, booleans and datetimes) will be converted to
|
||||
// a byte string and given to the value's UnmarshalText method. See the
|
||||
// Unmarshaler example for a demonstration with time duration strings.
|
||||
//
|
||||
// Key mapping
|
||||
//
|
||||
// TOML keys can map to either keys in a Go map or field names in a Go
|
||||
// struct. The special `toml` struct tag may be used to map TOML keys to
|
||||
// struct fields that don't match the key name exactly. (See the example.)
|
||||
// A case insensitive match to struct names will be tried if an exact match
|
||||
// can't be found.
|
||||
//
|
||||
// The mapping between TOML values and Go values is loose. That is, there
|
||||
// may exist TOML values that cannot be placed into your representation, and
|
||||
// there may be parts of your representation that do not correspond to
|
||||
// TOML values. This loose mapping can be made stricter by using the IsDefined
|
||||
// and/or Undecoded methods on the MetaData returned.
|
||||
//
|
||||
// This decoder will not handle cyclic types. If a cyclic type is passed,
|
||||
// `Decode` will not terminate.
|
||||
func Decode(data string, v interface{}) (MetaData, error) {
|
||||
p, err := parse(data)
|
||||
if err != nil {
|
||||
return MetaData{}, err
|
||||
}
|
||||
md := MetaData{
|
||||
p.mapping, p.types, p.ordered,
|
||||
make(map[string]bool, len(p.ordered)), nil,
|
||||
}
|
||||
return md, md.unify(p.mapping, rvalue(v))
|
||||
}
|
||||
|
||||
// DecodeFile is just like Decode, except it will automatically read the
|
||||
// contents of the file at `fpath` and decode it for you.
|
||||
func DecodeFile(fpath string, v interface{}) (MetaData, error) {
|
||||
bs, err := ioutil.ReadFile(fpath)
|
||||
if err != nil {
|
||||
return MetaData{}, err
|
||||
}
|
||||
return Decode(string(bs), v)
|
||||
}
|
||||
|
||||
// DecodeReader is just like Decode, except it will consume all bytes
|
||||
// from the reader and decode it for you.
|
||||
func DecodeReader(r io.Reader, v interface{}) (MetaData, error) {
|
||||
bs, err := ioutil.ReadAll(r)
|
||||
if err != nil {
|
||||
return MetaData{}, err
|
||||
}
|
||||
return Decode(string(bs), v)
|
||||
}
|
||||
|
||||
// unify performs a sort of type unification based on the structure of `rv`,
|
||||
// which is the client representation.
|
||||
//
|
||||
// Any type mismatch produces an error. Finding a type that we don't know
|
||||
// how to handle produces an unsupported type error.
|
||||
func (md *MetaData) unify(data interface{}, rv reflect.Value) error {
|
||||
|
||||
// Special case. Look for a `Primitive` value.
|
||||
if rv.Type() == reflect.TypeOf((*Primitive)(nil)).Elem() {
|
||||
// Save the undecoded data and the key context into the primitive
|
||||
// value.
|
||||
context := make(Key, len(md.context))
|
||||
copy(context, md.context)
|
||||
rv.Set(reflect.ValueOf(Primitive{
|
||||
undecoded: data,
|
||||
context: context,
|
||||
}))
|
||||
return nil
|
||||
}
|
||||
|
||||
// Special case. Unmarshaler Interface support.
|
||||
if rv.CanAddr() {
|
||||
if v, ok := rv.Addr().Interface().(Unmarshaler); ok {
|
||||
return v.UnmarshalTOML(data)
|
||||
}
|
||||
}
|
||||
|
||||
// Special case. Handle time.Time values specifically.
|
||||
// TODO: Remove this code when we decide to drop support for Go 1.1.
|
||||
// This isn't necessary in Go 1.2 because time.Time satisfies the encoding
|
||||
// interfaces.
|
||||
if rv.Type().AssignableTo(rvalue(time.Time{}).Type()) {
|
||||
return md.unifyDatetime(data, rv)
|
||||
}
|
||||
|
||||
// Special case. Look for a value satisfying the TextUnmarshaler interface.
|
||||
if v, ok := rv.Interface().(TextUnmarshaler); ok {
|
||||
return md.unifyText(data, v)
|
||||
}
|
||||
// BUG(burntsushi)
|
||||
// The behavior here is incorrect whenever a Go type satisfies the
|
||||
// encoding.TextUnmarshaler interface but also corresponds to a TOML
|
||||
// hash or array. In particular, the unmarshaler should only be applied
|
||||
// to primitive TOML values. But at this point, it will be applied to
|
||||
// all kinds of values and produce an incorrect error whenever those values
|
||||
// are hashes or arrays (including arrays of tables).
|
||||
|
||||
k := rv.Kind()
|
||||
|
||||
// laziness
|
||||
if k >= reflect.Int && k <= reflect.Uint64 {
|
||||
return md.unifyInt(data, rv)
|
||||
}
|
||||
switch k {
|
||||
case reflect.Ptr:
|
||||
elem := reflect.New(rv.Type().Elem())
|
||||
err := md.unify(data, reflect.Indirect(elem))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rv.Set(elem)
|
||||
return nil
|
||||
case reflect.Struct:
|
||||
return md.unifyStruct(data, rv)
|
||||
case reflect.Map:
|
||||
return md.unifyMap(data, rv)
|
||||
case reflect.Array:
|
||||
return md.unifyArray(data, rv)
|
||||
case reflect.Slice:
|
||||
return md.unifySlice(data, rv)
|
||||
case reflect.String:
|
||||
return md.unifyString(data, rv)
|
||||
case reflect.Bool:
|
||||
return md.unifyBool(data, rv)
|
||||
case reflect.Interface:
|
||||
// we only support empty interfaces.
|
||||
if rv.NumMethod() > 0 {
|
||||
return e("Unsupported type '%s'.", rv.Kind())
|
||||
}
|
||||
return md.unifyAnything(data, rv)
|
||||
case reflect.Float32:
|
||||
fallthrough
|
||||
case reflect.Float64:
|
||||
return md.unifyFloat64(data, rv)
|
||||
}
|
||||
return e("Unsupported type '%s'.", rv.Kind())
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyStruct(mapping interface{}, rv reflect.Value) error {
|
||||
tmap, ok := mapping.(map[string]interface{})
|
||||
if !ok {
|
||||
return mismatch(rv, "map", mapping)
|
||||
}
|
||||
|
||||
for key, datum := range tmap {
|
||||
var f *field
|
||||
fields := cachedTypeFields(rv.Type())
|
||||
for i := range fields {
|
||||
ff := &fields[i]
|
||||
if ff.name == key {
|
||||
f = ff
|
||||
break
|
||||
}
|
||||
if f == nil && strings.EqualFold(ff.name, key) {
|
||||
f = ff
|
||||
}
|
||||
}
|
||||
if f != nil {
|
||||
subv := rv
|
||||
for _, i := range f.index {
|
||||
subv = indirect(subv.Field(i))
|
||||
}
|
||||
if isUnifiable(subv) {
|
||||
md.decoded[md.context.add(key).String()] = true
|
||||
md.context = append(md.context, key)
|
||||
if err := md.unify(datum, subv); err != nil {
|
||||
return e("Type mismatch for '%s.%s': %s",
|
||||
rv.Type().String(), f.name, err)
|
||||
}
|
||||
md.context = md.context[0 : len(md.context)-1]
|
||||
} else if f.name != "" {
|
||||
// Bad user! No soup for you!
|
||||
return e("Field '%s.%s' is unexported, and therefore cannot "+
|
||||
"be loaded with reflection.", rv.Type().String(), f.name)
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyMap(mapping interface{}, rv reflect.Value) error {
|
||||
tmap, ok := mapping.(map[string]interface{})
|
||||
if !ok {
|
||||
return badtype("map", mapping)
|
||||
}
|
||||
if rv.IsNil() {
|
||||
rv.Set(reflect.MakeMap(rv.Type()))
|
||||
}
|
||||
for k, v := range tmap {
|
||||
md.decoded[md.context.add(k).String()] = true
|
||||
md.context = append(md.context, k)
|
||||
|
||||
rvkey := indirect(reflect.New(rv.Type().Key()))
|
||||
rvval := reflect.Indirect(reflect.New(rv.Type().Elem()))
|
||||
if err := md.unify(v, rvval); err != nil {
|
||||
return err
|
||||
}
|
||||
md.context = md.context[0 : len(md.context)-1]
|
||||
|
||||
rvkey.SetString(k)
|
||||
rv.SetMapIndex(rvkey, rvval)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyArray(data interface{}, rv reflect.Value) error {
|
||||
datav := reflect.ValueOf(data)
|
||||
if datav.Kind() != reflect.Slice {
|
||||
return badtype("slice", data)
|
||||
}
|
||||
sliceLen := datav.Len()
|
||||
if sliceLen != rv.Len() {
|
||||
return e("expected array length %d; got TOML array of length %d",
|
||||
rv.Len(), sliceLen)
|
||||
}
|
||||
return md.unifySliceArray(datav, rv)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifySlice(data interface{}, rv reflect.Value) error {
|
||||
datav := reflect.ValueOf(data)
|
||||
if datav.Kind() != reflect.Slice {
|
||||
return badtype("slice", data)
|
||||
}
|
||||
sliceLen := datav.Len()
|
||||
if rv.IsNil() {
|
||||
rv.Set(reflect.MakeSlice(rv.Type(), sliceLen, sliceLen))
|
||||
}
|
||||
return md.unifySliceArray(datav, rv)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifySliceArray(data, rv reflect.Value) error {
|
||||
sliceLen := data.Len()
|
||||
for i := 0; i < sliceLen; i++ {
|
||||
v := data.Index(i).Interface()
|
||||
sliceval := indirect(rv.Index(i))
|
||||
if err := md.unify(v, sliceval); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyDatetime(data interface{}, rv reflect.Value) error {
|
||||
if _, ok := data.(time.Time); ok {
|
||||
rv.Set(reflect.ValueOf(data))
|
||||
return nil
|
||||
}
|
||||
return badtype("time.Time", data)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyString(data interface{}, rv reflect.Value) error {
|
||||
if s, ok := data.(string); ok {
|
||||
rv.SetString(s)
|
||||
return nil
|
||||
}
|
||||
return badtype("string", data)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyFloat64(data interface{}, rv reflect.Value) error {
|
||||
if num, ok := data.(float64); ok {
|
||||
switch rv.Kind() {
|
||||
case reflect.Float32:
|
||||
fallthrough
|
||||
case reflect.Float64:
|
||||
rv.SetFloat(num)
|
||||
default:
|
||||
panic("bug")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return badtype("float", data)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyInt(data interface{}, rv reflect.Value) error {
|
||||
if num, ok := data.(int64); ok {
|
||||
if rv.Kind() >= reflect.Int && rv.Kind() <= reflect.Int64 {
|
||||
switch rv.Kind() {
|
||||
case reflect.Int, reflect.Int64:
|
||||
// No bounds checking necessary.
|
||||
case reflect.Int8:
|
||||
if num < math.MinInt8 || num > math.MaxInt8 {
|
||||
return e("Value '%d' is out of range for int8.", num)
|
||||
}
|
||||
case reflect.Int16:
|
||||
if num < math.MinInt16 || num > math.MaxInt16 {
|
||||
return e("Value '%d' is out of range for int16.", num)
|
||||
}
|
||||
case reflect.Int32:
|
||||
if num < math.MinInt32 || num > math.MaxInt32 {
|
||||
return e("Value '%d' is out of range for int32.", num)
|
||||
}
|
||||
}
|
||||
rv.SetInt(num)
|
||||
} else if rv.Kind() >= reflect.Uint && rv.Kind() <= reflect.Uint64 {
|
||||
unum := uint64(num)
|
||||
switch rv.Kind() {
|
||||
case reflect.Uint, reflect.Uint64:
|
||||
// No bounds checking necessary.
|
||||
case reflect.Uint8:
|
||||
if num < 0 || unum > math.MaxUint8 {
|
||||
return e("Value '%d' is out of range for uint8.", num)
|
||||
}
|
||||
case reflect.Uint16:
|
||||
if num < 0 || unum > math.MaxUint16 {
|
||||
return e("Value '%d' is out of range for uint16.", num)
|
||||
}
|
||||
case reflect.Uint32:
|
||||
if num < 0 || unum > math.MaxUint32 {
|
||||
return e("Value '%d' is out of range for uint32.", num)
|
||||
}
|
||||
}
|
||||
rv.SetUint(unum)
|
||||
} else {
|
||||
panic("unreachable")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return badtype("integer", data)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyBool(data interface{}, rv reflect.Value) error {
|
||||
if b, ok := data.(bool); ok {
|
||||
rv.SetBool(b)
|
||||
return nil
|
||||
}
|
||||
return badtype("boolean", data)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyAnything(data interface{}, rv reflect.Value) error {
|
||||
rv.Set(reflect.ValueOf(data))
|
||||
return nil
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyText(data interface{}, v TextUnmarshaler) error {
|
||||
var s string
|
||||
switch sdata := data.(type) {
|
||||
case TextMarshaler:
|
||||
text, err := sdata.MarshalText()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
s = string(text)
|
||||
case fmt.Stringer:
|
||||
s = sdata.String()
|
||||
case string:
|
||||
s = sdata
|
||||
case bool:
|
||||
s = fmt.Sprintf("%v", sdata)
|
||||
case int64:
|
||||
s = fmt.Sprintf("%d", sdata)
|
||||
case float64:
|
||||
s = fmt.Sprintf("%f", sdata)
|
||||
default:
|
||||
return badtype("primitive (string-like)", data)
|
||||
}
|
||||
if err := v.UnmarshalText([]byte(s)); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// rvalue returns a reflect.Value of `v`. All pointers are resolved.
|
||||
func rvalue(v interface{}) reflect.Value {
|
||||
return indirect(reflect.ValueOf(v))
|
||||
}
|
||||
|
||||
// indirect returns the value pointed to by a pointer.
|
||||
// Pointers are followed until the value is not a pointer.
|
||||
// New values are allocated for each nil pointer.
|
||||
//
|
||||
// An exception to this rule is if the value satisfies an interface of
|
||||
// interest to us (like encoding.TextUnmarshaler).
|
||||
func indirect(v reflect.Value) reflect.Value {
|
||||
if v.Kind() != reflect.Ptr {
|
||||
if v.CanAddr() {
|
||||
pv := v.Addr()
|
||||
if _, ok := pv.Interface().(TextUnmarshaler); ok {
|
||||
return pv
|
||||
}
|
||||
}
|
||||
return v
|
||||
}
|
||||
if v.IsNil() {
|
||||
v.Set(reflect.New(v.Type().Elem()))
|
||||
}
|
||||
return indirect(reflect.Indirect(v))
|
||||
}
|
||||
|
||||
func isUnifiable(rv reflect.Value) bool {
|
||||
if rv.CanSet() {
|
||||
return true
|
||||
}
|
||||
if _, ok := rv.Interface().(TextUnmarshaler); ok {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func badtype(expected string, data interface{}) error {
|
||||
return e("Expected %s but found '%T'.", expected, data)
|
||||
}
|
||||
|
||||
func mismatch(user reflect.Value, expected string, data interface{}) error {
|
||||
return e("Type mismatch for %s. Expected %s but found '%T'.",
|
||||
user.Type().String(), expected, data)
|
||||
}
|
||||
Godeps/_workspace/src/github.com/BurntSushi/toml/decode_meta.go (generated, vendored, new file)
@@ -0,0 +1,122 @@
|
||||
package toml
|
||||
|
||||
import "strings"
|
||||
|
||||
// MetaData allows access to meta information about TOML data that may not
|
||||
// be inferrable via reflection. In particular, whether a key has been defined
|
||||
// and the TOML type of a key.
|
||||
type MetaData struct {
|
||||
mapping map[string]interface{}
|
||||
types map[string]tomlType
|
||||
keys []Key
|
||||
decoded map[string]bool
|
||||
context Key // Used only during decoding.
|
||||
}
|
||||
|
||||
// IsDefined returns true if the key given exists in the TOML data. The key
|
||||
// should be specified hierarchially. e.g.,
|
||||
//
|
||||
// // access the TOML key 'a.b.c'
|
||||
// IsDefined("a", "b", "c")
|
||||
//
|
||||
// IsDefined will return false if an empty key given. Keys are case sensitive.
|
||||
func (md *MetaData) IsDefined(key ...string) bool {
|
||||
if len(key) == 0 {
|
||||
return false
|
||||
}
|
||||
|
||||
var hash map[string]interface{}
|
||||
var ok bool
|
||||
var hashOrVal interface{} = md.mapping
|
||||
for _, k := range key {
|
||||
if hash, ok = hashOrVal.(map[string]interface{}); !ok {
|
||||
return false
|
||||
}
|
||||
if hashOrVal, ok = hash[k]; !ok {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Type returns a string representation of the type of the key specified.
|
||||
//
|
||||
// Type will return the empty string if given an empty key or a key that
|
||||
// does not exist. Keys are case sensitive.
|
||||
func (md *MetaData) Type(key ...string) string {
|
||||
fullkey := strings.Join(key, ".")
|
||||
if typ, ok := md.types[fullkey]; ok {
|
||||
return typ.typeString()
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// Key is the type of any TOML key, including key groups. Use (MetaData).Keys
|
||||
// to get values of this type.
|
||||
type Key []string
|
||||
|
||||
func (k Key) String() string {
|
||||
return strings.Join(k, ".")
|
||||
}
|
||||
|
||||
func (k Key) maybeQuotedAll() string {
|
||||
var ss []string
|
||||
for i := range k {
|
||||
ss = append(ss, k.maybeQuoted(i))
|
||||
}
|
||||
return strings.Join(ss, ".")
|
||||
}
|
||||
|
||||
func (k Key) maybeQuoted(i int) string {
|
||||
quote := false
|
||||
for _, c := range k[i] {
|
||||
if !isBareKeyChar(c) {
|
||||
quote = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if quote {
|
||||
return "\"" + strings.Replace(k[i], "\"", "\\\"", -1) + "\""
|
||||
} else {
|
||||
return k[i]
|
||||
}
|
||||
}
|
||||
|
||||
func (k Key) add(piece string) Key {
|
||||
newKey := make(Key, len(k)+1)
|
||||
copy(newKey, k)
|
||||
newKey[len(k)] = piece
|
||||
return newKey
|
||||
}
|
||||
|
||||
// Keys returns a slice of every key in the TOML data, including key groups.
|
||||
// Each key is itself a slice, where the first element is the top of the
|
||||
// hierarchy and the last is the most specific.
|
||||
//
|
||||
// The list will have the same order as the keys appeared in the TOML data.
|
||||
//
|
||||
// All keys returned are non-empty.
|
||||
func (md *MetaData) Keys() []Key {
|
||||
return md.keys
|
||||
}
|
||||
|
||||
// Undecoded returns all keys that have not been decoded in the order in which
|
||||
// they appear in the original TOML document.
|
||||
//
|
||||
// This includes keys that haven't been decoded because of a Primitive value.
|
||||
// Once the Primitive value is decoded, the keys will be considered decoded.
|
||||
//
|
||||
// Also note that decoding into an empty interface will result in no decoding,
|
||||
// and so no keys will be considered decoded.
|
||||
//
|
||||
// In this sense, the Undecoded keys correspond to keys in the TOML document
|
||||
// that do not have a concrete type in your representation.
|
||||
func (md *MetaData) Undecoded() []Key {
|
||||
undecoded := make([]Key, 0, len(md.keys))
|
||||
for _, key := range md.keys {
|
||||
if !md.decoded[key.String()] {
|
||||
undecoded = append(undecoded, key)
|
||||
}
|
||||
}
|
||||
return undecoded
|
||||
}
|
||||
Godeps/_workspace/src/github.com/BurntSushi/toml/decode_test.go (generated, vendored, new file)
@@ -0,0 +1,950 @@
|
||||
package toml
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"reflect"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func init() {
|
||||
log.SetFlags(0)
|
||||
}
|
||||
|
||||
func TestDecodeSimple(t *testing.T) {
|
||||
var testSimple = `
|
||||
age = 250
|
||||
andrew = "gallant"
|
||||
kait = "brady"
|
||||
now = 1987-07-05T05:45:00Z
|
||||
yesOrNo = true
|
||||
pi = 3.14
|
||||
colors = [
|
||||
["red", "green", "blue"],
|
||||
["cyan", "magenta", "yellow", "black"],
|
||||
]
|
||||
|
||||
[My.Cats]
|
||||
plato = "cat 1"
|
||||
cauchy = "cat 2"
|
||||
`
|
||||
|
||||
type cats struct {
|
||||
Plato string
|
||||
Cauchy string
|
||||
}
|
||||
type simple struct {
|
||||
Age int
|
||||
Colors [][]string
|
||||
Pi float64
|
||||
YesOrNo bool
|
||||
Now time.Time
|
||||
Andrew string
|
||||
Kait string
|
||||
My map[string]cats
|
||||
}
|
||||
|
||||
var val simple
|
||||
_, err := Decode(testSimple, &val)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
now, err := time.Parse("2006-01-02T15:04:05", "1987-07-05T05:45:00")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
var answer = simple{
|
||||
Age: 250,
|
||||
Andrew: "gallant",
|
||||
Kait: "brady",
|
||||
Now: now,
|
||||
YesOrNo: true,
|
||||
Pi: 3.14,
|
||||
Colors: [][]string{
|
||||
{"red", "green", "blue"},
|
||||
{"cyan", "magenta", "yellow", "black"},
|
||||
},
|
||||
My: map[string]cats{
|
||||
"Cats": cats{Plato: "cat 1", Cauchy: "cat 2"},
|
||||
},
|
||||
}
|
||||
if !reflect.DeepEqual(val, answer) {
|
||||
t.Fatalf("Expected\n-----\n%#v\n-----\nbut got\n-----\n%#v\n",
|
||||
answer, val)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecodeEmbedded(t *testing.T) {
|
||||
type Dog struct{ Name string }
|
||||
type Age int
|
||||
|
||||
tests := map[string]struct {
|
||||
input string
|
||||
decodeInto interface{}
|
||||
wantDecoded interface{}
|
||||
}{
|
||||
"embedded struct": {
|
||||
input: `Name = "milton"`,
|
||||
decodeInto: &struct{ Dog }{},
|
||||
wantDecoded: &struct{ Dog }{Dog{"milton"}},
|
||||
},
|
||||
"embedded non-nil pointer to struct": {
|
||||
input: `Name = "milton"`,
|
||||
decodeInto: &struct{ *Dog }{},
|
||||
wantDecoded: &struct{ *Dog }{&Dog{"milton"}},
|
||||
},
|
||||
"embedded nil pointer to struct": {
|
||||
input: ``,
|
||||
decodeInto: &struct{ *Dog }{},
|
||||
wantDecoded: &struct{ *Dog }{nil},
|
||||
},
|
||||
"embedded int": {
|
||||
input: `Age = -5`,
|
||||
decodeInto: &struct{ Age }{},
|
||||
wantDecoded: &struct{ Age }{-5},
|
||||
},
|
||||
}
|
||||
|
||||
for label, test := range tests {
|
||||
_, err := Decode(test.input, test.decodeInto)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if !reflect.DeepEqual(test.wantDecoded, test.decodeInto) {
|
||||
t.Errorf("%s: want decoded == %+v, got %+v",
|
||||
label, test.wantDecoded, test.decodeInto)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestTableArrays(t *testing.T) {
|
||||
var tomlTableArrays = `
|
||||
[[albums]]
|
||||
name = "Born to Run"
|
||||
|
||||
[[albums.songs]]
|
||||
name = "Jungleland"
|
||||
|
||||
[[albums.songs]]
|
||||
name = "Meeting Across the River"
|
||||
|
||||
[[albums]]
|
||||
name = "Born in the USA"
|
||||
|
||||
[[albums.songs]]
|
||||
name = "Glory Days"
|
||||
|
||||
[[albums.songs]]
|
||||
name = "Dancing in the Dark"
|
||||
`
|
||||
|
||||
type Song struct {
|
||||
Name string
|
||||
}
|
||||
|
||||
type Album struct {
|
||||
Name string
|
||||
Songs []Song
|
||||
}
|
||||
|
||||
type Music struct {
|
||||
Albums []Album
|
||||
}
|
||||
|
||||
expected := Music{[]Album{
|
||||
{"Born to Run", []Song{{"Jungleland"}, {"Meeting Across the River"}}},
|
||||
{"Born in the USA", []Song{{"Glory Days"}, {"Dancing in the Dark"}}},
|
||||
}}
|
||||
var got Music
|
||||
if _, err := Decode(tomlTableArrays, &got); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if !reflect.DeepEqual(expected, got) {
|
||||
t.Fatalf("\n%#v\n!=\n%#v\n", expected, got)
|
||||
}
|
||||
}
|
||||
|
||||
// Case insensitive matching tests.
|
||||
// A bit more comprehensive than needed given the current implementation,
|
||||
// but implementations change.
|
||||
// Probably still missing demonstrations of some ugly corner cases regarding
|
||||
// case insensitive matching and multiple fields.
|
||||
func TestCase(t *testing.T) {
|
||||
var caseToml = `
|
||||
tOpString = "string"
|
||||
tOpInt = 1
|
||||
tOpFloat = 1.1
|
||||
tOpBool = true
|
||||
tOpdate = 2006-01-02T15:04:05Z
|
||||
tOparray = [ "array" ]
|
||||
Match = "i should be in Match only"
|
||||
MatcH = "i should be in MatcH only"
|
||||
once = "just once"
|
||||
[nEst.eD]
|
||||
nEstedString = "another string"
|
||||
`
|
||||
|
||||
type InsensitiveEd struct {
|
||||
NestedString string
|
||||
}
|
||||
|
||||
type InsensitiveNest struct {
|
||||
Ed InsensitiveEd
|
||||
}
|
||||
|
||||
type Insensitive struct {
|
||||
TopString string
|
||||
TopInt int
|
||||
TopFloat float64
|
||||
TopBool bool
|
||||
TopDate time.Time
|
||||
TopArray []string
|
||||
Match string
|
||||
MatcH string
|
||||
Once string
|
||||
OncE string
|
||||
Nest InsensitiveNest
|
||||
}
|
||||
|
||||
tme, err := time.Parse(time.RFC3339, time.RFC3339[:len(time.RFC3339)-5])
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
expected := Insensitive{
|
||||
TopString: "string",
|
||||
TopInt: 1,
|
||||
TopFloat: 1.1,
|
||||
TopBool: true,
|
||||
TopDate: tme,
|
||||
TopArray: []string{"array"},
|
||||
MatcH: "i should be in MatcH only",
|
||||
Match: "i should be in Match only",
|
||||
Once: "just once",
|
||||
OncE: "",
|
||||
Nest: InsensitiveNest{
|
||||
Ed: InsensitiveEd{NestedString: "another string"},
|
||||
},
|
||||
}
|
||||
var got Insensitive
|
||||
if _, err := Decode(caseToml, &got); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if !reflect.DeepEqual(expected, got) {
|
||||
t.Fatalf("\n%#v\n!=\n%#v\n", expected, got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestPointers(t *testing.T) {
|
||||
type Object struct {
|
||||
Type string
|
||||
Description string
|
||||
}
|
||||
|
||||
type Dict struct {
|
||||
NamedObject map[string]*Object
|
||||
BaseObject *Object
|
||||
Strptr *string
|
||||
Strptrs []*string
|
||||
}
|
||||
s1, s2, s3 := "blah", "abc", "def"
|
||||
expected := &Dict{
|
||||
Strptr: &s1,
|
||||
Strptrs: []*string{&s2, &s3},
|
||||
NamedObject: map[string]*Object{
|
||||
"foo": {"FOO", "fooooo!!!"},
|
||||
"bar": {"BAR", "ba-ba-ba-ba-barrrr!!!"},
|
||||
},
|
||||
BaseObject: &Object{"BASE", "da base"},
|
||||
}
|
||||
|
||||
ex1 := `
|
||||
Strptr = "blah"
|
||||
Strptrs = ["abc", "def"]
|
||||
|
||||
[NamedObject.foo]
|
||||
Type = "FOO"
|
||||
Description = "fooooo!!!"
|
||||
|
||||
[NamedObject.bar]
|
||||
Type = "BAR"
|
||||
Description = "ba-ba-ba-ba-barrrr!!!"
|
||||
|
||||
[BaseObject]
|
||||
Type = "BASE"
|
||||
Description = "da base"
|
||||
`
|
||||
dict := new(Dict)
|
||||
_, err := Decode(ex1, dict)
|
||||
if err != nil {
|
||||
t.Errorf("Decode error: %v", err)
|
||||
}
|
||||
if !reflect.DeepEqual(expected, dict) {
|
||||
t.Fatalf("\n%#v\n!=\n%#v\n", expected, dict)
|
||||
}
|
||||
}
|
||||
|
||||
type sphere struct {
|
||||
Center [3]float64
|
||||
Radius float64
|
||||
}
|
||||
|
||||
func TestDecodeSimpleArray(t *testing.T) {
|
||||
var s1 sphere
|
||||
if _, err := Decode(`center = [0.0, 1.5, 0.0]`, &s1); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecodeArrayWrongSize(t *testing.T) {
|
||||
var s1 sphere
|
||||
if _, err := Decode(`center = [0.1, 2.3]`, &s1); err == nil {
|
||||
t.Fatal("Expected array type mismatch error")
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecodeLargeIntoSmallInt(t *testing.T) {
|
||||
type table struct {
|
||||
Value int8
|
||||
}
|
||||
var tab table
|
||||
if _, err := Decode(`value = 500`, &tab); err == nil {
|
||||
t.Fatal("Expected integer out-of-bounds error.")
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecodeSizedInts(t *testing.T) {
|
||||
type table struct {
|
||||
U8 uint8
|
||||
U16 uint16
|
||||
U32 uint32
|
||||
U64 uint64
|
||||
U uint
|
||||
I8 int8
|
||||
I16 int16
|
||||
I32 int32
|
||||
I64 int64
|
||||
I int
|
||||
}
|
||||
answer := table{1, 1, 1, 1, 1, -1, -1, -1, -1, -1}
|
||||
toml := `
|
||||
u8 = 1
|
||||
u16 = 1
|
||||
u32 = 1
|
||||
u64 = 1
|
||||
u = 1
|
||||
i8 = -1
|
||||
i16 = -1
|
||||
i32 = -1
|
||||
i64 = -1
|
||||
i = -1
|
||||
`
|
||||
var tab table
|
||||
if _, err := Decode(toml, &tab); err != nil {
|
||||
t.Fatal(err.Error())
|
||||
}
|
||||
if answer != tab {
|
||||
t.Fatalf("Expected %#v but got %#v", answer, tab)
|
||||
}
|
||||
}
|
||||
|
||||
func TestUnmarshaler(t *testing.T) {
|
||||
|
||||
var tomlBlob = `
|
||||
[dishes.hamboogie]
|
||||
name = "Hamboogie with fries"
|
||||
price = 10.99
|
||||
|
||||
[[dishes.hamboogie.ingredients]]
|
||||
name = "Bread Bun"
|
||||
|
||||
[[dishes.hamboogie.ingredients]]
|
||||
name = "Lettuce"
|
||||
|
||||
[[dishes.hamboogie.ingredients]]
|
||||
name = "Real Beef Patty"
|
||||
|
||||
[[dishes.hamboogie.ingredients]]
|
||||
name = "Tomato"
|
||||
|
||||
[dishes.eggsalad]
|
||||
name = "Egg Salad with rice"
|
||||
price = 3.99
|
||||
|
||||
[[dishes.eggsalad.ingredients]]
|
||||
name = "Egg"
|
||||
|
||||
[[dishes.eggsalad.ingredients]]
|
||||
name = "Mayo"
|
||||
|
||||
[[dishes.eggsalad.ingredients]]
|
||||
name = "Rice"
|
||||
`
|
||||
m := &menu{}
|
||||
if _, err := Decode(tomlBlob, m); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if len(m.Dishes) != 2 {
|
||||
t.Log("two dishes should be loaded with UnmarshalTOML()")
|
||||
t.Errorf("expected %d but got %d", 2, len(m.Dishes))
|
||||
}
|
||||
|
||||
eggSalad := m.Dishes["eggsalad"]
|
||||
if _, ok := interface{}(eggSalad).(dish); !ok {
|
||||
t.Errorf("expected a dish")
|
||||
}
|
||||
|
||||
if eggSalad.Name != "Egg Salad with rice" {
|
||||
t.Errorf("expected the dish to be named 'Egg Salad with rice'")
|
||||
}
|
||||
|
||||
if len(eggSalad.Ingredients) != 3 {
|
||||
t.Log("dish should be loaded with UnmarshalTOML()")
|
||||
t.Errorf("expected %d but got %d", 3, len(eggSalad.Ingredients))
|
||||
}
|
||||
|
||||
found := false
|
||||
for _, i := range eggSalad.Ingredients {
|
||||
if i.Name == "Rice" {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
t.Error("Rice was not loaded in UnmarshalTOML()")
|
||||
}
|
||||
|
||||
// test on a non-pointer value - it must still be passed to Decode by address
|
||||
o := menu{}
|
||||
if _, err := Decode(tomlBlob, &o); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
type menu struct {
|
||||
Dishes map[string]dish
|
||||
}
|
||||
|
||||
func (m *menu) UnmarshalTOML(p interface{}) error {
|
||||
m.Dishes = make(map[string]dish)
|
||||
data, _ := p.(map[string]interface{})
|
||||
dishes := data["dishes"].(map[string]interface{})
|
||||
for n, v := range dishes {
|
||||
if d, ok := v.(map[string]interface{}); ok {
|
||||
nd := dish{}
|
||||
nd.UnmarshalTOML(d)
|
||||
m.Dishes[n] = nd
|
||||
} else {
|
||||
return fmt.Errorf("not a dish")
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type dish struct {
|
||||
Name string
|
||||
Price float32
|
||||
Ingredients []ingredient
|
||||
}
|
||||
|
||||
func (d *dish) UnmarshalTOML(p interface{}) error {
|
||||
data, _ := p.(map[string]interface{})
|
||||
d.Name, _ = data["name"].(string)
|
||||
d.Price, _ = data["price"].(float32)
|
||||
ingredients, _ := data["ingredients"].([]map[string]interface{})
|
||||
for _, e := range ingredients {
|
||||
n, _ := interface{}(e).(map[string]interface{})
|
||||
name, _ := n["name"].(string)
|
||||
i := ingredient{name}
|
||||
d.Ingredients = append(d.Ingredients, i)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type ingredient struct {
|
||||
Name string
|
||||
}
|
||||
|
||||
func ExampleMetaData_PrimitiveDecode() {
|
||||
var md MetaData
|
||||
var err error
|
||||
|
||||
var tomlBlob = `
|
||||
ranking = ["Springsteen", "J Geils"]
|
||||
|
||||
[bands.Springsteen]
|
||||
started = 1973
|
||||
albums = ["Greetings", "WIESS", "Born to Run", "Darkness"]
|
||||
|
||||
[bands."J Geils"]
|
||||
started = 1970
|
||||
albums = ["The J. Geils Band", "Full House", "Blow Your Face Out"]
|
||||
`
|
||||
|
||||
type band struct {
|
||||
Started int
|
||||
Albums []string
|
||||
}
|
||||
type classics struct {
|
||||
Ranking []string
|
||||
Bands map[string]Primitive
|
||||
}
|
||||
|
||||
// Do the initial decode. Reflection is delayed on Primitive values.
|
||||
var music classics
|
||||
if md, err = Decode(tomlBlob, &music); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// MetaData still includes information on Primitive values.
|
||||
fmt.Printf("Is `bands.Springsteen` defined? %v\n",
|
||||
md.IsDefined("bands", "Springsteen"))
|
||||
|
||||
// Decode primitive data into Go values.
|
||||
for _, artist := range music.Ranking {
|
||||
// A band is a primitive value, so we need to decode it to get a
|
||||
// real `band` value.
|
||||
primValue := music.Bands[artist]
|
||||
|
||||
var aBand band
|
||||
if err = md.PrimitiveDecode(primValue, &aBand); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
fmt.Printf("%s started in %d.\n", artist, aBand.Started)
|
||||
}
|
||||
// Check to see if there were any fields left undecoded.
|
||||
// Note that this won't be empty before decoding the Primitive value!
|
||||
fmt.Printf("Undecoded: %q\n", md.Undecoded())
|
||||
|
||||
// Output:
|
||||
// Is `bands.Springsteen` defined? true
|
||||
// Springsteen started in 1973.
|
||||
// J Geils started in 1970.
|
||||
// Undecoded: []
|
||||
}
|
||||
|
||||
func ExampleDecode() {
|
||||
var tomlBlob = `
|
||||
# Some comments.
|
||||
[alpha]
|
||||
ip = "10.0.0.1"
|
||||
|
||||
[alpha.config]
|
||||
Ports = [ 8001, 8002 ]
|
||||
Location = "Toronto"
|
||||
Created = 1987-07-05T05:45:00Z
|
||||
|
||||
[beta]
|
||||
ip = "10.0.0.2"
|
||||
|
||||
[beta.config]
|
||||
Ports = [ 9001, 9002 ]
|
||||
Location = "New Jersey"
|
||||
Created = 1887-01-05T05:55:00Z
|
||||
`
|
||||
|
||||
type serverConfig struct {
|
||||
Ports []int
|
||||
Location string
|
||||
Created time.Time
|
||||
}
|
||||
|
||||
type server struct {
|
||||
IP string `toml:"ip"`
|
||||
Config serverConfig `toml:"config"`
|
||||
}
|
||||
|
||||
type servers map[string]server
|
||||
|
||||
var config servers
|
||||
if _, err := Decode(tomlBlob, &config); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
for _, name := range []string{"alpha", "beta"} {
|
||||
s := config[name]
|
||||
fmt.Printf("Server: %s (ip: %s) in %s created on %s\n",
|
||||
name, s.IP, s.Config.Location,
|
||||
s.Config.Created.Format("2006-01-02"))
|
||||
fmt.Printf("Ports: %v\n", s.Config.Ports)
|
||||
}
|
||||
|
||||
// Output:
|
||||
// Server: alpha (ip: 10.0.0.1) in Toronto created on 1987-07-05
|
||||
// Ports: [8001 8002]
|
||||
// Server: beta (ip: 10.0.0.2) in New Jersey created on 1887-01-05
|
||||
// Ports: [9001 9002]
|
||||
}
|
||||
|
||||
type duration struct {
|
||||
time.Duration
|
||||
}
|
||||
|
||||
func (d *duration) UnmarshalText(text []byte) error {
|
||||
var err error
|
||||
d.Duration, err = time.ParseDuration(string(text))
|
||||
return err
|
||||
}
|
||||
|
||||
// Example_unmarshaler shows how to decode TOML strings into your own custom
// data type by implementing the TextUnmarshaler interface.
|
||||
func Example_unmarshaler() {
|
||||
blob := `
|
||||
[[song]]
|
||||
name = "Thunder Road"
|
||||
duration = "4m49s"
|
||||
|
||||
[[song]]
|
||||
name = "Stairway to Heaven"
|
||||
duration = "8m03s"
|
||||
`
|
||||
type song struct {
|
||||
Name string
|
||||
Duration duration
|
||||
}
|
||||
type songs struct {
|
||||
Song []song
|
||||
}
|
||||
var favorites songs
|
||||
if _, err := Decode(blob, &favorites); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// Code to implement the TextUnmarshaler interface for `duration`:
|
||||
//
|
||||
// type duration struct {
|
||||
// time.Duration
|
||||
// }
|
||||
//
|
||||
// func (d *duration) UnmarshalText(text []byte) error {
|
||||
// var err error
|
||||
// d.Duration, err = time.ParseDuration(string(text))
|
||||
// return err
|
||||
// }
|
||||
|
||||
for _, s := range favorites.Song {
|
||||
fmt.Printf("%s (%s)\n", s.Name, s.Duration)
|
||||
}
|
||||
// Output:
|
||||
// Thunder Road (4m49s)
|
||||
// Stairway to Heaven (8m3s)
|
||||
}
|
||||
|
||||
// Example StrictDecoding shows how to detect whether there are keys in the
|
||||
// TOML document that weren't decoded into the value given. This is useful
|
||||
// for returning an error to the user if they've included extraneous fields
|
||||
// in their configuration.
|
||||
func Example_strictDecoding() {
|
||||
var blob = `
|
||||
key1 = "value1"
|
||||
key2 = "value2"
|
||||
key3 = "value3"
|
||||
`
|
||||
type config struct {
|
||||
Key1 string
|
||||
Key3 string
|
||||
}
|
||||
|
||||
var conf config
|
||||
md, err := Decode(blob, &conf)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
fmt.Printf("Undecoded keys: %q\n", md.Undecoded())
|
||||
// Output:
|
||||
// Undecoded keys: ["key2"]
|
||||
}
|
||||
|
||||
// Example UnmarshalTOML shows how to implement a struct type that knows how to
|
||||
// unmarshal itself. The struct must take full responsibility for mapping the
|
||||
// values passed into the struct. The method may be used with interfaces in a
|
||||
// struct in cases where the actual type is not known until the data is
|
||||
// examined.
|
||||
func Example_unmarshalTOML() {
|
||||
|
||||
var blob = `
|
||||
[[parts]]
|
||||
type = "valve"
|
||||
id = "valve-1"
|
||||
size = 1.2
|
||||
rating = 4
|
||||
|
||||
[[parts]]
|
||||
type = "valve"
|
||||
id = "valve-2"
|
||||
size = 2.1
|
||||
rating = 5
|
||||
|
||||
[[parts]]
|
||||
type = "pipe"
|
||||
id = "pipe-1"
|
||||
length = 2.1
|
||||
diameter = 12
|
||||
|
||||
[[parts]]
|
||||
type = "cable"
|
||||
id = "cable-1"
|
||||
length = 12
|
||||
rating = 3.1
|
||||
`
|
||||
o := &order{}
|
||||
err := Unmarshal([]byte(blob), o)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
fmt.Println(len(o.parts))
|
||||
|
||||
for _, part := range o.parts {
|
||||
fmt.Println(part.Name())
|
||||
}
|
||||
|
||||
// Code to implement UnmarshalTOML.
|
||||
|
||||
// type order struct {
|
||||
// // NOTE `order.parts` is a private slice of type `part` which is an
|
||||
// // interface and may only be loaded from toml using the
|
||||
// // UnmarshalTOML() method of the Unmarshaler interface.
|
||||
// parts parts
|
||||
// }
|
||||
|
||||
// func (o *order) UnmarshalTOML(data interface{}) error {
|
||||
|
||||
// // NOTE the example below contains detailed type casting to show how
|
||||
// // the 'data' is retrieved. In operational use, a type cast wrapper
|
||||
// // may be preferred, e.g.
|
||||
// //
|
||||
// // func AsMap(v interface{}) (map[string]interface{}, error) {
|
||||
// // 	m, _ := v.(map[string]interface{})
// // 	return m, nil
|
||||
// // }
|
||||
// //
|
||||
// // resulting in:
|
||||
// // d, _ := AsMap(data)
|
||||
// //
|
||||
|
||||
// d, _ := data.(map[string]interface{})
|
||||
// parts, _ := d["parts"].([]map[string]interface{})
|
||||
|
||||
// for _, p := range parts {
|
||||
|
||||
// typ, _ := p["type"].(string)
|
||||
// id, _ := p["id"].(string)
|
||||
|
||||
// // detect the type of part and handle each case
|
||||
// switch p["type"] {
|
||||
// case "valve":
|
||||
|
||||
// size := float32(p["size"].(float64))
|
||||
// rating := int(p["rating"].(int64))
|
||||
|
||||
// valve := &valve{
|
||||
// Type: typ,
|
||||
// ID: id,
|
||||
// Size: size,
|
||||
// Rating: rating,
|
||||
// }
|
||||
|
||||
// o.parts = append(o.parts, valve)
|
||||
|
||||
// case "pipe":
|
||||
|
||||
// length := float32(p["length"].(float64))
|
||||
// diameter := int(p["diameter"].(int64))
|
||||
|
||||
// pipe := &pipe{
|
||||
// Type: typ,
|
||||
// ID: id,
|
||||
// Length: length,
|
||||
// Diameter: diameter,
|
||||
// }
|
||||
|
||||
// o.parts = append(o.parts, pipe)
|
||||
|
||||
// case "cable":
|
||||
|
||||
// length := int(p["length"].(int64))
|
||||
// rating := float32(p["rating"].(float64))
|
||||
|
||||
// cable := &cable{
|
||||
// Type: typ,
|
||||
// ID: id,
|
||||
// Length: length,
|
||||
// Rating: rating,
|
||||
// }
|
||||
|
||||
// o.parts = append(o.parts, cable)
|
||||
|
||||
// }
|
||||
// }
|
||||
|
||||
// return nil
|
||||
// }
|
||||
|
||||
// type parts []part
|
||||
|
||||
// type part interface {
|
||||
// Name() string
|
||||
// }
|
||||
|
||||
// type valve struct {
|
||||
// Type string
|
||||
// ID string
|
||||
// Size float32
|
||||
// Rating int
|
||||
// }
|
||||
|
||||
// func (v *valve) Name() string {
|
||||
// return fmt.Sprintf("VALVE: %s", v.ID)
|
||||
// }
|
||||
|
||||
// type pipe struct {
|
||||
// Type string
|
||||
// ID string
|
||||
// Length float32
|
||||
// Diameter int
|
||||
// }
|
||||
|
||||
// func (p *pipe) Name() string {
|
||||
// return fmt.Sprintf("PIPE: %s", p.ID)
|
||||
// }
|
||||
|
||||
// type cable struct {
|
||||
// Type string
|
||||
// ID string
|
||||
// Length int
|
||||
// Rating float32
|
||||
// }
|
||||
|
||||
// func (c *cable) Name() string {
|
||||
// return fmt.Sprintf("CABLE: %s", c.ID)
|
||||
// }
|
||||
|
||||
// Output:
|
||||
// 4
|
||||
// VALVE: valve-1
|
||||
// VALVE: valve-2
|
||||
// PIPE: pipe-1
|
||||
// CABLE: cable-1
|
||||
|
||||
}
|
||||
|
||||
type order struct {
|
||||
// NOTE `order.parts` is a private slice of type `part` which is an
|
||||
// interface and may only be loaded from toml using the UnmarshalTOML()
|
||||
// method of the Unmarshaler interface.
|
||||
parts parts
|
||||
}
|
||||
|
||||
func (o *order) UnmarshalTOML(data interface{}) error {
|
||||
|
||||
// NOTE the example below contains detailed type casting to show how
|
||||
// the 'data' is retrieved. In operational use, a type cast wrapper
|
||||
// may be prefered e.g.
|
||||
//
|
||||
// func AsMap(v interface{}) (map[string]interface{}, error) {
|
||||
// 	m, _ := v.(map[string]interface{})
// 	return m, nil
|
||||
// }
|
||||
//
|
||||
// resulting in:
|
||||
// d, _ := AsMap(data)
|
||||
//
|
||||
|
||||
d, _ := data.(map[string]interface{})
|
||||
parts, _ := d["parts"].([]map[string]interface{})
|
||||
|
||||
for _, p := range parts {
|
||||
|
||||
typ, _ := p["type"].(string)
|
||||
id, _ := p["id"].(string)
|
||||
|
||||
// detect the type of part and handle each case
|
||||
switch p["type"] {
|
||||
case "valve":
|
||||
|
||||
size := float32(p["size"].(float64))
|
||||
rating := int(p["rating"].(int64))
|
||||
|
||||
valve := &valve{
|
||||
Type: typ,
|
||||
ID: id,
|
||||
Size: size,
|
||||
Rating: rating,
|
||||
}
|
||||
|
||||
o.parts = append(o.parts, valve)
|
||||
|
||||
case "pipe":
|
||||
|
||||
length := float32(p["length"].(float64))
|
||||
diameter := int(p["diameter"].(int64))
|
||||
|
||||
pipe := &pipe{
|
||||
Type: typ,
|
||||
ID: id,
|
||||
Length: length,
|
||||
Diameter: diameter,
|
||||
}
|
||||
|
||||
o.parts = append(o.parts, pipe)
|
||||
|
||||
case "cable":
|
||||
|
||||
length := int(p["length"].(int64))
|
||||
rating := float32(p["rating"].(float64))
|
||||
|
||||
cable := &cable{
|
||||
Type: typ,
|
||||
ID: id,
|
||||
Length: length,
|
||||
Rating: rating,
|
||||
}
|
||||
|
||||
o.parts = append(o.parts, cable)
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
type parts []part
|
||||
|
||||
type part interface {
|
||||
Name() string
|
||||
}
|
||||
|
||||
type valve struct {
|
||||
Type string
|
||||
ID string
|
||||
Size float32
|
||||
Rating int
|
||||
}
|
||||
|
||||
func (v *valve) Name() string {
|
||||
return fmt.Sprintf("VALVE: %s", v.ID)
|
||||
}
|
||||
|
||||
type pipe struct {
|
||||
Type string
|
||||
ID string
|
||||
Length float32
|
||||
Diameter int
|
||||
}
|
||||
|
||||
func (p *pipe) Name() string {
|
||||
return fmt.Sprintf("PIPE: %s", p.ID)
|
||||
}
|
||||
|
||||
type cable struct {
|
||||
Type string
|
||||
ID string
|
||||
Length int
|
||||
Rating float32
|
||||
}
|
||||
|
||||
func (c *cable) Name() string {
|
||||
return fmt.Sprintf("CABLE: %s", c.ID)
|
||||
}
|
||||
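Example_strictDecoding above notes that md.Undecoded() is useful for returning an error when a configuration document contains extraneous keys, but stops short of showing that wrapper. A minimal sketch of the pattern, assuming the package is imported as toml; the config struct and loadConfig helper are illustrative only:

package main

import (
	"fmt"

	"github.com/BurntSushi/toml"
)

type config struct {
	Key1 string
	Key3 string
}

// loadConfig decodes data and rejects any keys the struct did not absorb.
func loadConfig(data string) (config, error) {
	var conf config
	md, err := toml.Decode(data, &conf)
	if err != nil {
		return conf, err
	}
	if undecoded := md.Undecoded(); len(undecoded) > 0 {
		return conf, fmt.Errorf("unknown configuration keys: %q", undecoded)
	}
	return conf, nil
}

func main() {
	_, err := loadConfig("key1 = \"a\"\nkey2 = \"b\"\nkey3 = \"c\"\n")
	fmt.Println(err) // key2 is reported as an unknown key
}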
27 Godeps/_workspace/src/github.com/BurntSushi/toml/doc.go (generated, vendored, new file)
@@ -0,0 +1,27 @@
|
||||
/*
|
||||
Package toml provides facilities for decoding and encoding TOML configuration
|
||||
files via reflection. There is also support for delaying decoding with
|
||||
the Primitive type, and querying the set of keys in a TOML document with the
|
||||
MetaData type.
|
||||
|
||||
The specification implemented: https://github.com/mojombo/toml
|
||||
|
||||
The sub-command github.com/BurntSushi/toml/cmd/tomlv can be used to verify
|
||||
whether a file is a valid TOML document. It can also be used to print the
|
||||
type of each key in a TOML document.
|
||||
|
||||
Testing
|
||||
|
||||
There are two important types of tests used for this package. The first is
|
||||
contained inside '*_test.go' files and uses the standard Go unit testing
|
||||
framework. These tests are primarily devoted to holistically testing the
|
||||
decoder and encoder.
|
||||
|
||||
The second type of testing is used to verify the implementation's adherence
|
||||
to the TOML specification. These tests have been factored into their own
|
||||
project: https://github.com/BurntSushi/toml-test
|
||||
|
||||
The reason the tests are in a separate project is so that they can be used by
|
||||
any implementation of TOML. Namely, it is language agnostic.
|
||||
*/
|
||||
package toml
|
||||
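A minimal sketch of the decoding facility described above, assuming the package is imported as toml; DecodeFile lives in the package's decode.go (not shown in this diff), and the file name and settings struct here are illustrative only:

package main

import (
	"log"

	"github.com/BurntSushi/toml"
)

// settings is an illustrative stand-in for an application's configuration.
type settings struct {
	Host  string
	Ports []int
}

func main() {
	var conf settings
	// DecodeFile reads and decodes a TOML file in one step; the returned
	// MetaData supports the same IsDefined/Undecoded queries used earlier.
	md, err := toml.DecodeFile("config.toml", &conf)
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("host defined: %v, ports: %v", md.IsDefined("host"), conf.Ports)
}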
551 Godeps/_workspace/src/github.com/BurntSushi/toml/encode.go (generated, vendored, new file)
@@ -0,0 +1,551 @@
|
||||
package toml
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
type tomlEncodeError struct{ error }
|
||||
|
||||
var (
|
||||
errArrayMixedElementTypes = errors.New(
|
||||
"can't encode array with mixed element types")
|
||||
errArrayNilElement = errors.New(
|
||||
"can't encode array with nil element")
|
||||
errNonString = errors.New(
|
||||
"can't encode a map with non-string key type")
|
||||
errAnonNonStruct = errors.New(
|
||||
"can't encode an anonymous field that is not a struct")
|
||||
errArrayNoTable = errors.New(
|
||||
"TOML array element can't contain a table")
|
||||
errNoKey = errors.New(
|
||||
"top-level values must be a Go map or struct")
|
||||
errAnything = errors.New("") // used in testing
|
||||
)
|
||||
|
||||
var quotedReplacer = strings.NewReplacer(
|
||||
"\t", "\\t",
|
||||
"\n", "\\n",
|
||||
"\r", "\\r",
|
||||
"\"", "\\\"",
|
||||
"\\", "\\\\",
|
||||
)
|
||||
|
||||
// Encoder controls the encoding of Go values to a TOML document to some
|
||||
// io.Writer.
|
||||
//
|
||||
// The indentation level can be controlled with the Indent field.
|
||||
type Encoder struct {
|
||||
// A single indentation level. By default it is two spaces.
|
||||
Indent string
|
||||
|
||||
// hasWritten is whether we have written any output to w yet.
|
||||
hasWritten bool
|
||||
w *bufio.Writer
|
||||
}
|
||||
|
||||
// NewEncoder returns a TOML encoder that encodes Go values to the io.Writer
|
||||
// given. By default, a single indentation level is 2 spaces.
|
||||
func NewEncoder(w io.Writer) *Encoder {
|
||||
return &Encoder{
|
||||
w: bufio.NewWriter(w),
|
||||
Indent: " ",
|
||||
}
|
||||
}
|
||||
|
||||
// Encode writes a TOML representation of the Go value to the underlying
|
||||
// io.Writer. If the value given cannot be encoded to a valid TOML document,
|
||||
// then an error is returned.
|
||||
//
|
||||
// The mapping between Go values and TOML values should be precisely the same
|
||||
// as for the Decode* functions. Similarly, the TextMarshaler interface is
|
||||
// supported by encoding the resulting bytes as strings. (If you want to write
|
||||
// arbitrary binary data then you will need to use something like base64 since
|
||||
// TOML does not have any binary types.)
|
||||
//
|
||||
// When encoding TOML hashes (i.e., Go maps or structs), keys without any
|
||||
// sub-hashes are encoded first.
|
||||
//
|
||||
// If a Go map is encoded, then its keys are sorted alphabetically for
|
||||
// deterministic output. More control over this behavior may be provided if
|
||||
// there is demand for it.
|
||||
//
|
||||
// Encoding Go values without a corresponding TOML representation---like map
|
||||
// types with non-string keys---will cause an error to be returned. Similarly
|
||||
// for mixed arrays/slices, arrays/slices with nil elements, embedded
|
||||
// non-struct types and nested slices containing maps or structs.
|
||||
// (e.g., [][]map[string]string is not allowed but []map[string]string is OK
|
||||
// and so is []map[string][]string.)
|
||||
func (enc *Encoder) Encode(v interface{}) error {
|
||||
rv := eindirect(reflect.ValueOf(v))
|
||||
if err := enc.safeEncode(Key([]string{}), rv); err != nil {
|
||||
return err
|
||||
}
|
||||
return enc.w.Flush()
|
||||
}
|
||||
|
||||
func (enc *Encoder) safeEncode(key Key, rv reflect.Value) (err error) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
if terr, ok := r.(tomlEncodeError); ok {
|
||||
err = terr.error
|
||||
return
|
||||
}
|
||||
panic(r)
|
||||
}
|
||||
}()
|
||||
enc.encode(key, rv)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (enc *Encoder) encode(key Key, rv reflect.Value) {
|
||||
// Special case. Time needs to be in ISO8601 format.
|
||||
// Special case. If we can marshal the type to text, then we use that.
|
||||
// Basically, this prevents the encoder from handling these types as
|
||||
// generic structs (or whatever the underlying type of a TextMarshaler is).
|
||||
switch rv.Interface().(type) {
|
||||
case time.Time, TextMarshaler:
|
||||
enc.keyEqElement(key, rv)
|
||||
return
|
||||
}
|
||||
|
||||
k := rv.Kind()
|
||||
switch k {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32,
|
||||
reflect.Int64,
|
||||
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32,
|
||||
reflect.Uint64,
|
||||
reflect.Float32, reflect.Float64, reflect.String, reflect.Bool:
|
||||
enc.keyEqElement(key, rv)
|
||||
case reflect.Array, reflect.Slice:
|
||||
if typeEqual(tomlArrayHash, tomlTypeOfGo(rv)) {
|
||||
enc.eArrayOfTables(key, rv)
|
||||
} else {
|
||||
enc.keyEqElement(key, rv)
|
||||
}
|
||||
case reflect.Interface:
|
||||
if rv.IsNil() {
|
||||
return
|
||||
}
|
||||
enc.encode(key, rv.Elem())
|
||||
case reflect.Map:
|
||||
if rv.IsNil() {
|
||||
return
|
||||
}
|
||||
enc.eTable(key, rv)
|
||||
case reflect.Ptr:
|
||||
if rv.IsNil() {
|
||||
return
|
||||
}
|
||||
enc.encode(key, rv.Elem())
|
||||
case reflect.Struct:
|
||||
enc.eTable(key, rv)
|
||||
default:
|
||||
panic(e("Unsupported type for key '%s': %s", key, k))
|
||||
}
|
||||
}
|
||||
|
||||
// eElement encodes any value that can be an array element (primitives and
|
||||
// arrays).
|
||||
func (enc *Encoder) eElement(rv reflect.Value) {
|
||||
switch v := rv.Interface().(type) {
|
||||
case time.Time:
|
||||
// Special case time.Time as a primitive. Has to come before
|
||||
// TextMarshaler below because time.Time implements
|
||||
// encoding.TextMarshaler, but we need to always use UTC.
|
||||
enc.wf(v.In(time.FixedZone("UTC", 0)).Format("2006-01-02T15:04:05Z"))
|
||||
return
|
||||
case TextMarshaler:
|
||||
// Special case. Use text marshaler if it's available for this value.
|
||||
if s, err := v.MarshalText(); err != nil {
|
||||
encPanic(err)
|
||||
} else {
|
||||
enc.writeQuoted(string(s))
|
||||
}
|
||||
return
|
||||
}
|
||||
switch rv.Kind() {
|
||||
case reflect.Bool:
|
||||
enc.wf(strconv.FormatBool(rv.Bool()))
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32,
|
||||
reflect.Int64:
|
||||
enc.wf(strconv.FormatInt(rv.Int(), 10))
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16,
|
||||
reflect.Uint32, reflect.Uint64:
|
||||
enc.wf(strconv.FormatUint(rv.Uint(), 10))
|
||||
case reflect.Float32:
|
||||
enc.wf(floatAddDecimal(strconv.FormatFloat(rv.Float(), 'f', -1, 32)))
|
||||
case reflect.Float64:
|
||||
enc.wf(floatAddDecimal(strconv.FormatFloat(rv.Float(), 'f', -1, 64)))
|
||||
case reflect.Array, reflect.Slice:
|
||||
enc.eArrayOrSliceElement(rv)
|
||||
case reflect.Interface:
|
||||
enc.eElement(rv.Elem())
|
||||
case reflect.String:
|
||||
enc.writeQuoted(rv.String())
|
||||
default:
|
||||
panic(e("Unexpected primitive type: %s", rv.Kind()))
|
||||
}
|
||||
}
|
||||
|
||||
// By the TOML spec, all floats must have a decimal with at least one
|
||||
// number on either side.
|
||||
func floatAddDecimal(fstr string) string {
|
||||
if !strings.Contains(fstr, ".") {
|
||||
return fstr + ".0"
|
||||
}
|
||||
return fstr
|
||||
}
|
||||
|
||||
func (enc *Encoder) writeQuoted(s string) {
|
||||
enc.wf("\"%s\"", quotedReplacer.Replace(s))
|
||||
}
|
||||
|
||||
func (enc *Encoder) eArrayOrSliceElement(rv reflect.Value) {
|
||||
length := rv.Len()
|
||||
enc.wf("[")
|
||||
for i := 0; i < length; i++ {
|
||||
elem := rv.Index(i)
|
||||
enc.eElement(elem)
|
||||
if i != length-1 {
|
||||
enc.wf(", ")
|
||||
}
|
||||
}
|
||||
enc.wf("]")
|
||||
}
|
||||
|
||||
func (enc *Encoder) eArrayOfTables(key Key, rv reflect.Value) {
|
||||
if len(key) == 0 {
|
||||
encPanic(errNoKey)
|
||||
}
|
||||
for i := 0; i < rv.Len(); i++ {
|
||||
trv := rv.Index(i)
|
||||
if isNil(trv) {
|
||||
continue
|
||||
}
|
||||
panicIfInvalidKey(key)
|
||||
enc.newline()
|
||||
enc.wf("%s[[%s]]", enc.indentStr(key), key.maybeQuotedAll())
|
||||
enc.newline()
|
||||
enc.eMapOrStruct(key, trv)
|
||||
}
|
||||
}
|
||||
|
||||
func (enc *Encoder) eTable(key Key, rv reflect.Value) {
|
||||
panicIfInvalidKey(key)
|
||||
if len(key) == 1 {
|
||||
// Output an extra new line between top-level tables.
|
||||
// (The newline isn't written if nothing else has been written though.)
|
||||
enc.newline()
|
||||
}
|
||||
if len(key) > 0 {
|
||||
enc.wf("%s[%s]", enc.indentStr(key), key.maybeQuotedAll())
|
||||
enc.newline()
|
||||
}
|
||||
enc.eMapOrStruct(key, rv)
|
||||
}
|
||||
|
||||
func (enc *Encoder) eMapOrStruct(key Key, rv reflect.Value) {
|
||||
switch rv := eindirect(rv); rv.Kind() {
|
||||
case reflect.Map:
|
||||
enc.eMap(key, rv)
|
||||
case reflect.Struct:
|
||||
enc.eStruct(key, rv)
|
||||
default:
|
||||
panic("eTable: unhandled reflect.Value Kind: " + rv.Kind().String())
|
||||
}
|
||||
}
|
||||
|
||||
func (enc *Encoder) eMap(key Key, rv reflect.Value) {
|
||||
rt := rv.Type()
|
||||
if rt.Key().Kind() != reflect.String {
|
||||
encPanic(errNonString)
|
||||
}
|
||||
|
||||
// Sort keys so that we have deterministic output. And write keys directly
|
||||
// underneath this key first, before writing sub-structs or sub-maps.
|
||||
var mapKeysDirect, mapKeysSub []string
|
||||
for _, mapKey := range rv.MapKeys() {
|
||||
k := mapKey.String()
|
||||
if typeIsHash(tomlTypeOfGo(rv.MapIndex(mapKey))) {
|
||||
mapKeysSub = append(mapKeysSub, k)
|
||||
} else {
|
||||
mapKeysDirect = append(mapKeysDirect, k)
|
||||
}
|
||||
}
|
||||
|
||||
var writeMapKeys = func(mapKeys []string) {
|
||||
sort.Strings(mapKeys)
|
||||
for _, mapKey := range mapKeys {
|
||||
mrv := rv.MapIndex(reflect.ValueOf(mapKey))
|
||||
if isNil(mrv) {
|
||||
// Don't write anything for nil fields.
|
||||
continue
|
||||
}
|
||||
enc.encode(key.add(mapKey), mrv)
|
||||
}
|
||||
}
|
||||
writeMapKeys(mapKeysDirect)
|
||||
writeMapKeys(mapKeysSub)
|
||||
}
|
||||
|
||||
func (enc *Encoder) eStruct(key Key, rv reflect.Value) {
|
||||
// Write keys for fields directly under this key first, because if we write
|
||||
// a field that creates a new table, then all keys under it will be in that
|
||||
// table (not the one we're writing here).
|
||||
rt := rv.Type()
|
||||
var fieldsDirect, fieldsSub [][]int
|
||||
var addFields func(rt reflect.Type, rv reflect.Value, start []int)
|
||||
addFields = func(rt reflect.Type, rv reflect.Value, start []int) {
|
||||
for i := 0; i < rt.NumField(); i++ {
|
||||
f := rt.Field(i)
|
||||
// skip unexported fields
|
||||
if f.PkgPath != "" {
|
||||
continue
|
||||
}
|
||||
frv := rv.Field(i)
|
||||
if f.Anonymous {
|
||||
frv := eindirect(frv)
|
||||
t := frv.Type()
|
||||
if t.Kind() != reflect.Struct {
|
||||
encPanic(errAnonNonStruct)
|
||||
}
|
||||
addFields(t, frv, f.Index)
|
||||
} else if typeIsHash(tomlTypeOfGo(frv)) {
|
||||
fieldsSub = append(fieldsSub, append(start, f.Index...))
|
||||
} else {
|
||||
fieldsDirect = append(fieldsDirect, append(start, f.Index...))
|
||||
}
|
||||
}
|
||||
}
|
||||
addFields(rt, rv, nil)
|
||||
|
||||
var writeFields = func(fields [][]int) {
|
||||
for _, fieldIndex := range fields {
|
||||
sft := rt.FieldByIndex(fieldIndex)
|
||||
sf := rv.FieldByIndex(fieldIndex)
|
||||
if isNil(sf) {
|
||||
// Don't write anything for nil fields.
|
||||
continue
|
||||
}
|
||||
|
||||
keyName := sft.Tag.Get("toml")
|
||||
if keyName == "-" {
|
||||
continue
|
||||
}
|
||||
if keyName == "" {
|
||||
keyName = sft.Name
|
||||
}
|
||||
|
||||
keyName, opts := getOptions(keyName)
|
||||
if _, ok := opts["omitempty"]; ok && isEmpty(sf) {
|
||||
continue
|
||||
} else if _, ok := opts["omitzero"]; ok && isZero(sf) {
|
||||
continue
|
||||
}
|
||||
|
||||
enc.encode(key.add(keyName), sf)
|
||||
}
|
||||
}
|
||||
writeFields(fieldsDirect)
|
||||
writeFields(fieldsSub)
|
||||
}
|
||||
|
||||
// tomlTypeOfGo returns the TOML type of a Go value. The type may be `nil`,
// which means no concrete TOML type could be found; this is also what makes
// a nil value illegal as an array element. It is used, among other things,
// to determine whether the types of array elements are mixed (which is
// forbidden).
|
||||
func tomlTypeOfGo(rv reflect.Value) tomlType {
|
||||
if isNil(rv) || !rv.IsValid() {
|
||||
return nil
|
||||
}
|
||||
switch rv.Kind() {
|
||||
case reflect.Bool:
|
||||
return tomlBool
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32,
|
||||
reflect.Int64,
|
||||
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32,
|
||||
reflect.Uint64:
|
||||
return tomlInteger
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return tomlFloat
|
||||
case reflect.Array, reflect.Slice:
|
||||
if typeEqual(tomlHash, tomlArrayType(rv)) {
|
||||
return tomlArrayHash
|
||||
} else {
|
||||
return tomlArray
|
||||
}
|
||||
case reflect.Ptr, reflect.Interface:
|
||||
return tomlTypeOfGo(rv.Elem())
|
||||
case reflect.String:
|
||||
return tomlString
|
||||
case reflect.Map:
|
||||
return tomlHash
|
||||
case reflect.Struct:
|
||||
switch rv.Interface().(type) {
|
||||
case time.Time:
|
||||
return tomlDatetime
|
||||
case TextMarshaler:
|
||||
return tomlString
|
||||
default:
|
||||
return tomlHash
|
||||
}
|
||||
default:
|
||||
panic("unexpected reflect.Kind: " + rv.Kind().String())
|
||||
}
|
||||
}
|
||||
|
||||
// tomlArrayType returns the element type of a TOML array. The type returned
|
||||
// may be nil if it cannot be determined (e.g., a nil slice or a zero length
|
||||
// slice). This function may also panic if it finds a type that cannot be
|
||||
// expressed in TOML (such as nil elements, heterogeneous arrays or directly
|
||||
// nested arrays of tables).
|
||||
func tomlArrayType(rv reflect.Value) tomlType {
|
||||
if isNil(rv) || !rv.IsValid() || rv.Len() == 0 {
|
||||
return nil
|
||||
}
|
||||
firstType := tomlTypeOfGo(rv.Index(0))
|
||||
if firstType == nil {
|
||||
encPanic(errArrayNilElement)
|
||||
}
|
||||
|
||||
rvlen := rv.Len()
|
||||
for i := 1; i < rvlen; i++ {
|
||||
elem := rv.Index(i)
|
||||
switch elemType := tomlTypeOfGo(elem); {
|
||||
case elemType == nil:
|
||||
encPanic(errArrayNilElement)
|
||||
case !typeEqual(firstType, elemType):
|
||||
encPanic(errArrayMixedElementTypes)
|
||||
}
|
||||
}
|
||||
// If we have a nested array, then we must make sure that the nested
|
||||
// array contains ONLY primitives.
|
||||
// This checks arbitrarily nested arrays.
|
||||
if typeEqual(firstType, tomlArray) || typeEqual(firstType, tomlArrayHash) {
|
||||
nest := tomlArrayType(eindirect(rv.Index(0)))
|
||||
if typeEqual(nest, tomlHash) || typeEqual(nest, tomlArrayHash) {
|
||||
encPanic(errArrayNoTable)
|
||||
}
|
||||
}
|
||||
return firstType
|
||||
}
|
||||
|
||||
func getOptions(keyName string) (string, map[string]struct{}) {
|
||||
opts := make(map[string]struct{})
|
||||
ss := strings.Split(keyName, ",")
|
||||
name := ss[0]
|
||||
if len(ss) > 1 {
|
||||
for _, opt := range ss {
|
||||
opts[opt] = struct{}{}
|
||||
}
|
||||
}
|
||||
|
||||
return name, opts
|
||||
}
|
||||
|
||||
func isZero(rv reflect.Value) bool {
|
||||
switch rv.Kind() {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
if rv.Int() == 0 {
|
||||
return true
|
||||
}
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
|
||||
if rv.Uint() == 0 {
|
||||
return true
|
||||
}
|
||||
case reflect.Float32, reflect.Float64:
|
||||
if rv.Float() == 0.0 {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func isEmpty(rv reflect.Value) bool {
|
||||
switch rv.Kind() {
|
||||
case reflect.String:
|
||||
if len(strings.TrimSpace(rv.String())) == 0 {
|
||||
return true
|
||||
}
|
||||
case reflect.Array, reflect.Slice, reflect.Map:
|
||||
if rv.Len() == 0 {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (enc *Encoder) newline() {
|
||||
if enc.hasWritten {
|
||||
enc.wf("\n")
|
||||
}
|
||||
}
|
||||
|
||||
func (enc *Encoder) keyEqElement(key Key, val reflect.Value) {
|
||||
if len(key) == 0 {
|
||||
encPanic(errNoKey)
|
||||
}
|
||||
panicIfInvalidKey(key)
|
||||
enc.wf("%s%s = ", enc.indentStr(key), key.maybeQuoted(len(key)-1))
|
||||
enc.eElement(val)
|
||||
enc.newline()
|
||||
}
|
||||
|
||||
func (enc *Encoder) wf(format string, v ...interface{}) {
|
||||
if _, err := fmt.Fprintf(enc.w, format, v...); err != nil {
|
||||
encPanic(err)
|
||||
}
|
||||
enc.hasWritten = true
|
||||
}
|
||||
|
||||
func (enc *Encoder) indentStr(key Key) string {
|
||||
return strings.Repeat(enc.Indent, len(key)-1)
|
||||
}
|
||||
|
||||
func encPanic(err error) {
|
||||
panic(tomlEncodeError{err})
|
||||
}
|
||||
|
||||
func eindirect(v reflect.Value) reflect.Value {
|
||||
switch v.Kind() {
|
||||
case reflect.Ptr, reflect.Interface:
|
||||
return eindirect(v.Elem())
|
||||
default:
|
||||
return v
|
||||
}
|
||||
}
|
||||
|
||||
func isNil(rv reflect.Value) bool {
|
||||
switch rv.Kind() {
|
||||
case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
|
||||
return rv.IsNil()
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func panicIfInvalidKey(key Key) {
|
||||
for _, k := range key {
|
||||
if len(k) == 0 {
|
||||
encPanic(e("Key '%s' is not a valid table name. Key names "+
|
||||
"cannot be empty.", key.maybeQuotedAll()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func isValidKeyName(s string) bool {
|
||||
return len(s) != 0
|
||||
}
|
||||
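The Encoder above sorts map keys for deterministic output and exposes one formatting knob, the Indent field. A minimal sketch of overriding the default two-space indent, assuming the package is imported as toml; the anonymous config struct is illustrative only:

package main

import (
	"log"
	"os"

	"github.com/BurntSushi/toml"
)

func main() {
	cfg := struct {
		Title string
		Owner struct{ Name string }
	}{Title: "example"}
	cfg.Owner.Name = "someone"

	enc := toml.NewEncoder(os.Stdout)
	enc.Indent = "\t" // one tab per nesting level instead of two spaces
	if err := enc.Encode(cfg); err != nil {
		log.Fatal(err)
	}
}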
542 Godeps/_workspace/src/github.com/BurntSushi/toml/encode_test.go (generated, vendored, new file)
@@ -0,0 +1,542 @@
|
||||
package toml
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"log"
|
||||
"net"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestEncodeRoundTrip(t *testing.T) {
|
||||
type Config struct {
|
||||
Age int
|
||||
Cats []string
|
||||
Pi float64
|
||||
Perfection []int
|
||||
DOB time.Time
|
||||
Ipaddress net.IP
|
||||
}
|
||||
|
||||
var inputs = Config{
|
||||
13,
|
||||
[]string{"one", "two", "three"},
|
||||
3.145,
|
||||
[]int{11, 2, 3, 4},
|
||||
time.Now(),
|
||||
net.ParseIP("192.168.59.254"),
|
||||
}
|
||||
|
||||
var firstBuffer bytes.Buffer
|
||||
e := NewEncoder(&firstBuffer)
|
||||
err := e.Encode(inputs)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
var outputs Config
|
||||
if _, err := Decode(firstBuffer.String(), &outputs); err != nil {
|
||||
log.Printf("Could not decode:\n-----\n%s\n-----\n",
|
||||
firstBuffer.String())
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// could test each value individually, but I'm lazy
|
||||
var secondBuffer bytes.Buffer
|
||||
e2 := NewEncoder(&secondBuffer)
|
||||
err = e2.Encode(outputs)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if firstBuffer.String() != secondBuffer.String() {
|
||||
t.Error(
|
||||
firstBuffer.String(),
|
||||
"\n\n is not identical to\n\n",
|
||||
secondBuffer.String())
|
||||
}
|
||||
}
|
||||
|
||||
// XXX(burntsushi)
|
||||
// I think these tests probably should be removed. They are good, but they
|
||||
// ought to be obsolete by toml-test.
|
||||
func TestEncode(t *testing.T) {
|
||||
type Embedded struct {
|
||||
Int int `toml:"_int"`
|
||||
}
|
||||
type NonStruct int
|
||||
|
||||
date := time.Date(2014, 5, 11, 20, 30, 40, 0, time.FixedZone("IST", 3600))
|
||||
dateStr := "2014-05-11T19:30:40Z"
|
||||
|
||||
tests := map[string]struct {
|
||||
input interface{}
|
||||
wantOutput string
|
||||
wantError error
|
||||
}{
|
||||
"bool field": {
|
||||
input: struct {
|
||||
BoolTrue bool
|
||||
BoolFalse bool
|
||||
}{true, false},
|
||||
wantOutput: "BoolTrue = true\nBoolFalse = false\n",
|
||||
},
|
||||
"int fields": {
|
||||
input: struct {
|
||||
Int int
|
||||
Int8 int8
|
||||
Int16 int16
|
||||
Int32 int32
|
||||
Int64 int64
|
||||
}{1, 2, 3, 4, 5},
|
||||
wantOutput: "Int = 1\nInt8 = 2\nInt16 = 3\nInt32 = 4\nInt64 = 5\n",
|
||||
},
|
||||
"uint fields": {
|
||||
input: struct {
|
||||
Uint uint
|
||||
Uint8 uint8
|
||||
Uint16 uint16
|
||||
Uint32 uint32
|
||||
Uint64 uint64
|
||||
}{1, 2, 3, 4, 5},
|
||||
wantOutput: "Uint = 1\nUint8 = 2\nUint16 = 3\nUint32 = 4" +
|
||||
"\nUint64 = 5\n",
|
||||
},
|
||||
"float fields": {
|
||||
input: struct {
|
||||
Float32 float32
|
||||
Float64 float64
|
||||
}{1.5, 2.5},
|
||||
wantOutput: "Float32 = 1.5\nFloat64 = 2.5\n",
|
||||
},
|
||||
"string field": {
|
||||
input: struct{ String string }{"foo"},
|
||||
wantOutput: "String = \"foo\"\n",
|
||||
},
|
||||
"string field and unexported field": {
|
||||
input: struct {
|
||||
String string
|
||||
unexported int
|
||||
}{"foo", 0},
|
||||
wantOutput: "String = \"foo\"\n",
|
||||
},
|
||||
"datetime field in UTC": {
|
||||
input: struct{ Date time.Time }{date},
|
||||
wantOutput: fmt.Sprintf("Date = %s\n", dateStr),
|
||||
},
|
||||
"datetime field as primitive": {
|
||||
// Using a map here to fail if isStructOrMap() returns true for
|
||||
// time.Time.
|
||||
input: map[string]interface{}{
|
||||
"Date": date,
|
||||
"Int": 1,
|
||||
},
|
||||
wantOutput: fmt.Sprintf("Date = %s\nInt = 1\n", dateStr),
|
||||
},
|
||||
"array fields": {
|
||||
input: struct {
|
||||
IntArray0 [0]int
|
||||
IntArray3 [3]int
|
||||
}{[0]int{}, [3]int{1, 2, 3}},
|
||||
wantOutput: "IntArray0 = []\nIntArray3 = [1, 2, 3]\n",
|
||||
},
|
||||
"slice fields": {
|
||||
input: struct{ IntSliceNil, IntSlice0, IntSlice3 []int }{
|
||||
nil, []int{}, []int{1, 2, 3},
|
||||
},
|
||||
wantOutput: "IntSlice0 = []\nIntSlice3 = [1, 2, 3]\n",
|
||||
},
|
||||
"datetime slices": {
|
||||
input: struct{ DatetimeSlice []time.Time }{
|
||||
[]time.Time{date, date},
|
||||
},
|
||||
wantOutput: fmt.Sprintf("DatetimeSlice = [%s, %s]\n",
|
||||
dateStr, dateStr),
|
||||
},
|
||||
"nested arrays and slices": {
|
||||
input: struct {
|
||||
SliceOfArrays [][2]int
|
||||
ArrayOfSlices [2][]int
|
||||
SliceOfArraysOfSlices [][2][]int
|
||||
ArrayOfSlicesOfArrays [2][][2]int
|
||||
SliceOfMixedArrays [][2]interface{}
|
||||
ArrayOfMixedSlices [2][]interface{}
|
||||
}{
|
||||
[][2]int{{1, 2}, {3, 4}},
|
||||
[2][]int{{1, 2}, {3, 4}},
|
||||
[][2][]int{
|
||||
{
|
||||
{1, 2}, {3, 4},
|
||||
},
|
||||
{
|
||||
{5, 6}, {7, 8},
|
||||
},
|
||||
},
|
||||
[2][][2]int{
|
||||
{
|
||||
{1, 2}, {3, 4},
|
||||
},
|
||||
{
|
||||
{5, 6}, {7, 8},
|
||||
},
|
||||
},
|
||||
[][2]interface{}{
|
||||
{1, 2}, {"a", "b"},
|
||||
},
|
||||
[2][]interface{}{
|
||||
{1, 2}, {"a", "b"},
|
||||
},
|
||||
},
|
||||
wantOutput: `SliceOfArrays = [[1, 2], [3, 4]]
|
||||
ArrayOfSlices = [[1, 2], [3, 4]]
|
||||
SliceOfArraysOfSlices = [[[1, 2], [3, 4]], [[5, 6], [7, 8]]]
|
||||
ArrayOfSlicesOfArrays = [[[1, 2], [3, 4]], [[5, 6], [7, 8]]]
|
||||
SliceOfMixedArrays = [[1, 2], ["a", "b"]]
|
||||
ArrayOfMixedSlices = [[1, 2], ["a", "b"]]
|
||||
`,
|
||||
},
|
||||
"empty slice": {
|
||||
input: struct{ Empty []interface{} }{[]interface{}{}},
|
||||
wantOutput: "Empty = []\n",
|
||||
},
|
||||
"(error) slice with element type mismatch (string and integer)": {
|
||||
input: struct{ Mixed []interface{} }{[]interface{}{1, "a"}},
|
||||
wantError: errArrayMixedElementTypes,
|
||||
},
|
||||
"(error) slice with element type mismatch (integer and float)": {
|
||||
input: struct{ Mixed []interface{} }{[]interface{}{1, 2.5}},
|
||||
wantError: errArrayMixedElementTypes,
|
||||
},
|
||||
"slice with elems of differing Go types, same TOML types": {
|
||||
input: struct {
|
||||
MixedInts []interface{}
|
||||
MixedFloats []interface{}
|
||||
}{
|
||||
[]interface{}{
|
||||
int(1), int8(2), int16(3), int32(4), int64(5),
|
||||
uint(1), uint8(2), uint16(3), uint32(4), uint64(5),
|
||||
},
|
||||
[]interface{}{float32(1.5), float64(2.5)},
|
||||
},
|
||||
wantOutput: "MixedInts = [1, 2, 3, 4, 5, 1, 2, 3, 4, 5]\n" +
|
||||
"MixedFloats = [1.5, 2.5]\n",
|
||||
},
|
||||
"(error) slice w/ element type mismatch (one is nested array)": {
|
||||
input: struct{ Mixed []interface{} }{
|
||||
[]interface{}{1, []interface{}{2}},
|
||||
},
|
||||
wantError: errArrayMixedElementTypes,
|
||||
},
|
||||
"(error) slice with 1 nil element": {
|
||||
input: struct{ NilElement1 []interface{} }{[]interface{}{nil}},
|
||||
wantError: errArrayNilElement,
|
||||
},
|
||||
"(error) slice with 1 nil element (and other non-nil elements)": {
|
||||
input: struct{ NilElement []interface{} }{
|
||||
[]interface{}{1, nil},
|
||||
},
|
||||
wantError: errArrayNilElement,
|
||||
},
|
||||
"simple map": {
|
||||
input: map[string]int{"a": 1, "b": 2},
|
||||
wantOutput: "a = 1\nb = 2\n",
|
||||
},
|
||||
"map with interface{} value type": {
|
||||
input: map[string]interface{}{"a": 1, "b": "c"},
|
||||
wantOutput: "a = 1\nb = \"c\"\n",
|
||||
},
|
||||
"map with interface{} value type, some of which are structs": {
|
||||
input: map[string]interface{}{
|
||||
"a": struct{ Int int }{2},
|
||||
"b": 1,
|
||||
},
|
||||
wantOutput: "b = 1\n\n[a]\n Int = 2\n",
|
||||
},
|
||||
"nested map": {
|
||||
input: map[string]map[string]int{
|
||||
"a": {"b": 1},
|
||||
"c": {"d": 2},
|
||||
},
|
||||
wantOutput: "[a]\n b = 1\n\n[c]\n d = 2\n",
|
||||
},
|
||||
"nested struct": {
|
||||
input: struct{ Struct struct{ Int int } }{
|
||||
struct{ Int int }{1},
|
||||
},
|
||||
wantOutput: "[Struct]\n Int = 1\n",
|
||||
},
|
||||
"nested struct and non-struct field": {
|
||||
input: struct {
|
||||
Struct struct{ Int int }
|
||||
Bool bool
|
||||
}{struct{ Int int }{1}, true},
|
||||
wantOutput: "Bool = true\n\n[Struct]\n Int = 1\n",
|
||||
},
|
||||
"2 nested structs": {
|
||||
input: struct{ Struct1, Struct2 struct{ Int int } }{
|
||||
struct{ Int int }{1}, struct{ Int int }{2},
|
||||
},
|
||||
wantOutput: "[Struct1]\n Int = 1\n\n[Struct2]\n Int = 2\n",
|
||||
},
|
||||
"deeply nested structs": {
|
||||
input: struct {
|
||||
Struct1, Struct2 struct{ Struct3 *struct{ Int int } }
|
||||
}{
|
||||
struct{ Struct3 *struct{ Int int } }{&struct{ Int int }{1}},
|
||||
struct{ Struct3 *struct{ Int int } }{nil},
|
||||
},
|
||||
wantOutput: "[Struct1]\n [Struct1.Struct3]\n Int = 1" +
|
||||
"\n\n[Struct2]\n",
|
||||
},
|
||||
"nested struct with nil struct elem": {
|
||||
input: struct {
|
||||
Struct struct{ Inner *struct{ Int int } }
|
||||
}{
|
||||
struct{ Inner *struct{ Int int } }{nil},
|
||||
},
|
||||
wantOutput: "[Struct]\n",
|
||||
},
|
||||
"nested struct with no fields": {
|
||||
input: struct {
|
||||
Struct struct{ Inner struct{} }
|
||||
}{
|
||||
struct{ Inner struct{} }{struct{}{}},
|
||||
},
|
||||
wantOutput: "[Struct]\n [Struct.Inner]\n",
|
||||
},
|
||||
"struct with tags": {
|
||||
input: struct {
|
||||
Struct struct {
|
||||
Int int `toml:"_int"`
|
||||
} `toml:"_struct"`
|
||||
Bool bool `toml:"_bool"`
|
||||
}{
|
||||
struct {
|
||||
Int int `toml:"_int"`
|
||||
}{1}, true,
|
||||
},
|
||||
wantOutput: "_bool = true\n\n[_struct]\n _int = 1\n",
|
||||
},
|
||||
"embedded struct": {
|
||||
input: struct{ Embedded }{Embedded{1}},
|
||||
wantOutput: "_int = 1\n",
|
||||
},
|
||||
"embedded *struct": {
|
||||
input: struct{ *Embedded }{&Embedded{1}},
|
||||
wantOutput: "_int = 1\n",
|
||||
},
|
||||
"nested embedded struct": {
|
||||
input: struct {
|
||||
Struct struct{ Embedded } `toml:"_struct"`
|
||||
}{struct{ Embedded }{Embedded{1}}},
|
||||
wantOutput: "[_struct]\n _int = 1\n",
|
||||
},
|
||||
"nested embedded *struct": {
|
||||
input: struct {
|
||||
Struct struct{ *Embedded } `toml:"_struct"`
|
||||
}{struct{ *Embedded }{&Embedded{1}}},
|
||||
wantOutput: "[_struct]\n _int = 1\n",
|
||||
},
|
||||
"array of tables": {
|
||||
input: struct {
|
||||
Structs []*struct{ Int int } `toml:"struct"`
|
||||
}{
|
||||
[]*struct{ Int int }{{1}, {3}},
|
||||
},
|
||||
wantOutput: "[[struct]]\n Int = 1\n\n[[struct]]\n Int = 3\n",
|
||||
},
|
||||
"array of tables order": {
|
||||
input: map[string]interface{}{
|
||||
"map": map[string]interface{}{
|
||||
"zero": 5,
|
||||
"arr": []map[string]int{
|
||||
map[string]int{
|
||||
"friend": 5,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantOutput: "[map]\n zero = 5\n\n [[map.arr]]\n friend = 5\n",
|
||||
},
|
||||
"(error) top-level slice": {
|
||||
input: []struct{ Int int }{{1}, {2}, {3}},
|
||||
wantError: errNoKey,
|
||||
},
|
||||
"(error) slice of slice": {
|
||||
input: struct {
|
||||
Slices [][]struct{ Int int }
|
||||
}{
|
||||
[][]struct{ Int int }{{{1}}, {{2}}, {{3}}},
|
||||
},
|
||||
wantError: errArrayNoTable,
|
||||
},
|
||||
"(error) map no string key": {
|
||||
input: map[int]string{1: ""},
|
||||
wantError: errNonString,
|
||||
},
|
||||
"(error) anonymous non-struct": {
|
||||
input: struct{ NonStruct }{5},
|
||||
wantError: errAnonNonStruct,
|
||||
},
|
||||
"(error) empty key name": {
|
||||
input: map[string]int{"": 1},
|
||||
wantError: errAnything,
|
||||
},
|
||||
"(error) empty map name": {
|
||||
input: map[string]interface{}{
|
||||
"": map[string]int{"v": 1},
|
||||
},
|
||||
wantError: errAnything,
|
||||
},
|
||||
}
|
||||
for label, test := range tests {
|
||||
encodeExpected(t, label, test.input, test.wantOutput, test.wantError)
|
||||
}
|
||||
}
|
||||
|
||||
func TestEncodeNestedTableArrays(t *testing.T) {
|
||||
type song struct {
|
||||
Name string `toml:"name"`
|
||||
}
|
||||
type album struct {
|
||||
Name string `toml:"name"`
|
||||
Songs []song `toml:"songs"`
|
||||
}
|
||||
type springsteen struct {
|
||||
Albums []album `toml:"albums"`
|
||||
}
|
||||
value := springsteen{
|
||||
[]album{
|
||||
{"Born to Run",
|
||||
[]song{{"Jungleland"}, {"Meeting Across the River"}}},
|
||||
{"Born in the USA",
|
||||
[]song{{"Glory Days"}, {"Dancing in the Dark"}}},
|
||||
},
|
||||
}
|
||||
expected := `[[albums]]
|
||||
name = "Born to Run"
|
||||
|
||||
[[albums.songs]]
|
||||
name = "Jungleland"
|
||||
|
||||
[[albums.songs]]
|
||||
name = "Meeting Across the River"
|
||||
|
||||
[[albums]]
|
||||
name = "Born in the USA"
|
||||
|
||||
[[albums.songs]]
|
||||
name = "Glory Days"
|
||||
|
||||
[[albums.songs]]
|
||||
name = "Dancing in the Dark"
|
||||
`
|
||||
encodeExpected(t, "nested table arrays", value, expected, nil)
|
||||
}
|
||||
|
||||
func TestEncodeArrayHashWithNormalHashOrder(t *testing.T) {
|
||||
type Alpha struct {
|
||||
V int
|
||||
}
|
||||
type Beta struct {
|
||||
V int
|
||||
}
|
||||
type Conf struct {
|
||||
V int
|
||||
A Alpha
|
||||
B []Beta
|
||||
}
|
||||
|
||||
val := Conf{
|
||||
V: 1,
|
||||
A: Alpha{2},
|
||||
B: []Beta{{3}},
|
||||
}
|
||||
expected := "V = 1\n\n[A]\n V = 2\n\n[[B]]\n V = 3\n"
|
||||
encodeExpected(t, "array hash with normal hash order", val, expected, nil)
|
||||
}
|
||||
|
||||
func TestEncodeWithOmitEmpty(t *testing.T) {
|
||||
type simple struct {
|
||||
User string `toml:"user"`
|
||||
Pass string `toml:"password,omitempty"`
|
||||
}
|
||||
|
||||
value := simple{"Testing", ""}
|
||||
expected := fmt.Sprintf("user = %q\n", value.User)
|
||||
encodeExpected(t, "simple with omitempty, is empty", value, expected, nil)
|
||||
value.Pass = "some password"
|
||||
expected = fmt.Sprintf("user = %q\npassword = %q\n", value.User, value.Pass)
|
||||
encodeExpected(t, "simple with omitempty, not empty", value, expected, nil)
|
||||
}
|
||||
|
||||
func TestEncodeWithOmitZero(t *testing.T) {
|
||||
type simple struct {
|
||||
Number int `toml:"number,omitzero"`
|
||||
Real float64 `toml:"real,omitzero"`
|
||||
Unsigned uint `toml:"unsigned,omitzero"`
|
||||
}
|
||||
|
||||
value := simple{0, 0.0, uint(0)}
|
||||
expected := ""
|
||||
|
||||
encodeExpected(t, "simple with omitzero, all zero", value, expected, nil)
|
||||
|
||||
value.Number = 10
|
||||
value.Real = 20
|
||||
value.Unsigned = 5
|
||||
expected = `number = 10
|
||||
real = 20.0
|
||||
unsigned = 5
|
||||
`
|
||||
encodeExpected(t, "simple with omitzero, non-zero", value, expected, nil)
|
||||
}
|
||||
|
||||
func encodeExpected(
|
||||
t *testing.T, label string, val interface{}, wantStr string, wantErr error,
|
||||
) {
|
||||
var buf bytes.Buffer
|
||||
enc := NewEncoder(&buf)
|
||||
err := enc.Encode(val)
|
||||
if err != wantErr {
|
||||
if wantErr != nil {
|
||||
if wantErr == errAnything && err != nil {
|
||||
return
|
||||
}
|
||||
t.Errorf("%s: want Encode error %v, got %v", label, wantErr, err)
|
||||
} else {
|
||||
t.Errorf("%s: Encode failed: %s", label, err)
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
if got := buf.String(); wantStr != got {
|
||||
t.Errorf("%s: want\n-----\n%q\n-----\nbut got\n-----\n%q\n-----\n",
|
||||
label, wantStr, got)
|
||||
}
|
||||
}
|
||||
|
||||
func ExampleEncoder_Encode() {
|
||||
date, _ := time.Parse(time.RFC822, "14 Mar 10 18:00 UTC")
|
||||
var config = map[string]interface{}{
|
||||
"date": date,
|
||||
"counts": []int{1, 1, 2, 3, 5, 8},
|
||||
"hash": map[string]string{
|
||||
"key1": "val1",
|
||||
"key2": "val2",
|
||||
},
|
||||
}
|
||||
buf := new(bytes.Buffer)
|
||||
if err := NewEncoder(buf).Encode(config); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
fmt.Println(buf.String())
|
||||
|
||||
// Output:
|
||||
// counts = [1, 1, 2, 3, 5, 8]
|
||||
// date = 2010-03-14T18:00:00Z
|
||||
//
|
||||
// [hash]
|
||||
// key1 = "val1"
|
||||
// key2 = "val2"
|
||||
}
|
||||
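The tests above exercise the toml struct-tag options (renaming, omitempty, omitzero) that getOptions parses in encode.go. A minimal usage sketch, assuming the package is imported as toml; the account struct is illustrative only:

package main

import (
	"bytes"
	"fmt"
	"log"

	"github.com/BurntSushi/toml"
)

type account struct {
	User string `toml:"user"`
	// The password key is only written when the field is non-empty,
	// mirroring TestEncodeWithOmitEmpty above.
	Pass string `toml:"password,omitempty"`
}

func main() {
	var buf bytes.Buffer
	if err := toml.NewEncoder(&buf).Encode(account{User: "alice"}); err != nil {
		log.Fatal(err)
	}
	fmt.Print(buf.String()) // only `user = "alice"` is emitted
}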
19 Godeps/_workspace/src/github.com/BurntSushi/toml/encoding_types.go (generated, vendored, new file)
@@ -0,0 +1,19 @@
|
||||
// +build go1.2
|
||||
|
||||
package toml
|
||||
|
||||
// In order to support Go 1.1, we define our own TextMarshaler and
|
||||
// TextUnmarshaler types. For Go 1.2+, we just alias them with the
|
||||
// standard library interfaces.
|
||||
|
||||
import (
|
||||
"encoding"
|
||||
)
|
||||
|
||||
// TextMarshaler is a synonym for encoding.TextMarshaler. It is defined here
|
||||
// so that Go 1.1 can be supported.
|
||||
type TextMarshaler encoding.TextMarshaler
|
||||
|
||||
// TextUnmarshaler is a synonym for encoding.TextUnmarshaler. It is defined
|
||||
// here so that Go 1.1 can be supported.
|
||||
type TextUnmarshaler encoding.TextUnmarshaler
|
||||
18 Godeps/_workspace/src/github.com/BurntSushi/toml/encoding_types_1.1.go (generated, vendored, new file)
@@ -0,0 +1,18 @@
|
||||
// +build !go1.2
|
||||
|
||||
package toml
|
||||
|
||||
// These interfaces were introduced in Go 1.2, so we add them manually when
|
||||
// compiling for Go 1.1.
|
||||
|
||||
// TextMarshaler is a synonym for encoding.TextMarshaler. It is defined here
|
||||
// so that Go 1.1 can be supported.
|
||||
type TextMarshaler interface {
|
||||
MarshalText() (text []byte, err error)
|
||||
}
|
||||
|
||||
// TextUnmarshaler is a synonym for encoding.TextUnmarshaler. It is defined
|
||||
// here so that Go 1.1 can be supported.
|
||||
type TextUnmarshaler interface {
|
||||
UnmarshalText(text []byte) error
|
||||
}
|
||||
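The two encoding_types files above exist so that TextMarshaler and TextUnmarshaler behave the same on Go 1.1 and Go 1.2+; the encoder writes any TextMarshaler as a TOML string. A minimal sketch of a type that relies on this, assuming the package is imported as toml; the level type is illustrative only:

package main

import (
	"bytes"
	"fmt"
	"log"

	"github.com/BurntSushi/toml"
)

// level encodes as a TOML string because it implements MarshalText.
type level int

func (l level) MarshalText() ([]byte, error) {
	switch l {
	case 0:
		return []byte("info"), nil
	case 1:
		return []byte("debug"), nil
	}
	return nil, fmt.Errorf("unknown level %d", int(l))
}

func main() {
	var buf bytes.Buffer
	cfg := struct{ Verbosity level }{Verbosity: 1}
	if err := toml.NewEncoder(&buf).Encode(cfg); err != nil {
		log.Fatal(err)
	}
	fmt.Print(buf.String()) // Verbosity = "debug"
}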
874 Godeps/_workspace/src/github.com/BurntSushi/toml/lex.go (generated, vendored, new file)
@@ -0,0 +1,874 @@
|
||||
package toml
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type itemType int
|
||||
|
||||
const (
|
||||
itemError itemType = iota
|
||||
itemNIL // used in the parser to indicate no type
|
||||
itemEOF
|
||||
itemText
|
||||
itemString
|
||||
itemRawString
|
||||
itemMultilineString
|
||||
itemRawMultilineString
|
||||
itemBool
|
||||
itemInteger
|
||||
itemFloat
|
||||
itemDatetime
|
||||
itemArray // the start of an array
|
||||
itemArrayEnd
|
||||
itemTableStart
|
||||
itemTableEnd
|
||||
itemArrayTableStart
|
||||
itemArrayTableEnd
|
||||
itemKeyStart
|
||||
itemCommentStart
|
||||
)
|
||||
|
||||
const (
|
||||
eof = 0
|
||||
tableStart = '['
|
||||
tableEnd = ']'
|
||||
arrayTableStart = '['
|
||||
arrayTableEnd = ']'
|
||||
tableSep = '.'
|
||||
keySep = '='
|
||||
arrayStart = '['
|
||||
arrayEnd = ']'
|
||||
arrayValTerm = ','
|
||||
commentStart = '#'
|
||||
stringStart = '"'
|
||||
stringEnd = '"'
|
||||
rawStringStart = '\''
|
||||
rawStringEnd = '\''
|
||||
)
|
||||
|
||||
type stateFn func(lx *lexer) stateFn
|
||||
|
||||
type lexer struct {
|
||||
input string
|
||||
start int
|
||||
pos int
|
||||
width int
|
||||
line int
|
||||
state stateFn
|
||||
items chan item
|
||||
|
||||
// A stack of state functions used to maintain context.
|
||||
// The idea is to reuse parts of the state machine in various places.
|
||||
// For example, values can appear at the top level or within arbitrarily
|
||||
// nested arrays. The last state on the stack is used after a value has
|
||||
// been lexed. Similarly for comments.
|
||||
stack []stateFn
|
||||
}
|
||||
|
||||
type item struct {
|
||||
typ itemType
|
||||
val string
|
||||
line int
|
||||
}
|
||||
|
||||
func (lx *lexer) nextItem() item {
|
||||
for {
|
||||
select {
|
||||
case item := <-lx.items:
|
||||
return item
|
||||
default:
|
||||
lx.state = lx.state(lx)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func lex(input string) *lexer {
|
||||
lx := &lexer{
|
||||
input: input + "\n",
|
||||
state: lexTop,
|
||||
line: 1,
|
||||
items: make(chan item, 10),
|
||||
stack: make([]stateFn, 0, 10),
|
||||
}
|
||||
return lx
|
||||
}
|
||||
|
||||
func (lx *lexer) push(state stateFn) {
|
||||
lx.stack = append(lx.stack, state)
|
||||
}
|
||||
|
||||
func (lx *lexer) pop() stateFn {
|
||||
if len(lx.stack) == 0 {
|
||||
return lx.errorf("BUG in lexer: no states to pop.")
|
||||
}
|
||||
last := lx.stack[len(lx.stack)-1]
|
||||
lx.stack = lx.stack[0 : len(lx.stack)-1]
|
||||
return last
|
||||
}
|
||||
|
||||
func (lx *lexer) current() string {
|
||||
return lx.input[lx.start:lx.pos]
|
||||
}
|
||||
|
||||
func (lx *lexer) emit(typ itemType) {
|
||||
lx.items <- item{typ, lx.current(), lx.line}
|
||||
lx.start = lx.pos
|
||||
}
|
||||
|
||||
func (lx *lexer) emitTrim(typ itemType) {
|
||||
lx.items <- item{typ, strings.TrimSpace(lx.current()), lx.line}
|
||||
lx.start = lx.pos
|
||||
}
|
||||
|
||||
func (lx *lexer) next() (r rune) {
|
||||
if lx.pos >= len(lx.input) {
|
||||
lx.width = 0
|
||||
return eof
|
||||
}
|
||||
|
||||
if lx.input[lx.pos] == '\n' {
|
||||
lx.line++
|
||||
}
|
||||
r, lx.width = utf8.DecodeRuneInString(lx.input[lx.pos:])
|
||||
lx.pos += lx.width
|
||||
return r
|
||||
}
|
||||
|
||||
// ignore skips over the pending input before this point.
|
||||
func (lx *lexer) ignore() {
|
||||
lx.start = lx.pos
|
||||
}
|
||||
|
||||
// backup steps back one rune. Can be called only once per call of next.
|
||||
func (lx *lexer) backup() {
|
||||
lx.pos -= lx.width
|
||||
if lx.pos < len(lx.input) && lx.input[lx.pos] == '\n' {
|
||||
lx.line--
|
||||
}
|
||||
}
|
||||
|
||||
// accept consumes the next rune if it's equal to `valid`.
|
||||
func (lx *lexer) accept(valid rune) bool {
|
||||
if lx.next() == valid {
|
||||
return true
|
||||
}
|
||||
lx.backup()
|
||||
return false
|
||||
}
|
||||
|
||||
// peek returns but does not consume the next rune in the input.
|
||||
func (lx *lexer) peek() rune {
|
||||
r := lx.next()
|
||||
lx.backup()
|
||||
return r
|
||||
}
|
||||
|
||||
// errorf stops all lexing by emitting an error and returning `nil`.
|
||||
// Note that any value that is a character is escaped if it's a special
|
||||
// character (new lines, tabs, etc.).
|
||||
func (lx *lexer) errorf(format string, values ...interface{}) stateFn {
|
||||
lx.items <- item{
|
||||
itemError,
|
||||
fmt.Sprintf(format, values...),
|
||||
lx.line,
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// lexTop consumes elements at the top level of TOML data.
|
||||
func lexTop(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
if isWhitespace(r) || isNL(r) {
|
||||
return lexSkip(lx, lexTop)
|
||||
}
|
||||
|
||||
switch r {
|
||||
case commentStart:
|
||||
lx.push(lexTop)
|
||||
return lexCommentStart
|
||||
case tableStart:
|
||||
return lexTableStart
|
||||
case eof:
|
||||
if lx.pos > lx.start {
|
||||
return lx.errorf("Unexpected EOF.")
|
||||
}
|
||||
lx.emit(itemEOF)
|
||||
return nil
|
||||
}
|
||||
|
||||
// At this point, the only valid item can be a key, so we back up
|
||||
// and let the key lexer do the rest.
|
||||
lx.backup()
|
||||
lx.push(lexTopEnd)
|
||||
return lexKeyStart
|
||||
}
|
||||
|
||||
// lexTopEnd is entered whenever a top-level item has been consumed. (A value
|
||||
// or a table.) It must see only whitespace, and will turn back to lexTop
|
||||
// upon a new line. If it sees EOF, it will quit the lexer successfully.
|
||||
func lexTopEnd(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case r == commentStart:
|
||||
// a comment will read to a new line for us.
|
||||
lx.push(lexTop)
|
||||
return lexCommentStart
|
||||
case isWhitespace(r):
|
||||
return lexTopEnd
|
||||
case isNL(r):
|
||||
lx.ignore()
|
||||
return lexTop
|
||||
case r == eof:
|
||||
lx.ignore()
|
||||
return lexTop
|
||||
}
|
||||
return lx.errorf("Expected a top-level item to end with a new line, "+
|
||||
"comment or EOF, but got %q instead.", r)
|
||||
}
|
||||
|
||||
// lexTableStart lexes the beginning of a table. Namely, it makes sure that
// it starts with a character other than '.' and ']'.
|
||||
// It assumes that '[' has already been consumed.
|
||||
// It also handles the case that this is an item in an array of tables.
|
||||
// e.g., '[[name]]'.
|
||||
func lexTableStart(lx *lexer) stateFn {
|
||||
if lx.peek() == arrayTableStart {
|
||||
lx.next()
|
||||
lx.emit(itemArrayTableStart)
|
||||
lx.push(lexArrayTableEnd)
|
||||
} else {
|
||||
lx.emit(itemTableStart)
|
||||
lx.push(lexTableEnd)
|
||||
}
|
||||
return lexTableNameStart
|
||||
}
|
||||
|
||||
func lexTableEnd(lx *lexer) stateFn {
|
||||
lx.emit(itemTableEnd)
|
||||
return lexTopEnd
|
||||
}
|
||||
|
||||
func lexArrayTableEnd(lx *lexer) stateFn {
|
||||
if r := lx.next(); r != arrayTableEnd {
|
||||
return lx.errorf("Expected end of table array name delimiter %q, "+
|
||||
"but got %q instead.", arrayTableEnd, r)
|
||||
}
|
||||
lx.emit(itemArrayTableEnd)
|
||||
return lexTopEnd
|
||||
}
|
||||
|
||||
func lexTableNameStart(lx *lexer) stateFn {
|
||||
switch r := lx.peek(); {
|
||||
case r == tableEnd || r == eof:
|
||||
return lx.errorf("Unexpected end of table name. (Table names cannot " +
|
||||
"be empty.)")
|
||||
case r == tableSep:
|
||||
return lx.errorf("Unexpected table separator. (Table names cannot " +
|
||||
"be empty.)")
|
||||
case r == stringStart || r == rawStringStart:
|
||||
lx.ignore()
|
||||
lx.push(lexTableNameEnd)
|
||||
return lexValue // reuse string lexing
|
||||
case isWhitespace(r):
|
||||
return lexTableNameStart
|
||||
default:
|
||||
return lexBareTableName
|
||||
}
|
||||
}
|
||||
|
||||
// lexBareTableName lexes the name of a table. It assumes that at least one
// valid character for the table has already been read.
|
||||
func lexBareTableName(lx *lexer) stateFn {
|
||||
switch r := lx.next(); {
|
||||
case isBareKeyChar(r):
|
||||
return lexBareTableName
|
||||
case r == tableSep || r == tableEnd:
|
||||
lx.backup()
|
||||
lx.emitTrim(itemText)
|
||||
return lexTableNameEnd
|
||||
default:
|
||||
return lx.errorf("Bare keys cannot contain %q.", r)
|
||||
}
|
||||
}
|
||||
|
||||
// lexTableNameEnd reads the end of a piece of a table name, optionally
|
||||
// consuming whitespace.
|
||||
func lexTableNameEnd(lx *lexer) stateFn {
|
||||
switch r := lx.next(); {
|
||||
case isWhitespace(r):
|
||||
return lexTableNameEnd
|
||||
case r == tableSep:
|
||||
lx.ignore()
|
||||
return lexTableNameStart
|
||||
case r == tableEnd:
|
||||
return lx.pop()
|
||||
default:
|
||||
return lx.errorf("Expected '.' or ']' to end table name, but got %q "+
|
||||
"instead.", r)
|
||||
}
|
||||
}
|
||||
|
||||
// lexKeyStart consumes a key name up until the first non-whitespace character.
|
||||
// lexKeyStart will ignore whitespace.
|
||||
func lexKeyStart(lx *lexer) stateFn {
|
||||
r := lx.peek()
|
||||
switch {
|
||||
case r == keySep:
|
||||
return lx.errorf("Unexpected key separator %q.", keySep)
|
||||
case isWhitespace(r) || isNL(r):
|
||||
lx.next()
|
||||
return lexSkip(lx, lexKeyStart)
|
||||
case r == stringStart || r == rawStringStart:
|
||||
lx.ignore()
|
||||
lx.emit(itemKeyStart)
|
||||
lx.push(lexKeyEnd)
|
||||
return lexValue // reuse string lexing
|
||||
default:
|
||||
lx.ignore()
|
||||
lx.emit(itemKeyStart)
|
||||
return lexBareKey
|
||||
}
|
||||
}
|
||||
|
||||
// lexBareKey consumes the text of a bare key. Assumes that the first character
|
||||
// (which is not whitespace) has not yet been consumed.
|
||||
func lexBareKey(lx *lexer) stateFn {
|
||||
switch r := lx.next(); {
|
||||
case isBareKeyChar(r):
|
||||
return lexBareKey
|
||||
case isWhitespace(r):
|
||||
lx.emitTrim(itemText)
|
||||
return lexKeyEnd
|
||||
case r == keySep:
|
||||
lx.backup()
|
||||
lx.emitTrim(itemText)
|
||||
return lexKeyEnd
|
||||
default:
|
||||
return lx.errorf("Bare keys cannot contain %q.", r)
|
||||
}
|
||||
}
|
||||
|
||||
// lexKeyEnd consumes the end of a key and trims whitespace (up to the key
|
||||
// separator).
|
||||
func lexKeyEnd(lx *lexer) stateFn {
|
||||
switch r := lx.next(); {
|
||||
case r == keySep:
|
||||
return lexSkip(lx, lexValue)
|
||||
case isWhitespace(r):
|
||||
return lexSkip(lx, lexKeyEnd)
|
||||
default:
|
||||
return lx.errorf("Expected key separator %q, but got %q instead.",
|
||||
keySep, r)
|
||||
}
|
||||
}
|
||||
|
||||
// lexValue starts the consumption of a value anywhere a value is expected.
// lexValue will ignore whitespace.
// After a value is lexed, the last state on the stack is popped and returned.
|
||||
func lexValue(lx *lexer) stateFn {
|
||||
// We allow whitespace to precede a value, but NOT new lines.
|
||||
// In array syntax, the array states are responsible for ignoring new
|
||||
// lines.
|
||||
r := lx.next()
|
||||
if isWhitespace(r) {
|
||||
return lexSkip(lx, lexValue)
|
||||
}
|
||||
|
||||
switch {
|
||||
case r == arrayStart:
|
||||
lx.ignore()
|
||||
lx.emit(itemArray)
|
||||
return lexArrayValue
|
||||
case r == stringStart:
|
||||
if lx.accept(stringStart) {
|
||||
if lx.accept(stringStart) {
|
||||
lx.ignore() // Ignore """
|
||||
return lexMultilineString
|
||||
}
|
||||
lx.backup()
|
||||
}
|
||||
lx.ignore() // ignore the '"'
|
||||
return lexString
|
||||
case r == rawStringStart:
|
||||
if lx.accept(rawStringStart) {
|
||||
if lx.accept(rawStringStart) {
|
||||
lx.ignore() // Ignore """
|
||||
return lexMultilineRawString
|
||||
}
|
||||
lx.backup()
|
||||
}
|
||||
lx.ignore() // ignore the "'"
|
||||
return lexRawString
|
||||
case r == 't':
|
||||
return lexTrue
|
||||
case r == 'f':
|
||||
return lexFalse
|
||||
case r == '-':
|
||||
return lexNumberStart
|
||||
case isDigit(r):
|
||||
lx.backup() // avoid an extra state and use the same as above
|
||||
return lexNumberOrDateStart
|
||||
case r == '.': // special error case, be kind to users
|
||||
return lx.errorf("Floats must start with a digit, not '.'.")
|
||||
}
|
||||
return lx.errorf("Expected value but found %q instead.", r)
|
||||
}
|
||||
|
||||
// lexArrayValue consumes one value in an array. It assumes that '[' or ','
|
||||
// have already been consumed. All whitespace and new lines are ignored.
|
||||
func lexArrayValue(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case isWhitespace(r) || isNL(r):
|
||||
return lexSkip(lx, lexArrayValue)
|
||||
case r == commentStart:
|
||||
lx.push(lexArrayValue)
|
||||
return lexCommentStart
|
||||
case r == arrayValTerm:
|
||||
return lx.errorf("Unexpected array value terminator %q.",
|
||||
arrayValTerm)
|
||||
case r == arrayEnd:
|
||||
return lexArrayEnd
|
||||
}
|
||||
|
||||
lx.backup()
|
||||
lx.push(lexArrayValueEnd)
|
||||
return lexValue
|
||||
}
|
||||
|
||||
// lexArrayValueEnd consumes the cruft between values of an array. Namely,
|
||||
// it ignores whitespace and expects either a ',' or a ']'.
|
||||
func lexArrayValueEnd(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case isWhitespace(r) || isNL(r):
|
||||
return lexSkip(lx, lexArrayValueEnd)
|
||||
case r == commentStart:
|
||||
lx.push(lexArrayValueEnd)
|
||||
return lexCommentStart
|
||||
case r == arrayValTerm:
|
||||
lx.ignore()
|
||||
return lexArrayValue // move on to the next value
|
||||
case r == arrayEnd:
|
||||
return lexArrayEnd
|
||||
}
|
||||
return lx.errorf("Expected an array value terminator %q or an array "+
|
||||
"terminator %q, but got %q instead.", arrayValTerm, arrayEnd, r)
|
||||
}
|
||||
|
||||
// lexArrayEnd finishes the lexing of an array. It assumes that a ']' has
|
||||
// just been consumed.
|
||||
func lexArrayEnd(lx *lexer) stateFn {
|
||||
lx.ignore()
|
||||
lx.emit(itemArrayEnd)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexString consumes the inner contents of a string. It assumes that the
|
||||
// beginning '"' has already been consumed and ignored.
|
||||
func lexString(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case isNL(r):
|
||||
return lx.errorf("Strings cannot contain new lines.")
|
||||
case r == '\\':
|
||||
lx.push(lexString)
|
||||
return lexStringEscape
|
||||
case r == stringEnd:
|
||||
lx.backup()
|
||||
lx.emit(itemString)
|
||||
lx.next()
|
||||
lx.ignore()
|
||||
return lx.pop()
|
||||
}
|
||||
return lexString
|
||||
}
|
||||
|
||||
// lexMultilineString consumes the inner contents of a string. It assumes that
|
||||
// the beginning '"""' has already been consumed and ignored.
|
||||
func lexMultilineString(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case r == '\\':
|
||||
return lexMultilineStringEscape
|
||||
case r == stringEnd:
|
||||
if lx.accept(stringEnd) {
|
||||
if lx.accept(stringEnd) {
|
||||
lx.backup()
|
||||
lx.backup()
|
||||
lx.backup()
|
||||
lx.emit(itemMultilineString)
|
||||
lx.next()
|
||||
lx.next()
|
||||
lx.next()
|
||||
lx.ignore()
|
||||
return lx.pop()
|
||||
}
|
||||
lx.backup()
|
||||
}
|
||||
}
|
||||
return lexMultilineString
|
||||
}
|
||||
|
||||
// lexRawString consumes a raw string. Nothing can be escaped in such a string.
|
||||
// It assumes that the beginning "'" has already been consumed and ignored.
|
||||
func lexRawString(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case isNL(r):
|
||||
return lx.errorf("Strings cannot contain new lines.")
|
||||
case r == rawStringEnd:
|
||||
lx.backup()
|
||||
lx.emit(itemRawString)
|
||||
lx.next()
|
||||
lx.ignore()
|
||||
return lx.pop()
|
||||
}
|
||||
return lexRawString
|
||||
}
|
||||
|
||||
// lexMultilineRawString consumes a raw string. Nothing can be escaped in such
|
||||
// a string. It assumes that the beginning "'" has already been consumed and
|
||||
// ignored.
|
||||
func lexMultilineRawString(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case r == rawStringEnd:
|
||||
if lx.accept(rawStringEnd) {
|
||||
if lx.accept(rawStringEnd) {
|
||||
lx.backup()
|
||||
lx.backup()
|
||||
lx.backup()
|
||||
lx.emit(itemRawMultilineString)
|
||||
lx.next()
|
||||
lx.next()
|
||||
lx.next()
|
||||
lx.ignore()
|
||||
return lx.pop()
|
||||
}
|
||||
lx.backup()
|
||||
}
|
||||
}
|
||||
return lexMultilineRawString
|
||||
}
|
||||
|
||||
// lexMultilineStringEscape consumes an escaped character. It assumes that the
|
||||
// preceding '\\' has already been consumed.
|
||||
func lexMultilineStringEscape(lx *lexer) stateFn {
|
||||
// Handle the special case first:
|
||||
if isNL(lx.next()) {
|
||||
lx.next()
|
||||
return lexMultilineString
|
||||
} else {
|
||||
lx.backup()
|
||||
lx.push(lexMultilineString)
|
||||
return lexStringEscape(lx)
|
||||
}
|
||||
}
|
||||
|
||||
func lexStringEscape(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch r {
|
||||
case 'b':
|
||||
fallthrough
|
||||
case 't':
|
||||
fallthrough
|
||||
case 'n':
|
||||
fallthrough
|
||||
case 'f':
|
||||
fallthrough
|
||||
case 'r':
|
||||
fallthrough
|
||||
case '"':
|
||||
fallthrough
|
||||
case '\\':
|
||||
return lx.pop()
|
||||
case 'u':
|
||||
return lexShortUnicodeEscape
|
||||
case 'U':
|
||||
return lexLongUnicodeEscape
|
||||
}
|
||||
return lx.errorf("Invalid escape character %q. Only the following "+
|
||||
"escape characters are allowed: "+
|
||||
"\\b, \\t, \\n, \\f, \\r, \\\", \\/, \\\\, "+
|
||||
"\\uXXXX and \\UXXXXXXXX.", r)
|
||||
}
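
// Illustrative note, not part of the vendored diff: when the parser later
// rewrites these escapes (see replaceEscapes in parse.go), the single-rune
// forms map to \b→U+0008, \t→U+0009, \n→U+000A, \f→U+000C, \r→U+000D,
// \"→U+0022 and \\→U+005C, while \uXXXX and \UXXXXXXXX become the code point
// they name. The error message above also lists \/, which this switch does
// not actually accept.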
|
||||
|
||||
func lexShortUnicodeEscape(lx *lexer) stateFn {
|
||||
var r rune
|
||||
for i := 0; i < 4; i++ {
|
||||
r = lx.next()
|
||||
if !isHexadecimal(r) {
|
||||
return lx.errorf("Expected four hexadecimal digits after '\\u', "+
|
||||
"but got '%s' instead.", lx.current())
|
||||
}
|
||||
}
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
func lexLongUnicodeEscape(lx *lexer) stateFn {
|
||||
var r rune
|
||||
for i := 0; i < 8; i++ {
|
||||
r = lx.next()
|
||||
if !isHexadecimal(r) {
|
||||
return lx.errorf("Expected eight hexadecimal digits after '\\U', "+
|
||||
"but got '%s' instead.", lx.current())
|
||||
}
|
||||
}
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexNumberOrDateStart consumes either a (positive) integer, float or
|
||||
// datetime. It assumes that NO negative sign has been consumed.
|
||||
func lexNumberOrDateStart(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
if !isDigit(r) {
|
||||
if r == '.' {
|
||||
return lx.errorf("Floats must start with a digit, not '.'.")
|
||||
} else {
|
||||
return lx.errorf("Expected a digit but got %q.", r)
|
||||
}
|
||||
}
|
||||
return lexNumberOrDate
|
||||
}
|
||||
|
||||
// lexNumberOrDate consumes either a (positive) integer, float or datetime.
|
||||
func lexNumberOrDate(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case r == '-':
|
||||
if lx.pos-lx.start != 5 {
|
||||
return lx.errorf("All ISO8601 dates must be in full Zulu form.")
|
||||
}
|
||||
return lexDateAfterYear
|
||||
case isDigit(r):
|
||||
return lexNumberOrDate
|
||||
case r == '.':
|
||||
return lexFloatStart
|
||||
}
|
||||
|
||||
lx.backup()
|
||||
lx.emit(itemInteger)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexDateAfterYear consumes a full Zulu Datetime in ISO8601 format.
|
||||
// It assumes that "YYYY-" has already been consumed.
|
||||
func lexDateAfterYear(lx *lexer) stateFn {
|
||||
formats := []rune{
|
||||
// digits are '0'.
|
||||
// everything else is direct equality.
|
||||
'0', '0', '-', '0', '0',
|
||||
'T',
|
||||
'0', '0', ':', '0', '0', ':', '0', '0',
|
||||
'Z',
|
||||
}
|
||||
for _, f := range formats {
|
||||
r := lx.next()
|
||||
if f == '0' {
|
||||
if !isDigit(r) {
|
||||
return lx.errorf("Expected digit in ISO8601 datetime, "+
|
||||
"but found %q instead.", r)
|
||||
}
|
||||
} else if f != r {
|
||||
return lx.errorf("Expected %q in ISO8601 datetime, "+
|
||||
"but found %q instead.", f, r)
|
||||
}
|
||||
}
|
||||
lx.emit(itemDatetime)
|
||||
return lx.pop()
|
||||
}
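
// matchesZuluShape is an illustrative sketch, not part of the vendored diff.
// It mirrors the formats table above over a whole timestamp: '0' stands for
// "any digit" and every other character must match exactly, so only strings
// shaped like "1979-05-27T07:32:00Z" pass.
func matchesZuluShape(s string) bool {
	const shape = "0000-00-00T00:00:00Z"
	if len(s) != len(shape) {
		return false
	}
	for i, f := range shape {
		c := rune(s[i])
		if f == '0' {
			if !isDigit(c) {
				return false
			}
		} else if f != c {
			return false
		}
	}
	return true
}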
|
||||
|
||||
// lexNumberStart consumes either an integer or a float. It assumes that
|
||||
// a negative sign has already been read, but that *no* digits have been
|
||||
// consumed. lexNumberStart will move to the appropriate integer or float
|
||||
// states.
|
||||
func lexNumberStart(lx *lexer) stateFn {
|
||||
// we MUST see a digit. Even floats have to start with a digit.
|
||||
r := lx.next()
|
||||
if !isDigit(r) {
|
||||
if r == '.' {
|
||||
return lx.errorf("Floats must start with a digit, not '.'.")
|
||||
} else {
|
||||
return lx.errorf("Expected a digit but got %q.", r)
|
||||
}
|
||||
}
|
||||
return lexNumber
|
||||
}
|
||||
|
||||
// lexNumber consumes an integer or a float after seeing the first digit.
|
||||
func lexNumber(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case isDigit(r):
|
||||
return lexNumber
|
||||
case r == '.':
|
||||
return lexFloatStart
|
||||
}
|
||||
|
||||
lx.backup()
|
||||
lx.emit(itemInteger)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexFloatStart starts the consumption of digits of a float after a '.'.
|
||||
// Namely, at least one digit is required.
|
||||
func lexFloatStart(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
if !isDigit(r) {
|
||||
return lx.errorf("Floats must have a digit after the '.', but got "+
|
||||
"%q instead.", r)
|
||||
}
|
||||
return lexFloat
|
||||
}
|
||||
|
||||
// lexFloat consumes the digits of a float after a '.'.
|
||||
// Assumes that one digit has been consumed after a '.' already.
|
||||
func lexFloat(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
if isDigit(r) {
|
||||
return lexFloat
|
||||
}
|
||||
|
||||
lx.backup()
|
||||
lx.emit(itemFloat)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexConst consumes the s[1:] in s. It assumes that s[0] has already been
|
||||
// consumed.
|
||||
func lexConst(lx *lexer, s string) stateFn {
|
||||
for i := range s[1:] {
|
||||
if r := lx.next(); r != rune(s[i+1]) {
|
||||
return lx.errorf("Expected %q, but found %q instead.", s[:i+1],
|
||||
s[:i]+string(r))
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// lexTrue consumes the "rue" in "true". It assumes that 't' has already
|
||||
// been consumed.
|
||||
func lexTrue(lx *lexer) stateFn {
|
||||
if fn := lexConst(lx, "true"); fn != nil {
|
||||
return fn
|
||||
}
|
||||
lx.emit(itemBool)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexFalse consumes the "alse" in "false". It assumes that 'f' has already
|
||||
// been consumed.
|
||||
func lexFalse(lx *lexer) stateFn {
|
||||
if fn := lexConst(lx, "false"); fn != nil {
|
||||
return fn
|
||||
}
|
||||
lx.emit(itemBool)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexCommentStart begins the lexing of a comment. It will emit
|
||||
// itemCommentStart and consume no characters, passing control to lexComment.
|
||||
func lexCommentStart(lx *lexer) stateFn {
|
||||
lx.ignore()
|
||||
lx.emit(itemCommentStart)
|
||||
return lexComment
|
||||
}
|
||||
|
||||
// lexComment lexes an entire comment. It assumes that '#' has been consumed.
|
||||
// It will consume *up to* the first new line character, and pass control
|
||||
// back to the last state on the stack.
|
||||
func lexComment(lx *lexer) stateFn {
|
||||
r := lx.peek()
|
||||
if isNL(r) || r == eof {
|
||||
lx.emit(itemText)
|
||||
return lx.pop()
|
||||
}
|
||||
lx.next()
|
||||
return lexComment
|
||||
}
|
||||
|
||||
// lexSkip ignores all slurped input and moves on to the next state.
|
||||
func lexSkip(lx *lexer, nextState stateFn) stateFn {
|
||||
return func(lx *lexer) stateFn {
|
||||
lx.ignore()
|
||||
return nextState
|
||||
}
|
||||
}
|
||||
|
||||
// isWhitespace returns true if `r` is a whitespace character according
|
||||
// to the spec.
|
||||
func isWhitespace(r rune) bool {
|
||||
return r == '\t' || r == ' '
|
||||
}
|
||||
|
||||
func isNL(r rune) bool {
|
||||
return r == '\n' || r == '\r'
|
||||
}
|
||||
|
||||
func isDigit(r rune) bool {
|
||||
return r >= '0' && r <= '9'
|
||||
}
|
||||
|
||||
func isHexadecimal(r rune) bool {
|
||||
return (r >= '0' && r <= '9') ||
|
||||
(r >= 'a' && r <= 'f') ||
|
||||
(r >= 'A' && r <= 'F')
|
||||
}
|
||||
|
||||
func isBareKeyChar(r rune) bool {
|
||||
return (r >= 'A' && r <= 'Z') ||
|
||||
(r >= 'a' && r <= 'z') ||
|
||||
(r >= '0' && r <= '9') ||
|
||||
r == '_' ||
|
||||
r == '-'
|
||||
}
|
||||
|
||||
func (itype itemType) String() string {
|
||||
switch itype {
|
||||
case itemError:
|
||||
return "Error"
|
||||
case itemNIL:
|
||||
return "NIL"
|
||||
case itemEOF:
|
||||
return "EOF"
|
||||
case itemText:
|
||||
return "Text"
|
||||
case itemString:
|
||||
return "String"
|
||||
case itemRawString:
|
||||
return "String"
|
||||
case itemMultilineString:
|
||||
return "String"
|
||||
case itemRawMultilineString:
|
||||
return "String"
|
||||
case itemBool:
|
||||
return "Bool"
|
||||
case itemInteger:
|
||||
return "Integer"
|
||||
case itemFloat:
|
||||
return "Float"
|
||||
case itemDatetime:
|
||||
return "DateTime"
|
||||
case itemTableStart:
|
||||
return "TableStart"
|
||||
case itemTableEnd:
|
||||
return "TableEnd"
|
||||
case itemKeyStart:
|
||||
return "KeyStart"
|
||||
case itemArray:
|
||||
return "Array"
|
||||
case itemArrayEnd:
|
||||
return "ArrayEnd"
|
||||
case itemCommentStart:
|
||||
return "CommentStart"
|
||||
}
|
||||
panic(fmt.Sprintf("BUG: Unknown type '%d'.", int(itype)))
|
||||
}
|
||||
|
||||
func (item item) String() string {
|
||||
return fmt.Sprintf("(%s, %s)", item.typ.String(), item.val)
|
||||
}
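
// dumpItems is an illustrative sketch, not part of the vendored diff. It
// assumes it lives in the same package and simply drains the lexer defined
// above, printing each item until an EOF or error item is produced.
func dumpItems(input string) {
	lx := lex(input)
	for {
		it := lx.nextItem()
		fmt.Printf("line %d: %s %q\n", it.line, it.typ, it.val)
		if it.typ == itemEOF || it.typ == itemError {
			return
		}
	}
}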
|
||||
498 Godeps/_workspace/src/github.com/BurntSushi/toml/parse.go (generated, vendored, new file)
@@ -0,0 +1,498 @@
|
||||
package toml
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type parser struct {
|
||||
mapping map[string]interface{}
|
||||
types map[string]tomlType
|
||||
lx *lexer
|
||||
|
||||
// A list of keys in the order that they appear in the TOML data.
|
||||
ordered []Key
|
||||
|
||||
// the full key for the current hash in scope
|
||||
context Key
|
||||
|
||||
// the base key name for everything except hashes
|
||||
currentKey string
|
||||
|
||||
// rough approximation of line number
|
||||
approxLine int
|
||||
|
||||
// A map of 'key.group.names' to whether they were created implicitly.
|
||||
implicits map[string]bool
|
||||
}
|
||||
|
||||
type parseError string
|
||||
|
||||
func (pe parseError) Error() string {
|
||||
return string(pe)
|
||||
}
|
||||
|
||||
func parse(data string) (p *parser, err error) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
var ok bool
|
||||
if err, ok = r.(parseError); ok {
|
||||
return
|
||||
}
|
||||
panic(r)
|
||||
}
|
||||
}()
|
||||
|
||||
p = &parser{
|
||||
mapping: make(map[string]interface{}),
|
||||
types: make(map[string]tomlType),
|
||||
lx: lex(data),
|
||||
ordered: make([]Key, 0),
|
||||
implicits: make(map[string]bool),
|
||||
}
|
||||
for {
|
||||
item := p.next()
|
||||
if item.typ == itemEOF {
|
||||
break
|
||||
}
|
||||
p.topLevel(item)
|
||||
}
|
||||
|
||||
return p, nil
|
||||
}
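
// exampleParse is an illustrative sketch, not part of the vendored diff.
// parse is unexported, so this only works from inside the package; it shows
// the shape of the result for a tiny document.
func exampleParse() {
	p, err := parse("title = \"TOML\"\n[owner]\nname = \"Tom\"\n")
	if err != nil {
		log.Fatal(err)
	}
	// p.mapping is map[string]interface{}{
	//     "title": "TOML",
	//     "owner": map[string]interface{}{"name": "Tom"},
	// }
	fmt.Println(p.mapping, p.ordered)
}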
|
||||
|
||||
func (p *parser) panicf(format string, v ...interface{}) {
|
||||
msg := fmt.Sprintf("Near line %d (last key parsed '%s'): %s",
|
||||
p.approxLine, p.current(), fmt.Sprintf(format, v...))
|
||||
panic(parseError(msg))
|
||||
}
|
||||
|
||||
func (p *parser) next() item {
|
||||
it := p.lx.nextItem()
|
||||
if it.typ == itemError {
|
||||
p.panicf("%s", it.val)
|
||||
}
|
||||
return it
|
||||
}
|
||||
|
||||
func (p *parser) bug(format string, v ...interface{}) {
|
||||
log.Fatalf("BUG: %s\n\n", fmt.Sprintf(format, v...))
|
||||
}
|
||||
|
||||
func (p *parser) expect(typ itemType) item {
|
||||
it := p.next()
|
||||
p.assertEqual(typ, it.typ)
|
||||
return it
|
||||
}
|
||||
|
||||
func (p *parser) assertEqual(expected, got itemType) {
|
||||
if expected != got {
|
||||
p.bug("Expected '%s' but got '%s'.", expected, got)
|
||||
}
|
||||
}
|
||||
|
||||
func (p *parser) topLevel(item item) {
|
||||
switch item.typ {
|
||||
case itemCommentStart:
|
||||
p.approxLine = item.line
|
||||
p.expect(itemText)
|
||||
case itemTableStart:
|
||||
kg := p.next()
|
||||
p.approxLine = kg.line
|
||||
|
||||
var key Key
|
||||
for ; kg.typ != itemTableEnd && kg.typ != itemEOF; kg = p.next() {
|
||||
key = append(key, p.keyString(kg))
|
||||
}
|
||||
p.assertEqual(itemTableEnd, kg.typ)
|
||||
|
||||
p.establishContext(key, false)
|
||||
p.setType("", tomlHash)
|
||||
p.ordered = append(p.ordered, key)
|
||||
case itemArrayTableStart:
|
||||
kg := p.next()
|
||||
p.approxLine = kg.line
|
||||
|
||||
var key Key
|
||||
for ; kg.typ != itemArrayTableEnd && kg.typ != itemEOF; kg = p.next() {
|
||||
key = append(key, p.keyString(kg))
|
||||
}
|
||||
p.assertEqual(itemArrayTableEnd, kg.typ)
|
||||
|
||||
p.establishContext(key, true)
|
||||
p.setType("", tomlArrayHash)
|
||||
p.ordered = append(p.ordered, key)
|
||||
case itemKeyStart:
|
||||
kname := p.next()
|
||||
p.approxLine = kname.line
|
||||
p.currentKey = p.keyString(kname)
|
||||
|
||||
val, typ := p.value(p.next())
|
||||
p.setValue(p.currentKey, val)
|
||||
p.setType(p.currentKey, typ)
|
||||
p.ordered = append(p.ordered, p.context.add(p.currentKey))
|
||||
p.currentKey = ""
|
||||
default:
|
||||
p.bug("Unexpected type at top level: %s", item.typ)
|
||||
}
|
||||
}
|
||||
|
||||
// Gets a string for a key (or part of a key in a table name).
|
||||
func (p *parser) keyString(it item) string {
|
||||
switch it.typ {
|
||||
case itemText:
|
||||
return it.val
|
||||
case itemString, itemMultilineString,
|
||||
itemRawString, itemRawMultilineString:
|
||||
s, _ := p.value(it)
|
||||
return s.(string)
|
||||
default:
|
||||
p.bug("Unexpected key type: %s", it.typ)
|
||||
panic("unreachable")
|
||||
}
|
||||
}
|
||||
|
||||
// value translates an expected value from the lexer into a Go value wrapped
|
||||
// as an empty interface.
|
||||
func (p *parser) value(it item) (interface{}, tomlType) {
|
||||
switch it.typ {
|
||||
case itemString:
|
||||
return p.replaceEscapes(it.val), p.typeOfPrimitive(it)
|
||||
case itemMultilineString:
|
||||
trimmed := stripFirstNewline(stripEscapedWhitespace(it.val))
|
||||
return p.replaceEscapes(trimmed), p.typeOfPrimitive(it)
|
||||
case itemRawString:
|
||||
return it.val, p.typeOfPrimitive(it)
|
||||
case itemRawMultilineString:
|
||||
return stripFirstNewline(it.val), p.typeOfPrimitive(it)
|
||||
case itemBool:
|
||||
switch it.val {
|
||||
case "true":
|
||||
return true, p.typeOfPrimitive(it)
|
||||
case "false":
|
||||
return false, p.typeOfPrimitive(it)
|
||||
}
|
||||
p.bug("Expected boolean value, but got '%s'.", it.val)
|
||||
case itemInteger:
|
||||
num, err := strconv.ParseInt(it.val, 10, 64)
|
||||
if err != nil {
|
||||
// See comment below for floats describing why we make a
|
||||
// distinction between a bug and a user error.
|
||||
if e, ok := err.(*strconv.NumError); ok &&
|
||||
e.Err == strconv.ErrRange {
|
||||
|
||||
p.panicf("Integer '%s' is out of the range of 64-bit "+
|
||||
"signed integers.", it.val)
|
||||
} else {
|
||||
p.bug("Expected integer value, but got '%s'.", it.val)
|
||||
}
|
||||
}
|
||||
return num, p.typeOfPrimitive(it)
|
||||
case itemFloat:
|
||||
num, err := strconv.ParseFloat(it.val, 64)
|
||||
if err != nil {
|
||||
// Distinguish float values. Normally, it'd be a bug if the lexer
|
||||
// provides an invalid float, but it's possible that the float is
|
||||
// out of range of valid values (which the lexer cannot determine).
|
||||
// So mark the former as a bug but the latter as a legitimate user
|
||||
// error.
|
||||
//
|
||||
// This is also true for integers.
|
||||
if e, ok := err.(*strconv.NumError); ok &&
|
||||
e.Err == strconv.ErrRange {
|
||||
|
||||
p.panicf("Float '%s' is out of the range of 64-bit "+
|
||||
"IEEE-754 floating-point numbers.", it.val)
|
||||
} else {
|
||||
p.bug("Expected float value, but got '%s'.", it.val)
|
||||
}
|
||||
}
|
||||
return num, p.typeOfPrimitive(it)
|
||||
case itemDatetime:
|
||||
t, err := time.Parse("2006-01-02T15:04:05Z", it.val)
|
||||
if err != nil {
|
||||
p.bug("Expected Zulu formatted DateTime, but got '%s'.", it.val)
|
||||
}
|
||||
return t, p.typeOfPrimitive(it)
|
||||
case itemArray:
|
||||
array := make([]interface{}, 0)
|
||||
types := make([]tomlType, 0)
|
||||
|
||||
for it = p.next(); it.typ != itemArrayEnd; it = p.next() {
|
||||
if it.typ == itemCommentStart {
|
||||
p.expect(itemText)
|
||||
continue
|
||||
}
|
||||
|
||||
val, typ := p.value(it)
|
||||
array = append(array, val)
|
||||
types = append(types, typ)
|
||||
}
|
||||
return array, p.typeOfArray(types)
|
||||
}
|
||||
p.bug("Unexpected value type: %s", it.typ)
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// establishContext sets the current context of the parser,
|
||||
// where the context is either a hash or an array of hashes. Which one is
|
||||
// set depends on the value of the `array` parameter.
|
||||
//
|
||||
// Establishing the context also makes sure that the key isn't a duplicate, and
|
||||
// will create implicit hashes automatically.
|
||||
func (p *parser) establishContext(key Key, array bool) {
|
||||
var ok bool
|
||||
|
||||
// Always start at the top level and drill down for our context.
|
||||
hashContext := p.mapping
|
||||
keyContext := make(Key, 0)
|
||||
|
||||
// We only need implicit hashes for key[0:-1]
|
||||
for _, k := range key[0 : len(key)-1] {
|
||||
_, ok = hashContext[k]
|
||||
keyContext = append(keyContext, k)
|
||||
|
||||
// No key? Make an implicit hash and move on.
|
||||
if !ok {
|
||||
p.addImplicit(keyContext)
|
||||
hashContext[k] = make(map[string]interface{})
|
||||
}
|
||||
|
||||
// If the hash context is actually an array of tables, then set
|
||||
// the hash context to the last element in that array.
|
||||
//
|
||||
// Otherwise, it better be a table, since this MUST be a key group (by
|
||||
// virtue of it not being the last element in a key).
|
||||
switch t := hashContext[k].(type) {
|
||||
case []map[string]interface{}:
|
||||
hashContext = t[len(t)-1]
|
||||
case map[string]interface{}:
|
||||
hashContext = t
|
||||
default:
|
||||
p.panicf("Key '%s' was already created as a hash.", keyContext)
|
||||
}
|
||||
}
|
||||
|
||||
p.context = keyContext
|
||||
if array {
|
||||
// If this is the first element for this array, then allocate a new
|
||||
// list of tables for it.
|
||||
k := key[len(key)-1]
|
||||
if _, ok := hashContext[k]; !ok {
|
||||
hashContext[k] = make([]map[string]interface{}, 0, 5)
|
||||
}
|
||||
|
||||
// Add a new table. But make sure the key hasn't already been used
|
||||
// for something else.
|
||||
if hash, ok := hashContext[k].([]map[string]interface{}); ok {
|
||||
hashContext[k] = append(hash, make(map[string]interface{}))
|
||||
} else {
|
||||
p.panicf("Key '%s' was already created and cannot be used as "+
|
||||
"an array.", keyContext)
|
||||
}
|
||||
} else {
|
||||
p.setValue(key[len(key)-1], make(map[string]interface{}))
|
||||
}
|
||||
p.context = append(p.context, key[len(key)-1])
|
||||
}
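
// Illustrative note, not part of the vendored diff: for a document containing
// only
//
//	[a.b.c]
//	d = 1
//
// establishContext creates "a" and "a.b" as implicit hashes (tracked in
// p.implicits) and "a.b.c" as the explicit table. A later explicit [a.b]
// header is then allowed exactly once, because setValue clears the implicit
// flag instead of reporting a duplicate key.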
|
||||
|
||||
// setValue sets the given key to the given value in the current context.
// It will make sure that the key hasn't already been defined, and will
// account for implicit key groups.
|
||||
func (p *parser) setValue(key string, value interface{}) {
|
||||
var tmpHash interface{}
|
||||
var ok bool
|
||||
|
||||
hash := p.mapping
|
||||
keyContext := make(Key, 0)
|
||||
for _, k := range p.context {
|
||||
keyContext = append(keyContext, k)
|
||||
if tmpHash, ok = hash[k]; !ok {
|
||||
p.bug("Context for key '%s' has not been established.", keyContext)
|
||||
}
|
||||
switch t := tmpHash.(type) {
|
||||
case []map[string]interface{}:
|
||||
// The context is a table of hashes. Pick the most recent table
|
||||
// defined as the current hash.
|
||||
hash = t[len(t)-1]
|
||||
case map[string]interface{}:
|
||||
hash = t
|
||||
default:
|
||||
p.bug("Expected hash to have type 'map[string]interface{}', but "+
|
||||
"it has '%T' instead.", tmpHash)
|
||||
}
|
||||
}
|
||||
keyContext = append(keyContext, key)
|
||||
|
||||
if _, ok := hash[key]; ok {
|
||||
// Typically, if the given key has already been set, then we have
|
||||
// to raise an error since duplicate keys are disallowed. However,
|
||||
// it's possible that a key was previously defined implicitly. In this
|
||||
// case, it is allowed to be redefined concretely. (See the
|
||||
// `tests/valid/implicit-and-explicit-after.toml` test in `toml-test`.)
|
||||
//
|
||||
// But we have to make sure to stop marking it as an implicit. (So that
|
||||
// another redefinition provokes an error.)
|
||||
//
|
||||
// Note that since it has already been defined (as a hash), we don't
|
||||
// want to overwrite it. So our business is done.
|
||||
if p.isImplicit(keyContext) {
|
||||
p.removeImplicit(keyContext)
|
||||
return
|
||||
}
|
||||
|
||||
// Otherwise, we have a concrete key trying to override a previous
|
||||
// key, which is *always* wrong.
|
||||
p.panicf("Key '%s' has already been defined.", keyContext)
|
||||
}
|
||||
hash[key] = value
|
||||
}
|
||||
|
||||
// setType sets the type of a particular value at a given key.
|
||||
// It should be called immediately AFTER setValue.
|
||||
//
|
||||
// Note that if `key` is empty, then the type given will be applied to the
|
||||
// current context (which is either a table or an array of tables).
|
||||
func (p *parser) setType(key string, typ tomlType) {
|
||||
keyContext := make(Key, 0, len(p.context)+1)
|
||||
for _, k := range p.context {
|
||||
keyContext = append(keyContext, k)
|
||||
}
|
||||
if len(key) > 0 { // allow type setting for hashes
|
||||
keyContext = append(keyContext, key)
|
||||
}
|
||||
p.types[keyContext.String()] = typ
|
||||
}
|
||||
|
||||
// addImplicit sets the given Key as having been created implicitly.
|
||||
func (p *parser) addImplicit(key Key) {
|
||||
p.implicits[key.String()] = true
|
||||
}
|
||||
|
||||
// removeImplicit stops tagging the given key as having been implicitly
|
||||
// created.
|
||||
func (p *parser) removeImplicit(key Key) {
|
||||
p.implicits[key.String()] = false
|
||||
}
|
||||
|
||||
// isImplicit returns true if the key group pointed to by the key was created
|
||||
// implicitly.
|
||||
func (p *parser) isImplicit(key Key) bool {
|
||||
return p.implicits[key.String()]
|
||||
}
|
||||
|
||||
// current returns the full key name of the current context.
|
||||
func (p *parser) current() string {
|
||||
if len(p.currentKey) == 0 {
|
||||
return p.context.String()
|
||||
}
|
||||
if len(p.context) == 0 {
|
||||
return p.currentKey
|
||||
}
|
||||
return fmt.Sprintf("%s.%s", p.context, p.currentKey)
|
||||
}
|
||||
|
||||
func stripFirstNewline(s string) string {
|
||||
if len(s) == 0 || s[0] != '\n' {
|
||||
return s
|
||||
}
|
||||
return s[1:len(s)]
|
||||
}
|
||||
|
||||
func stripEscapedWhitespace(s string) string {
|
||||
esc := strings.Split(s, "\\\n")
|
||||
if len(esc) > 1 {
|
||||
for i := 1; i < len(esc); i++ {
|
||||
esc[i] = strings.TrimLeftFunc(esc[i], unicode.IsSpace)
|
||||
}
|
||||
}
|
||||
return strings.Join(esc, "")
|
||||
}
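
// Illustrative note, not part of the vendored diff: together with
// stripFirstNewline, this implements the "line-ending backslash" rule for
// multiline basic strings. For example, inside the package:
//
//	stripEscapedWhitespace("The quick brown \\\n    fox jumps over")
//	// returns "The quick brown fox jumps over"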
|
||||
|
||||
func (p *parser) replaceEscapes(str string) string {
|
||||
var replaced []rune
|
||||
s := []byte(str)
|
||||
r := 0
|
||||
for r < len(s) {
|
||||
if s[r] != '\\' {
|
||||
c, size := utf8.DecodeRune(s[r:])
|
||||
r += size
|
||||
replaced = append(replaced, c)
|
||||
continue
|
||||
}
|
||||
r += 1
|
||||
if r >= len(s) {
|
||||
p.bug("Escape sequence at end of string.")
|
||||
return ""
|
||||
}
|
||||
switch s[r] {
|
||||
default:
|
||||
p.bug("Expected valid escape code after \\, but got %q.", s[r])
|
||||
return ""
|
||||
case 'b':
|
||||
replaced = append(replaced, rune(0x0008))
|
||||
r += 1
|
||||
case 't':
|
||||
replaced = append(replaced, rune(0x0009))
|
||||
r += 1
|
||||
case 'n':
|
||||
replaced = append(replaced, rune(0x000A))
|
||||
r += 1
|
||||
case 'f':
|
||||
replaced = append(replaced, rune(0x000C))
|
||||
r += 1
|
||||
case 'r':
|
||||
replaced = append(replaced, rune(0x000D))
|
||||
r += 1
|
||||
case '"':
|
||||
replaced = append(replaced, rune(0x0022))
|
||||
r += 1
|
||||
case '\\':
|
||||
replaced = append(replaced, rune(0x005C))
|
||||
r += 1
|
||||
case 'u':
|
||||
// At this point, we know we have a Unicode escape of the form
|
||||
// `uXXXX` at [r, r+5). (Because the lexer guarantees this
|
||||
// for us.)
|
||||
escaped := p.asciiEscapeToUnicode(s[r+1 : r+5])
|
||||
replaced = append(replaced, escaped)
|
||||
r += 5
|
||||
case 'U':
|
||||
// At this point, we know we have a Unicode escape of the form
// `UXXXXXXXX` at [r, r+9). (Because the lexer guarantees this
// for us.)
|
||||
escaped := p.asciiEscapeToUnicode(s[r+1 : r+9])
|
||||
replaced = append(replaced, escaped)
|
||||
r += 9
|
||||
}
|
||||
}
|
||||
return string(replaced)
|
||||
}
|
||||
|
||||
func (p *parser) asciiEscapeToUnicode(bs []byte) rune {
|
||||
s := string(bs)
|
||||
hex, err := strconv.ParseUint(strings.ToLower(s), 16, 32)
|
||||
if err != nil {
|
||||
p.bug("Could not parse '%s' as a hexadecimal number, but the "+
|
||||
"lexer claims it's OK: %s", s, err)
|
||||
}
|
||||
|
||||
// BUG(burntsushi)
|
||||
// I honestly don't understand how this works. I can't seem
|
||||
// to find a way to make this fail. I figured this would fail on invalid
|
||||
// UTF-8 characters like U+DCFF, but it doesn't.
|
||||
if !utf8.ValidString(string(rune(hex))) {
|
||||
p.panicf("Escaped character '\\u%s' is not valid UTF-8.", s)
|
||||
}
|
||||
return rune(hex)
|
||||
}
|
||||
|
||||
func isStringType(ty itemType) bool {
|
||||
return ty == itemString || ty == itemMultilineString ||
|
||||
ty == itemRawString || ty == itemRawMultilineString
|
||||
}
|
||||
1 Godeps/_workspace/src/github.com/BurntSushi/toml/session.vim (generated, vendored, new file)
@@ -0,0 +1 @@
|
||||
au BufWritePost *.go silent!make tags > /dev/null 2>&1
|
||||
91 Godeps/_workspace/src/github.com/BurntSushi/toml/type_check.go (generated, vendored, new file)
@@ -0,0 +1,91 @@
|
||||
package toml
|
||||
|
||||
// tomlType represents any Go type that corresponds to a TOML type.
|
||||
// While the first draft of the TOML spec has a simplistic type system that
|
||||
// probably doesn't need this level of sophistication, we seem to be militating
|
||||
// toward adding real composite types.
|
||||
type tomlType interface {
|
||||
typeString() string
|
||||
}
|
||||
|
||||
// typeEqual accepts any two types and returns true if they are equal.
|
||||
func typeEqual(t1, t2 tomlType) bool {
|
||||
if t1 == nil || t2 == nil {
|
||||
return false
|
||||
}
|
||||
return t1.typeString() == t2.typeString()
|
||||
}
|
||||
|
||||
func typeIsHash(t tomlType) bool {
|
||||
return typeEqual(t, tomlHash) || typeEqual(t, tomlArrayHash)
|
||||
}
|
||||
|
||||
type tomlBaseType string
|
||||
|
||||
func (btype tomlBaseType) typeString() string {
|
||||
return string(btype)
|
||||
}
|
||||
|
||||
func (btype tomlBaseType) String() string {
|
||||
return btype.typeString()
|
||||
}
|
||||
|
||||
var (
|
||||
tomlInteger tomlBaseType = "Integer"
|
||||
tomlFloat tomlBaseType = "Float"
|
||||
tomlDatetime tomlBaseType = "Datetime"
|
||||
tomlString tomlBaseType = "String"
|
||||
tomlBool tomlBaseType = "Bool"
|
||||
tomlArray tomlBaseType = "Array"
|
||||
tomlHash tomlBaseType = "Hash"
|
||||
tomlArrayHash tomlBaseType = "ArrayHash"
|
||||
)
|
||||
|
||||
// typeOfPrimitive returns a tomlType of any primitive value in TOML.
|
||||
// Primitive values are: Integer, Float, Datetime, String and Bool.
|
||||
//
|
||||
// Passing a lexer item other than the following will cause a BUG message
|
||||
// to occur: itemString, itemBool, itemInteger, itemFloat, itemDatetime.
|
||||
func (p *parser) typeOfPrimitive(lexItem item) tomlType {
|
||||
switch lexItem.typ {
|
||||
case itemInteger:
|
||||
return tomlInteger
|
||||
case itemFloat:
|
||||
return tomlFloat
|
||||
case itemDatetime:
|
||||
return tomlDatetime
|
||||
case itemString:
|
||||
return tomlString
|
||||
case itemMultilineString:
|
||||
return tomlString
|
||||
case itemRawString:
|
||||
return tomlString
|
||||
case itemRawMultilineString:
|
||||
return tomlString
|
||||
case itemBool:
|
||||
return tomlBool
|
||||
}
|
||||
p.bug("Cannot infer primitive type of lex item '%s'.", lexItem)
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// typeOfArray returns a tomlType for an array given a list of types of its
|
||||
// values.
|
||||
//
|
||||
// In the current spec, if an array is homogeneous, then its type is always
|
||||
// "Array". If the array is not homogeneous, an error is generated.
|
||||
func (p *parser) typeOfArray(types []tomlType) tomlType {
|
||||
// Empty arrays are cool.
|
||||
if len(types) == 0 {
|
||||
return tomlArray
|
||||
}
|
||||
|
||||
theType := types[0]
|
||||
for _, t := range types[1:] {
|
||||
if !typeEqual(theType, t) {
|
||||
p.panicf("Array contains values of type '%s' and '%s', but "+
|
||||
"arrays must be homogeneous.", theType, t)
|
||||
}
|
||||
}
|
||||
return tomlArray
|
||||
}
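
// sameTOMLType is an illustrative helper, not part of the vendored diff. It
// shows how typeEqual drives the homogeneity check in typeOfArray, without
// going through the parser's panic path.
func sameTOMLType(types []tomlType) bool {
	if len(types) == 0 {
		return true // empty arrays count as homogeneous, as above
	}
	first := types[0]
	for _, t := range types[1:] {
		if !typeEqual(first, t) {
			return false
		}
	}
	return true
}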
|
||||
241 Godeps/_workspace/src/github.com/BurntSushi/toml/type_fields.go (generated, vendored, new file)
@@ -0,0 +1,241 @@
|
||||
package toml
|
||||
|
||||
// Struct field handling is adapted from code in encoding/json:
|
||||
//
|
||||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the Go distribution.
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"sort"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// A field represents a single field found in a struct.
|
||||
type field struct {
|
||||
name string // the name of the field (`toml` tag included)
|
||||
tag bool // whether field has a `toml` tag
|
||||
index []int // represents the depth of an anonymous field
|
||||
typ reflect.Type // the type of the field
|
||||
}
|
||||
|
||||
// byName sorts field by name, breaking ties with depth,
|
||||
// then breaking ties with "name came from toml tag", then
|
||||
// breaking ties with index sequence.
|
||||
type byName []field
|
||||
|
||||
func (x byName) Len() int { return len(x) }
|
||||
|
||||
func (x byName) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
|
||||
|
||||
func (x byName) Less(i, j int) bool {
|
||||
if x[i].name != x[j].name {
|
||||
return x[i].name < x[j].name
|
||||
}
|
||||
if len(x[i].index) != len(x[j].index) {
|
||||
return len(x[i].index) < len(x[j].index)
|
||||
}
|
||||
if x[i].tag != x[j].tag {
|
||||
return x[i].tag
|
||||
}
|
||||
return byIndex(x).Less(i, j)
|
||||
}
|
||||
|
||||
// byIndex sorts field by index sequence.
|
||||
type byIndex []field
|
||||
|
||||
func (x byIndex) Len() int { return len(x) }
|
||||
|
||||
func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
|
||||
|
||||
func (x byIndex) Less(i, j int) bool {
|
||||
for k, xik := range x[i].index {
|
||||
if k >= len(x[j].index) {
|
||||
return false
|
||||
}
|
||||
if xik != x[j].index[k] {
|
||||
return xik < x[j].index[k]
|
||||
}
|
||||
}
|
||||
return len(x[i].index) < len(x[j].index)
|
||||
}
|
||||
|
||||
// typeFields returns a list of fields that TOML should recognize for the given
|
||||
// type. The algorithm is breadth-first search over the set of structs to
|
||||
// include - the top struct and then any reachable anonymous structs.
|
||||
func typeFields(t reflect.Type) []field {
|
||||
// Anonymous fields to explore at the current level and the next.
|
||||
current := []field{}
|
||||
next := []field{{typ: t}}
|
||||
|
||||
// Count of queued names for current level and the next.
|
||||
count := map[reflect.Type]int{}
|
||||
nextCount := map[reflect.Type]int{}
|
||||
|
||||
// Types already visited at an earlier level.
|
||||
visited := map[reflect.Type]bool{}
|
||||
|
||||
// Fields found.
|
||||
var fields []field
|
||||
|
||||
for len(next) > 0 {
|
||||
current, next = next, current[:0]
|
||||
count, nextCount = nextCount, map[reflect.Type]int{}
|
||||
|
||||
for _, f := range current {
|
||||
if visited[f.typ] {
|
||||
continue
|
||||
}
|
||||
visited[f.typ] = true
|
||||
|
||||
// Scan f.typ for fields to include.
|
||||
for i := 0; i < f.typ.NumField(); i++ {
|
||||
sf := f.typ.Field(i)
|
||||
if sf.PkgPath != "" { // unexported
|
||||
continue
|
||||
}
|
||||
name := sf.Tag.Get("toml")
|
||||
if name == "-" {
|
||||
continue
|
||||
}
|
||||
index := make([]int, len(f.index)+1)
|
||||
copy(index, f.index)
|
||||
index[len(f.index)] = i
|
||||
|
||||
ft := sf.Type
|
||||
if ft.Name() == "" && ft.Kind() == reflect.Ptr {
|
||||
// Follow pointer.
|
||||
ft = ft.Elem()
|
||||
}
|
||||
|
||||
// Record found field and index sequence.
|
||||
if name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct {
|
||||
tagged := name != ""
|
||||
if name == "" {
|
||||
name = sf.Name
|
||||
}
|
||||
fields = append(fields, field{name, tagged, index, ft})
|
||||
if count[f.typ] > 1 {
|
||||
// If there were multiple instances, add a second,
|
||||
// so that the annihilation code will see a duplicate.
|
||||
// It only cares about the distinction between 1 or 2,
|
||||
// so don't bother generating any more copies.
|
||||
fields = append(fields, fields[len(fields)-1])
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// Record new anonymous struct to explore in next round.
|
||||
nextCount[ft]++
|
||||
if nextCount[ft] == 1 {
|
||||
f := field{name: ft.Name(), index: index, typ: ft}
|
||||
next = append(next, f)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sort.Sort(byName(fields))
|
||||
|
||||
// Delete all fields that are hidden by the Go rules for embedded fields,
|
||||
// except that fields with TOML tags are promoted.
|
||||
|
||||
// The fields are sorted in primary order of name, secondary order
|
||||
// of field index length. Loop over names; for each name, delete
|
||||
// hidden fields by choosing the one dominant field that survives.
|
||||
out := fields[:0]
|
||||
for advance, i := 0, 0; i < len(fields); i += advance {
|
||||
// One iteration per name.
|
||||
// Find the sequence of fields with the name of this first field.
|
||||
fi := fields[i]
|
||||
name := fi.name
|
||||
for advance = 1; i+advance < len(fields); advance++ {
|
||||
fj := fields[i+advance]
|
||||
if fj.name != name {
|
||||
break
|
||||
}
|
||||
}
|
||||
if advance == 1 { // Only one field with this name
|
||||
out = append(out, fi)
|
||||
continue
|
||||
}
|
||||
dominant, ok := dominantField(fields[i : i+advance])
|
||||
if ok {
|
||||
out = append(out, dominant)
|
||||
}
|
||||
}
|
||||
|
||||
fields = out
|
||||
sort.Sort(byIndex(fields))
|
||||
|
||||
return fields
|
||||
}
|
||||
|
||||
// dominantField looks through the fields, all of which are known to
|
||||
// have the same name, to find the single field that dominates the
|
||||
// others using Go's embedding rules, modified by the presence of
|
||||
// TOML tags. If there are multiple top-level fields, the boolean
|
||||
// will be false: This condition is an error in Go and we skip all
|
||||
// the fields.
|
||||
func dominantField(fields []field) (field, bool) {
|
||||
// The fields are sorted in increasing index-length order. The winner
|
||||
// must therefore be one with the shortest index length. Drop all
|
||||
// longer entries, which is easy: just truncate the slice.
|
||||
length := len(fields[0].index)
|
||||
tagged := -1 // Index of first tagged field.
|
||||
for i, f := range fields {
|
||||
if len(f.index) > length {
|
||||
fields = fields[:i]
|
||||
break
|
||||
}
|
||||
if f.tag {
|
||||
if tagged >= 0 {
|
||||
// Multiple tagged fields at the same level: conflict.
|
||||
// Return no field.
|
||||
return field{}, false
|
||||
}
|
||||
tagged = i
|
||||
}
|
||||
}
|
||||
if tagged >= 0 {
|
||||
return fields[tagged], true
|
||||
}
|
||||
// All remaining fields have the same length. If there's more than one,
|
||||
// we have a conflict (two fields named "X" at the same level) and we
|
||||
// return no field.
|
||||
if len(fields) > 1 {
|
||||
return field{}, false
|
||||
}
|
||||
return fields[0], true
|
||||
}
|
||||
|
||||
var fieldCache struct {
|
||||
sync.RWMutex
|
||||
m map[reflect.Type][]field
|
||||
}
|
||||
|
||||
// cachedTypeFields is like typeFields but uses a cache to avoid repeated work.
|
||||
func cachedTypeFields(t reflect.Type) []field {
|
||||
fieldCache.RLock()
|
||||
f := fieldCache.m[t]
|
||||
fieldCache.RUnlock()
|
||||
if f != nil {
|
||||
return f
|
||||
}
|
||||
|
||||
// Compute fields without lock.
|
||||
// Might duplicate effort but won't hold other computations back.
|
||||
f = typeFields(t)
|
||||
if f == nil {
|
||||
f = []field{}
|
||||
}
|
||||
|
||||
fieldCache.Lock()
|
||||
if fieldCache.m == nil {
|
||||
fieldCache.m = map[reflect.Type][]field{}
|
||||
}
|
||||
fieldCache.m[t] = f
|
||||
fieldCache.Unlock()
|
||||
return f
|
||||
}
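
// The sketch below is illustrative and not part of the vendored diff: given a
// struct like exampleUser, typeFields/cachedTypeFields report one field named
// "name" (from the tag) and one named "Age" (falling back to the Go field
// name), while the "-" tag hides Secret entirely.
type exampleUser struct {
	Name   string `toml:"name"`
	Age    int
	Secret string `toml:"-"`
}

// fieldNames lists the TOML-visible field names for a type; for
// reflect.TypeOf(exampleUser{}) it returns []string{"name", "Age"}.
func fieldNames(t reflect.Type) []string {
	var names []string
	for _, f := range cachedTypeFields(t) {
		names = append(names, f.name)
	}
	return names
}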
|
||||
24 Godeps/_workspace/src/github.com/Unknwon/com/.gitignore (generated, vendored, new file)
@@ -0,0 +1,24 @@
|
||||
# Compiled Object files, Static and Dynamic libs (Shared Objects)
|
||||
*.o
|
||||
*.a
|
||||
*.so
|
||||
|
||||
# Folders
|
||||
_obj
|
||||
_test
|
||||
.idea
|
||||
|
||||
# Architecture specific extensions/prefixes
|
||||
*.[568vq]
|
||||
[568vq].out
|
||||
|
||||
*.cgo1.go
|
||||
*.cgo2.c
|
||||
_cgo_defun.c
|
||||
_cgo_gotypes.go
|
||||
_cgo_export.*
|
||||
|
||||
_testmain.go
|
||||
|
||||
*.exe
|
||||
*.iml
|
||||
191 Godeps/_workspace/src/github.com/Unknwon/com/LICENSE (generated, vendored, new file)
@@ -0,0 +1,191 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction, and
|
||||
distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by the copyright
|
||||
owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all other entities
|
||||
that control, are controlled by, or are under common control with that entity.
|
||||
For the purposes of this definition, "control" means (i) the power, direct or
|
||||
indirect, to cause the direction or management of such entity, whether by
|
||||
contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity exercising
|
||||
permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications, including
|
||||
but not limited to software source code, documentation source, and configuration
|
||||
files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical transformation or
|
||||
translation of a Source form, including but not limited to compiled object code,
|
||||
generated documentation, and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or Object form, made
|
||||
available under the License, as indicated by a copyright notice that is included
|
||||
in or attached to the work (an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object form, that
|
||||
is based on (or derived from) the Work and for which the editorial revisions,
|
||||
annotations, elaborations, or other modifications represent, as a whole, an
|
||||
original work of authorship. For the purposes of this License, Derivative Works
|
||||
shall not include works that remain separable from, or merely link (or bind by
|
||||
name) to the interfaces of, the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including the original version
|
||||
of the Work and any modifications or additions to that Work or Derivative Works
|
||||
thereof, that is intentionally submitted to Licensor for inclusion in the Work
|
||||
by the copyright owner or by an individual or Legal Entity authorized to submit
|
||||
on behalf of the copyright owner. For the purposes of this definition,
|
||||
"submitted" means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems, and
|
||||
issue tracking systems that are managed by, or on behalf of, the Licensor for
|
||||
the purpose of discussing and improving the Work, but excluding communication
|
||||
that is conspicuously marked or otherwise designated in writing by the copyright
|
||||
owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf
|
||||
of whom a Contribution has been received by Licensor and subsequently
|
||||
incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License.
|
||||
|
||||
Subject to the terms and conditions of this License, each Contributor hereby
|
||||
grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
|
||||
irrevocable copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the Work and such
|
||||
Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License.
|
||||
|
||||
Subject to the terms and conditions of this License, each Contributor hereby
|
||||
grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
|
||||
irrevocable (except as stated in this section) patent license to make, have
|
||||
made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.

4. Redistribution.

You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:

You must give any other recipients of the Work or Derivative Works a copy of this License; and
You must cause any modified files to carry prominent notices stating that You changed the files; and
You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.

You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.

5. Submission of Contributions.

Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.

6. Trademarks.

This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty.

Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.

8. Limitation of Liability.

In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability.

While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work

To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives.

Copyright [yyyy] [name of copyright owner]

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
24 Godeps/_workspace/src/github.com/Unknwon/com/README.md (generated, vendored, Normal file)
@@ -0,0 +1,24 @@
Common functions
===

[](https://drone.io/github.com/Unknwon/com/latest) [](http://gowalker.org/github.com/Unknwon/com)

This is an open source project for commonly used functions for the Go programming language.

This package needs >= **go 1.2**.

Code Convention: based on [Go Code Convention](https://github.com/Unknwon/go-code-convention).

## Contribute

Your contributions are welcome, but please check the following steps after you add functions and commit them:

1. Make sure you wrote user-friendly comments for **all functions**.
2. Make sure you wrote test cases covering every possible condition for **all functions** in the file `*_test.go`.
3. Make sure you wrote benchmarks for **all functions** in the file `*_test.go`.
4. Make sure you wrote useful examples for **all functions** in the file `example_test.go`.
5. Make sure you ran `go test -bench="."` and got **PASS**.

## Performance

See results on [drone.io](https://drone.io/github.com/Unknwon/com/latest) by `go test -bench="."`.
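The helpers in this vendored package are plain functions, so callers only need the import path used by the Godeps workspace. Below is a minimal, hedged sketch of how the command and logging helpers defined in cmd.go (which follows) can be combined; the git invocation and the log messages are purely illustrative and are not taken from Grafana code:

package main

import "github.com/Unknwon/com"

func main() {
	// Run a command and capture stdout/stderr as strings.
	stdout, stderr, err := com.ExecCmd("git", "status", "--short")
	if err != nil {
		// "[ ... ]" marks the error segment, which ColorLog prints in red.
		com.ColorLog("[ERRO] git failed [ %s ]\n", stderr)
		return
	}
	// "( ... )" marks a path-like segment, printed in yellow.
	com.ColorLog("[SUCC] git status finished ( %s )\n", stdout)
}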
161 Godeps/_workspace/src/github.com/Unknwon/com/cmd.go (generated, vendored, Normal file)
@@ -0,0 +1,161 @@
// +build go1.2

// Copyright 2013 com authors
//
// Licensed under the Apache License, Version 2.0 (the "License"): you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.

// Package com is an open source project for commonly used functions for the Go programming language.
package com

import (
	"bytes"
	"fmt"
	"os/exec"
	"runtime"
	"strings"
)

// ExecCmdDirBytes executes a system command in the given directory
// and returns stdout and stderr as bytes, along with a possible error.
func ExecCmdDirBytes(dir, cmdName string, args ...string) ([]byte, []byte, error) {
	bufOut := new(bytes.Buffer)
	bufErr := new(bytes.Buffer)

	cmd := exec.Command(cmdName, args...)
	cmd.Dir = dir
	cmd.Stdout = bufOut
	cmd.Stderr = bufErr

	err := cmd.Run()
	return bufOut.Bytes(), bufErr.Bytes(), err
}

// ExecCmdBytes executes a system command
// and returns stdout and stderr as bytes, along with a possible error.
func ExecCmdBytes(cmdName string, args ...string) ([]byte, []byte, error) {
	return ExecCmdDirBytes("", cmdName, args...)
}

// ExecCmdDir executes a system command in the given directory
// and returns stdout and stderr as strings, along with a possible error.
func ExecCmdDir(dir, cmdName string, args ...string) (string, string, error) {
	bufOut, bufErr, err := ExecCmdDirBytes(dir, cmdName, args...)
	return string(bufOut), string(bufErr), err
}

// ExecCmd executes a system command
// and returns stdout and stderr as strings, along with a possible error.
func ExecCmd(cmdName string, args ...string) (string, string, error) {
	return ExecCmdDir("", cmdName, args...)
}

// Color Log

// Color number constants.
const (
	Gray = uint8(iota + 90)
	Red
	Green
	Yellow
	Blue
	Magenta
	//NRed = uint8(31) // Normal
	EndColor = "\033[0m"
)

// getColorLevel returns the colored level string for the given level.
func getColorLevel(level string) string {
	level = strings.ToUpper(level)
	switch level {
	case "TRAC":
		return fmt.Sprintf("\033[%dm%s\033[0m", Blue, level)
	case "ERRO":
		return fmt.Sprintf("\033[%dm%s\033[0m", Red, level)
	case "WARN":
		return fmt.Sprintf("\033[%dm%s\033[0m", Magenta, level)
	case "SUCC":
		return fmt.Sprintf("\033[%dm%s\033[0m", Green, level)
	default:
		return level
	}
}

// ColorLogS colors a log message and returns the colored content.
// Log format: <level> <content [highlight][path]> [ error ].
// Level: TRAC -> blue; ERRO -> red; WARN -> Magenta; SUCC -> green; others -> default.
// Content: default; path: yellow; error -> red.
// Level has to be surrounded by "[" and "]".
// Highlights have to be surrounded by "# " and " #" (with spaces); the "#" markers are removed.
// Paths have to be surrounded by "( " and " )" (with spaces).
// Errors have to be surrounded by "[ " and " ]" (with spaces).
// Note: Windows is not supported yet; contributions are welcome.
func ColorLogS(format string, a ...interface{}) string {
	log := fmt.Sprintf(format, a...)

	var clog string

	if runtime.GOOS != "windows" {
		// Level.
		i := strings.Index(log, "]")
		if log[0] == '[' && i > -1 {
			clog += "[" + getColorLevel(log[1:i]) + "]"
		}

		log = log[i+1:]

		// Error.
		log = strings.Replace(log, "[ ", fmt.Sprintf("[\033[%dm", Red), -1)
		log = strings.Replace(log, " ]", EndColor+"]", -1)

		// Path.
		log = strings.Replace(log, "( ", fmt.Sprintf("(\033[%dm", Yellow), -1)
		log = strings.Replace(log, " )", EndColor+")", -1)

		// Highlights.
		log = strings.Replace(log, "# ", fmt.Sprintf("\033[%dm", Gray), -1)
		log = strings.Replace(log, " #", EndColor, -1)

	} else {
		// Level.
		i := strings.Index(log, "]")
		if log[0] == '[' && i > -1 {
			clog += "[" + log[1:i] + "]"
		}

		log = log[i+1:]

		// Error.
		log = strings.Replace(log, "[ ", "[", -1)
		log = strings.Replace(log, " ]", "]", -1)

		// Path.
		log = strings.Replace(log, "( ", "(", -1)
		log = strings.Replace(log, " )", ")", -1)

		// Highlights.
		log = strings.Replace(log, "# ", "", -1)
		log = strings.Replace(log, " #", "", -1)
	}
	return clog + log
}

// ColorLog prints a colored log message to stdout.
// See the color rules in function 'ColorLogS'.
func ColorLog(format string, a ...interface{}) {
	fmt.Print(ColorLogS(format, a...))
}
140 Godeps/_workspace/src/github.com/Unknwon/com/cmd_test.go (generated, vendored, Normal file)
@@ -0,0 +1,140 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"runtime"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestColorLogS(t *testing.T) {
|
||||
if runtime.GOOS != "windows" {
|
||||
// Trace + path.
|
||||
cls := ColorLogS("[TRAC] Trace level test with path( %s )", "/path/to/somethere")
|
||||
clsR := fmt.Sprintf(
|
||||
"[\033[%dmTRAC%s] Trace level test with path(\033[%dm%s%s)",
|
||||
Blue, EndColor, Yellow, "/path/to/somethere", EndColor)
|
||||
if cls != clsR {
|
||||
t.Errorf("ColorLogS:\n Expect => %s\n Got => %s\n", clsR, cls)
|
||||
}
|
||||
|
||||
// Error + error.
|
||||
cls = ColorLogS("[ERRO] Error level test with error[ %s ]", "test error")
|
||||
clsR = fmt.Sprintf(
|
||||
"[\033[%dmERRO%s] Error level test with error[\033[%dm%s%s]",
|
||||
Red, EndColor, Red, "test error", EndColor)
|
||||
if cls != clsR {
|
||||
t.Errorf("ColorLogS:\n Expect => %s\n Got => %s\n", clsR, cls)
|
||||
}
|
||||
|
||||
// Warning + highlight.
|
||||
cls = ColorLogS("[WARN] Warnning level test with highlight # %s #", "special offer!")
|
||||
clsR = fmt.Sprintf(
|
||||
"[\033[%dmWARN%s] Warnning level test with highlight \033[%dm%s%s",
|
||||
Magenta, EndColor, Gray, "special offer!", EndColor)
|
||||
if cls != clsR {
|
||||
t.Errorf("ColorLogS:\n Expect => %s\n Got => %s\n", clsR, cls)
|
||||
}
|
||||
|
||||
// Success.
|
||||
cls = ColorLogS("[SUCC] Success level test")
|
||||
clsR = fmt.Sprintf(
|
||||
"[\033[%dmSUCC%s] Success level test",
|
||||
Green, EndColor)
|
||||
if cls != clsR {
|
||||
t.Errorf("ColorLogS:\n Expect => %s\n Got => %s\n", clsR, cls)
|
||||
}
|
||||
|
||||
// Default.
|
||||
cls = ColorLogS("[INFO] Default level test")
|
||||
clsR = fmt.Sprintf(
|
||||
"[INFO] Default level test")
|
||||
if cls != clsR {
|
||||
t.Errorf("ColorLogS:\n Expect => %s\n Got => %s\n", clsR, cls)
|
||||
}
|
||||
} else {
|
||||
// Trace + path.
|
||||
cls := ColorLogS("[TRAC] Trace level test with path( %s )", "/path/to/somethere")
|
||||
clsR := fmt.Sprintf(
|
||||
"[TRAC] Trace level test with path(%s)",
|
||||
"/path/to/somethere")
|
||||
if cls != clsR {
|
||||
t.Errorf("ColorLogS:\n Expect => %s\n Got => %s\n", clsR, cls)
|
||||
}
|
||||
|
||||
// Error + error.
|
||||
cls = ColorLogS("[ERRO] Error level test with error[ %s ]", "test error")
|
||||
clsR = fmt.Sprintf(
|
||||
"[ERRO] Error level test with error[%s]",
|
||||
"test error")
|
||||
if cls != clsR {
|
||||
t.Errorf("ColorLogS:\n Expect => %s\n Got => %s\n", clsR, cls)
|
||||
}
|
||||
|
||||
// Warning + highlight.
|
||||
cls = ColorLogS("[WARN] Warnning level test with highlight # %s #", "special offer!")
|
||||
clsR = fmt.Sprintf(
|
||||
"[WARN] Warnning level test with highlight %s",
|
||||
"special offer!")
|
||||
if cls != clsR {
|
||||
t.Errorf("ColorLogS:\n Expect => %s\n Got => %s\n", clsR, cls)
|
||||
}
|
||||
|
||||
// Success.
|
||||
cls = ColorLogS("[SUCC] Success level test")
|
||||
clsR = fmt.Sprintf(
|
||||
"[SUCC] Success level test")
|
||||
if cls != clsR {
|
||||
t.Errorf("ColorLogS:\n Expect => %s\n Got => %s\n", clsR, cls)
|
||||
}
|
||||
|
||||
// Default.
|
||||
cls = ColorLogS("[INFO] Default level test")
|
||||
clsR = fmt.Sprintf(
|
||||
"[INFO] Default level test")
|
||||
if cls != clsR {
|
||||
t.Errorf("ColorLogS:\n Expect => %s\n Got => %s\n", clsR, cls)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestExecCmd(t *testing.T) {
|
||||
stdout, stderr, err := ExecCmd("go", "help", "get")
|
||||
if err != nil {
|
||||
t.Errorf("ExecCmd:\n Expect => %v\n Got => %v\n", nil, err)
|
||||
} else if len(stderr) != 0 {
|
||||
t.Errorf("ExecCmd:\n Expect => %s\n Got => %s\n", "", stderr)
|
||||
} else if !strings.HasPrefix(stdout, "usage: go get") {
|
||||
t.Errorf("ExecCmd:\n Expect => %s\n Got => %s\n", "usage: go get", stdout)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkColorLogS(b *testing.B) {
|
||||
log := fmt.Sprintf(
|
||||
"[WARN] This is a tesing log that should be colored, path( %s ),"+
|
||||
" highlight # %s #, error [ %s ].",
|
||||
"path to somewhere", "highlighted content", "tesing error")
|
||||
for i := 0; i < b.N; i++ {
|
||||
ColorLogS(log)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkExecCmd(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
ExecCmd("go", "help", "get")
|
||||
}
|
||||
}
|
||||
157 Godeps/_workspace/src/github.com/Unknwon/com/convert.go (generated, vendored, Normal file)
@@ -0,0 +1,157 @@
|
||||
// Copyright 2014 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// StrTo converts a string to a specified type.
|
||||
type StrTo string
|
||||
|
||||
func (f StrTo) Exist() bool {
|
||||
return string(f) != string(0x1E)
|
||||
}
|
||||
|
||||
func (f StrTo) Uint8() (uint8, error) {
|
||||
v, err := strconv.ParseUint(f.String(), 10, 8)
|
||||
return uint8(v), err
|
||||
}
|
||||
|
||||
func (f StrTo) Int() (int, error) {
|
||||
v, err := strconv.ParseInt(f.String(), 10, 32)
|
||||
return int(v), err
|
||||
}
|
||||
|
||||
func (f StrTo) Int64() (int64, error) {
|
||||
v, err := strconv.ParseInt(f.String(), 10, 64)
|
||||
return int64(v), err
|
||||
}
|
||||
|
||||
func (f StrTo) MustUint8() uint8 {
|
||||
v, _ := f.Uint8()
|
||||
return v
|
||||
}
|
||||
|
||||
func (f StrTo) MustInt() int {
|
||||
v, _ := f.Int()
|
||||
return v
|
||||
}
|
||||
|
||||
func (f StrTo) MustInt64() int64 {
|
||||
v, _ := f.Int64()
|
||||
return v
|
||||
}
|
||||
|
||||
func (f StrTo) String() string {
|
||||
if f.Exist() {
|
||||
return string(f)
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// ToStr converts a value of any type to a string.
|
||||
func ToStr(value interface{}, args ...int) (s string) {
|
||||
switch v := value.(type) {
|
||||
case bool:
|
||||
s = strconv.FormatBool(v)
|
||||
case float32:
|
||||
s = strconv.FormatFloat(float64(v), 'f', argInt(args).Get(0, -1), argInt(args).Get(1, 32))
|
||||
case float64:
|
||||
s = strconv.FormatFloat(v, 'f', argInt(args).Get(0, -1), argInt(args).Get(1, 64))
|
||||
case int:
|
||||
s = strconv.FormatInt(int64(v), argInt(args).Get(0, 10))
|
||||
case int8:
|
||||
s = strconv.FormatInt(int64(v), argInt(args).Get(0, 10))
|
||||
case int16:
|
||||
s = strconv.FormatInt(int64(v), argInt(args).Get(0, 10))
|
||||
case int32:
|
||||
s = strconv.FormatInt(int64(v), argInt(args).Get(0, 10))
|
||||
case int64:
|
||||
s = strconv.FormatInt(v, argInt(args).Get(0, 10))
|
||||
case uint:
|
||||
s = strconv.FormatUint(uint64(v), argInt(args).Get(0, 10))
|
||||
case uint8:
|
||||
s = strconv.FormatUint(uint64(v), argInt(args).Get(0, 10))
|
||||
case uint16:
|
||||
s = strconv.FormatUint(uint64(v), argInt(args).Get(0, 10))
|
||||
case uint32:
|
||||
s = strconv.FormatUint(uint64(v), argInt(args).Get(0, 10))
|
||||
case uint64:
|
||||
s = strconv.FormatUint(v, argInt(args).Get(0, 10))
|
||||
case string:
|
||||
s = v
|
||||
case []byte:
|
||||
s = string(v)
|
||||
default:
|
||||
s = fmt.Sprintf("%v", v)
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
type argInt []int
|
||||
|
||||
func (a argInt) Get(i int, args ...int) (r int) {
|
||||
if i >= 0 && i < len(a) {
|
||||
r = a[i]
|
||||
} else if len(args) > 0 {
|
||||
r = args[0]
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// HexStr2int converts hex format string to decimal number.
|
||||
func HexStr2int(hexStr string) (int, error) {
|
||||
num := 0
|
||||
length := len(hexStr)
|
||||
for i := 0; i < length; i++ {
|
||||
char := hexStr[length-i-1]
|
||||
factor := -1
|
||||
|
||||
switch {
|
||||
case char >= '0' && char <= '9':
|
||||
factor = int(char) - '0'
|
||||
case char >= 'a' && char <= 'f':
|
||||
factor = int(char) - 'a' + 10
|
||||
default:
|
||||
return -1, fmt.Errorf("invalid hex: %s", string(char))
|
||||
}
|
||||
|
||||
num += factor * PowInt(16, i)
|
||||
}
|
||||
return num, nil
|
||||
}
|
||||
|
||||
// Int2HexStr converts decimal number to hex format string.
|
||||
func Int2HexStr(num int) (hex string) {
|
||||
if num == 0 {
|
||||
return "0"
|
||||
}
|
||||
|
||||
for num > 0 {
|
||||
r := num % 16
|
||||
|
||||
c := "?"
|
||||
if r >= 0 && r <= 9 {
|
||||
c = string(r + '0')
|
||||
} else {
|
||||
c = string(r + 'a' - 10)
|
||||
}
|
||||
hex = c + hex
|
||||
num = num / 16
|
||||
}
|
||||
return hex
|
||||
}
|
||||
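A short, hedged sketch of the conversion helpers defined above; the literal values are illustrative, and the hex pair matches the table used in convert_test.go below:

package main

import (
	"fmt"

	"github.com/Unknwon/com"
)

func main() {
	// StrTo parses string values; MustInt swallows the parse error.
	n := com.StrTo("42").MustInt()
	fmt.Println(n) // 42

	// ToStr formats arbitrary values; optional args control precision or base.
	fmt.Println(com.ToStr(3.14))    // "3.14"
	fmt.Println(com.ToStr(255, 16)) // "ff" (base 16)

	// Hex helpers round-trip between strings and ints.
	v, _ := com.HexStr2int("0a1")
	fmt.Println(v, com.Int2HexStr(161)) // 161 a1
}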
56 Godeps/_workspace/src/github.com/Unknwon/com/convert_test.go (generated, vendored, Normal file)
@@ -0,0 +1,56 @@
|
||||
// Copyright 2014 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
)
|
||||
|
||||
func TestHexStr2int(t *testing.T) {
|
||||
Convey("Convert hex format string to decimal", t, func() {
|
||||
hexDecs := map[string]int{
|
||||
"1": 1,
|
||||
"002": 2,
|
||||
"011": 17,
|
||||
"0a1": 161,
|
||||
"35e": 862,
|
||||
}
|
||||
|
||||
for hex, dec := range hexDecs {
|
||||
val, err := HexStr2int(hex)
|
||||
So(err, ShouldBeNil)
|
||||
So(val, ShouldEqual, dec)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestInt2HexStr(t *testing.T) {
|
||||
Convey("Convert decimal to hex format string", t, func() {
|
||||
decHexs := map[int]string{
|
||||
1: "1",
|
||||
2: "2",
|
||||
17: "11",
|
||||
161: "a1",
|
||||
862: "35e",
|
||||
}
|
||||
|
||||
for dec, hex := range decHexs {
|
||||
val := Int2HexStr(dec)
|
||||
So(val, ShouldEqual, hex)
|
||||
}
|
||||
})
|
||||
}
|
||||
173 Godeps/_workspace/src/github.com/Unknwon/com/dir.go (generated, vendored, Normal file)
@@ -0,0 +1,173 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"path"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// IsDir returns true if given path is a directory,
|
||||
// or returns false when it's a file or does not exist.
|
||||
func IsDir(dir string) bool {
|
||||
f, e := os.Stat(dir)
|
||||
if e != nil {
|
||||
return false
|
||||
}
|
||||
return f.IsDir()
|
||||
}
|
||||
|
||||
func statDir(dirPath, recPath string, includeDir, isDirOnly bool) ([]string, error) {
|
||||
dir, err := os.Open(dirPath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer dir.Close()
|
||||
|
||||
fis, err := dir.Readdir(0)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
statList := make([]string, 0)
|
||||
for _, fi := range fis {
|
||||
if strings.Contains(fi.Name(), ".DS_Store") {
|
||||
continue
|
||||
}
|
||||
|
||||
relPath := path.Join(recPath, fi.Name())
|
||||
curPath := path.Join(dirPath, fi.Name())
|
||||
if fi.IsDir() {
|
||||
if includeDir {
|
||||
statList = append(statList, relPath+"/")
|
||||
}
|
||||
s, err := statDir(curPath, relPath, includeDir, isDirOnly)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
statList = append(statList, s...)
|
||||
} else if !isDirOnly {
|
||||
statList = append(statList, relPath)
|
||||
}
|
||||
}
|
||||
return statList, nil
|
||||
}
|
||||
|
||||
// StatDir gathers information of given directory by depth-first.
|
||||
// It returns slice of file list and includes subdirectories if enabled;
|
||||
// it returns error and nil slice when error occurs in underlying functions,
|
||||
// or given path is not a directory or does not exist.
|
||||
//
|
||||
// Slice does not include given path itself.
|
||||
// If subdirectories is enabled, they will have suffix '/'.
|
||||
func StatDir(rootPath string, includeDir ...bool) ([]string, error) {
|
||||
if !IsDir(rootPath) {
|
||||
return nil, errors.New("not a directory or does not exist: " + rootPath)
|
||||
}
|
||||
|
||||
isIncludeDir := false
|
||||
if len(includeDir) >= 1 {
|
||||
isIncludeDir = includeDir[0]
|
||||
}
|
||||
return statDir(rootPath, "", isIncludeDir, false)
|
||||
}
|
||||
|
||||
// GetAllSubDirs returns all subdirectories of given root path.
|
||||
// Slice does not include given path itself.
|
||||
func GetAllSubDirs(rootPath string) ([]string, error) {
|
||||
if !IsDir(rootPath) {
|
||||
return nil, errors.New("not a directory or does not exist: " + rootPath)
|
||||
}
|
||||
return statDir(rootPath, "", true, true)
|
||||
}
|
||||
|
||||
// GetFileListBySuffix returns an ordered list of file paths.
// It recognizes whether the given path is a file, and does not search recursively.
func GetFileListBySuffix(dirPath, suffix string) ([]string, error) {
|
||||
if !IsExist(dirPath) {
|
||||
return nil, fmt.Errorf("given path does not exist: %s", dirPath)
|
||||
} else if IsFile(dirPath) {
|
||||
return []string{dirPath}, nil
|
||||
}
|
||||
|
||||
// Given path is a directory.
|
||||
dir, err := os.Open(dirPath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
fis, err := dir.Readdir(0)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
files := make([]string, 0, len(fis))
|
||||
for _, fi := range fis {
|
||||
if strings.HasSuffix(fi.Name(), suffix) {
|
||||
files = append(files, path.Join(dirPath, fi.Name()))
|
||||
}
|
||||
}
|
||||
|
||||
return files, nil
|
||||
}
|
||||
|
||||
// CopyDir copies files recursively from the source to the target directory.
//
// The filter accepts a function that processes the path info
// and should return true for paths that need to be filtered out.
//
// It returns an error when an error occurs in the underlying functions.
|
||||
func CopyDir(srcPath, destPath string, filters ...func(filePath string) bool) error {
|
||||
// Check if target directory exists.
|
||||
if IsExist(destPath) {
|
||||
return errors.New("file or directory alreay exists: " + destPath)
|
||||
}
|
||||
|
||||
err := os.MkdirAll(destPath, os.ModePerm)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Gather directory info.
|
||||
infos, err := StatDir(srcPath, true)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var filter func(filePath string) bool
|
||||
if len(filters) > 0 {
|
||||
filter = filters[0]
|
||||
}
|
||||
|
||||
for _, info := range infos {
|
||||
if filter != nil && filter(info) {
|
||||
continue
|
||||
}
|
||||
|
||||
curPath := path.Join(destPath, info)
|
||||
if strings.HasSuffix(info, "/") {
|
||||
err = os.MkdirAll(curPath, os.ModePerm)
|
||||
} else {
|
||||
err = Copy(path.Join(srcPath, info), curPath)
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
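A hedged sketch of the CopyDir filter described above; the directory names and the ".tmp" suffix are hypothetical, chosen only to show that the filter returns true for entries that should be skipped:

package main

import (
	"strings"

	"github.com/Unknwon/com"
)

func main() {
	// Copy "data" into "data-backup", skipping temporary files.
	err := com.CopyDir("data", "data-backup", func(filePath string) bool {
		return strings.HasSuffix(filePath, ".tmp")
	})
	if err != nil {
		com.ColorLog("[ERRO] copy failed [ %s ]\n", err)
	}
}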
58 Godeps/_workspace/src/github.com/Unknwon/com/dir_test.go (generated, vendored, Normal file)
@@ -0,0 +1,58 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
)
|
||||
|
||||
func TestIsDir(t *testing.T) {
|
||||
Convey("Check if given path is a directory", t, func() {
|
||||
Convey("Pass a file name", func() {
|
||||
So(IsDir("file.go"), ShouldEqual, false)
|
||||
})
|
||||
Convey("Pass a directory name", func() {
|
||||
So(IsDir("testdata"), ShouldEqual, true)
|
||||
})
|
||||
Convey("Pass a invalid path", func() {
|
||||
So(IsDir("foo"), ShouldEqual, false)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
func TestCopyDir(t *testing.T) {
|
||||
Convey("Items of two slices should be same", t, func() {
|
||||
s1, err := StatDir("testdata", true)
|
||||
So(err, ShouldEqual, nil)
|
||||
|
||||
err = CopyDir("testdata", "testdata2")
|
||||
So(err, ShouldEqual, nil)
|
||||
|
||||
s2, err := StatDir("testdata2", true)
|
||||
os.RemoveAll("testdata2")
|
||||
So(err, ShouldEqual, nil)
|
||||
|
||||
So(CompareSliceStr(s1, s2), ShouldEqual, true)
|
||||
})
|
||||
}
|
||||
|
||||
func BenchmarkIsDir(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
IsDir("file.go")
|
||||
}
|
||||
}
|
||||
299 Godeps/_workspace/src/github.com/Unknwon/com/example_test.go (generated, vendored, Normal file)
@@ -0,0 +1,299 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
|
||||
"github.com/Unknwon/com"
|
||||
)
|
||||
|
||||
// ------------------------------
|
||||
// cmd.go
|
||||
// ------------------------------
|
||||
|
||||
func ExampleColorLogS() {
|
||||
coloredLog := com.ColorLogS(fmt.Sprintf(
|
||||
"[WARN] This is a tesing log that should be colored, path( %s ),"+
|
||||
" highlight # %s #, error [ %s ].",
|
||||
"path to somewhere", "highlighted content", "tesing error"))
|
||||
fmt.Println(coloredLog)
|
||||
}
|
||||
|
||||
func ExampleColorLog() {
|
||||
com.ColorLog(fmt.Sprintf(
|
||||
"[WARN] This is a tesing log that should be colored, path( %s ),"+
|
||||
" highlight # %s #, error [ %s ].",
|
||||
"path to somewhere", "highlighted content", "tesing error"))
|
||||
}
|
||||
|
||||
func ExampleExecCmd() {
|
||||
stdout, stderr, err := com.ExecCmd("go", "help", "get")
|
||||
fmt.Println(stdout, stderr, err)
|
||||
}
|
||||
|
||||
// ------------- END ------------
|
||||
|
||||
// ------------------------------
|
||||
// html.go
|
||||
// ------------------------------
|
||||
|
||||
func ExampleHtml2JS() {
|
||||
htm := "<div id=\"button\" class=\"btn\">Click me</div>\n\r"
|
||||
js := string(com.Html2JS([]byte(htm)))
|
||||
fmt.Println(js)
|
||||
// Output: <div id=\"button\" class=\"btn\">Click me</div>\n
|
||||
}
|
||||
|
||||
// ------------- END ------------
|
||||
|
||||
// ------------------------------
|
||||
// path.go
|
||||
// ------------------------------
|
||||
|
||||
func ExampleGetGOPATHs() {
|
||||
gps := com.GetGOPATHs()
|
||||
fmt.Println(gps)
|
||||
}
|
||||
|
||||
func ExampleGetSrcPath() {
|
||||
srcPath, err := com.GetSrcPath("github.com/Unknwon/com")
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
return
|
||||
}
|
||||
fmt.Println(srcPath)
|
||||
}
|
||||
|
||||
func ExampleHomeDir() {
|
||||
hd, err := com.HomeDir()
|
||||
fmt.Println(hd, err)
|
||||
}
|
||||
|
||||
// ------------- END ------------
|
||||
|
||||
// ------------------------------
|
||||
// file.go
|
||||
// ------------------------------
|
||||
|
||||
func ExampleIsFile() {
|
||||
if com.IsFile("file.go") {
|
||||
fmt.Println("file.go exists")
|
||||
return
|
||||
}
|
||||
fmt.Println("file.go is not a file or does not exist")
|
||||
}
|
||||
|
||||
func ExampleIsExist() {
|
||||
if com.IsExist("file.go") {
|
||||
fmt.Println("file.go exists")
|
||||
return
|
||||
}
|
||||
fmt.Println("file.go does not exist")
|
||||
}
|
||||
|
||||
// ------------- END ------------
|
||||
|
||||
// ------------------------------
|
||||
// dir.go
|
||||
// ------------------------------
|
||||
|
||||
func ExampleIsDir() {
|
||||
if com.IsDir("files") {
|
||||
fmt.Println("directory 'files' exists")
|
||||
return
|
||||
}
|
||||
fmt.Println("'files' is not a directory or does not exist")
|
||||
}
|
||||
|
||||
// ------------- END ------------
|
||||
|
||||
// ------------------------------
|
||||
// string.go
|
||||
// ------------------------------
|
||||
|
||||
func ExampleIsLetter() {
|
||||
fmt.Println(com.IsLetter('1'))
|
||||
fmt.Println(com.IsLetter('['))
|
||||
fmt.Println(com.IsLetter('a'))
|
||||
fmt.Println(com.IsLetter('Z'))
|
||||
// Output:
|
||||
// false
|
||||
// false
|
||||
// true
|
||||
// true
|
||||
}
|
||||
|
||||
func ExampleExpand() {
|
||||
match := map[string]string{
|
||||
"domain": "gowalker.org",
|
||||
"subdomain": "github.com",
|
||||
}
|
||||
s := "http://{domain}/{subdomain}/{0}/{1}"
|
||||
fmt.Println(com.Expand(s, match, "Unknwon", "gowalker"))
|
||||
// Output: http://gowalker.org/github.com/Unknwon/gowalker
|
||||
}
|
||||
|
||||
// ------------- END ------------
|
||||
|
||||
// ------------------------------
|
||||
// http.go
|
||||
// ------------------------------
|
||||
|
||||
func ExampleHttpGet() ([]byte, error) {
|
||||
rc, err := com.HttpGet(&http.Client{}, "http://gowalker.org", nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
p, err := ioutil.ReadAll(rc)
|
||||
rc.Close()
|
||||
return p, err
|
||||
}
|
||||
|
||||
func ExampleHttpGetBytes() ([]byte, error) {
|
||||
p, err := com.HttpGetBytes(&http.Client{}, "http://gowalker.org", nil)
|
||||
return p, err
|
||||
}
|
||||
|
||||
func ExampleHttpGetJSON() interface{} {
|
||||
j := com.HttpGetJSON(&http.Client{}, "http://gowalker.org", nil)
|
||||
return j
|
||||
}
|
||||
|
||||
type rawFile struct {
|
||||
name string
|
||||
rawURL string
|
||||
data []byte
|
||||
}
|
||||
|
||||
func (rf *rawFile) Name() string {
|
||||
return rf.name
|
||||
}
|
||||
|
||||
func (rf *rawFile) RawUrl() string {
|
||||
return rf.rawURL
|
||||
}
|
||||
|
||||
func (rf *rawFile) Data() []byte {
|
||||
return rf.data
|
||||
}
|
||||
|
||||
func (rf *rawFile) SetData(p []byte) {
|
||||
rf.data = p
|
||||
}
|
||||
|
||||
func ExampleFetchFiles() {
|
||||
// Code that should be outside of your function body.
|
||||
// type rawFile struct {
|
||||
// name string
|
||||
// rawURL string
|
||||
// data []byte
|
||||
// }
|
||||
|
||||
// func (rf *rawFile) Name() string {
|
||||
// return rf.name
|
||||
// }
|
||||
|
||||
// func (rf *rawFile) RawUrl() string {
|
||||
// return rf.rawURL
|
||||
// }
|
||||
|
||||
// func (rf *rawFile) Data() []byte {
|
||||
// return rf.data
|
||||
// }
|
||||
|
||||
// func (rf *rawFile) SetData(p []byte) {
|
||||
// rf.data = p
|
||||
// }
|
||||
|
||||
files := []com.RawFile{
|
||||
&rawFile{rawURL: "http://example.com"},
|
||||
&rawFile{rawURL: "http://example.com/foo"},
|
||||
}
|
||||
err := com.FetchFiles(&http.Client{}, files, nil)
|
||||
fmt.Println(err, len(files[0].Data()), len(files[1].Data()))
|
||||
}
|
||||
|
||||
func ExampleFetchFilesCurl() {
|
||||
// Code that should be outside of your function body.
|
||||
// type rawFile struct {
|
||||
// name string
|
||||
// rawURL string
|
||||
// data []byte
|
||||
// }
|
||||
|
||||
// func (rf *rawFile) Name() string {
|
||||
// return rf.name
|
||||
// }
|
||||
|
||||
// func (rf *rawFile) RawUrl() string {
|
||||
// return rf.rawURL
|
||||
// }
|
||||
|
||||
// func (rf *rawFile) Data() []byte {
|
||||
// return rf.data
|
||||
// }
|
||||
|
||||
// func (rf *rawFile) SetData(p []byte) {
|
||||
// rf.data = p
|
||||
// }
|
||||
|
||||
files := []com.RawFile{
|
||||
&rawFile{rawURL: "http://example.com"},
|
||||
&rawFile{rawURL: "http://example.com/foo"},
|
||||
}
|
||||
err := com.FetchFilesCurl(files)
|
||||
fmt.Println(err, len(files[0].Data()), len(files[1].Data()))
|
||||
}
|
||||
|
||||
// ------------- END ------------
|
||||
|
||||
// ------------------------------
|
||||
// regex.go
|
||||
// ------------------------------
|
||||
|
||||
func ExampleIsEmail() {
|
||||
fmt.Println(com.IsEmail("test@example.com"))
|
||||
fmt.Println(com.IsEmail("@example.com"))
|
||||
// Output:
|
||||
// true
|
||||
// false
|
||||
}
|
||||
|
||||
func ExampleIsUrl() {
|
||||
fmt.Println(com.IsUrl("http://example.com"))
|
||||
fmt.Println(com.IsUrl("http//example.com"))
|
||||
// Output:
|
||||
// true
|
||||
// false
|
||||
}
|
||||
|
||||
// ------------- END ------------
|
||||
|
||||
// ------------------------------
|
||||
// slice.go
|
||||
// ------------------------------
|
||||
|
||||
func ExampleAppendStr() {
|
||||
s := []string{"a"}
|
||||
s = com.AppendStr(s, "a")
|
||||
s = com.AppendStr(s, "b")
|
||||
fmt.Println(s)
|
||||
// Output: [a b]
|
||||
}
|
||||
|
||||
// ------------- END ------------
|
||||
145 Godeps/_workspace/src/github.com/Unknwon/com/file.go (generated, vendored, Normal file)
@@ -0,0 +1,145 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"math"
|
||||
"os"
|
||||
"path"
|
||||
)
|
||||
|
||||
// Storage unit constants.
|
||||
const (
|
||||
Byte = 1
|
||||
KByte = Byte * 1024
|
||||
MByte = KByte * 1024
|
||||
GByte = MByte * 1024
|
||||
TByte = GByte * 1024
|
||||
PByte = TByte * 1024
|
||||
EByte = PByte * 1024
|
||||
)
|
||||
|
||||
func logn(n, b float64) float64 {
|
||||
return math.Log(n) / math.Log(b)
|
||||
}
|
||||
|
||||
func humanateBytes(s uint64, base float64, sizes []string) string {
|
||||
if s < 10 {
|
||||
return fmt.Sprintf("%dB", s)
|
||||
}
|
||||
e := math.Floor(logn(float64(s), base))
|
||||
suffix := sizes[int(e)]
|
||||
val := float64(s) / math.Pow(base, math.Floor(e))
|
||||
f := "%.0f"
|
||||
if val < 10 {
|
||||
f = "%.1f"
|
||||
}
|
||||
|
||||
return fmt.Sprintf(f+"%s", val, suffix)
|
||||
}
|
||||
|
||||
// HumaneFileSize calculates the file size and generates a user-friendly string.
|
||||
func HumaneFileSize(s uint64) string {
|
||||
sizes := []string{"B", "KB", "MB", "GB", "TB", "PB", "EB"}
|
||||
return humanateBytes(s, 1024, sizes)
|
||||
}
|
||||
|
||||
// FileMTime returns file modified time and possible error.
|
||||
func FileMTime(file string) (int64, error) {
|
||||
f, err := os.Stat(file)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return f.ModTime().Unix(), nil
|
||||
}
|
||||
|
||||
// FileSize returns file size in bytes and possible error.
|
||||
func FileSize(file string) (int64, error) {
|
||||
f, err := os.Stat(file)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return f.Size(), nil
|
||||
}
|
||||
|
||||
// Copy copies file from source to target path.
|
||||
func Copy(src, dest string) error {
|
||||
// Gather file information to set back later.
|
||||
si, err := os.Lstat(src)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Handle symbolic link.
|
||||
if si.Mode()&os.ModeSymlink != 0 {
|
||||
target, err := os.Readlink(src)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// NOTE: os.Chmod and os.Chtimes do not recognize symbolic links,
// which would lead to a "no such file or directory" error.
|
||||
return os.Symlink(target, dest)
|
||||
}
|
||||
|
||||
sr, err := os.Open(src)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer sr.Close()
|
||||
|
||||
dw, err := os.Create(dest)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer dw.Close()
|
||||
|
||||
if _, err = io.Copy(dw, sr); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Set back file information.
|
||||
if err = os.Chtimes(dest, si.ModTime(), si.ModTime()); err != nil {
|
||||
return err
|
||||
}
|
||||
return os.Chmod(dest, si.Mode())
|
||||
}
|
||||
|
||||
// WriteFile writes data to a file named by filename.
// If the file does not exist, WriteFile creates it
// along with any missing parent directories.
|
||||
func WriteFile(filename string, data []byte) error {
|
||||
os.MkdirAll(path.Dir(filename), os.ModePerm)
|
||||
return ioutil.WriteFile(filename, data, 0655)
|
||||
}
|
||||
|
||||
// IsFile returns true if given path is a file,
|
||||
// or returns false when it's a directory or does not exist.
|
||||
func IsFile(filePath string) bool {
|
||||
f, e := os.Stat(filePath)
|
||||
if e != nil {
|
||||
return false
|
||||
}
|
||||
return !f.IsDir()
|
||||
}
|
||||
|
||||
// IsExist checks whether a file or directory exists.
|
||||
// It returns false when the file or directory does not exist.
|
||||
func IsExist(path string) bool {
|
||||
_, err := os.Stat(path)
|
||||
return err == nil || os.IsExist(err)
|
||||
}
|
||||
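A small sketch combining the file helpers above; "conf.ini" and the copy target are hypothetical names used only for illustration:

package main

import (
	"fmt"

	"github.com/Unknwon/com"
)

func main() {
	if !com.IsFile("conf.ini") {
		fmt.Println("conf.ini is missing")
		return
	}

	// Report the size in a human-readable form (e.g. "1.5MB").
	size, err := com.FileSize("conf.ini")
	if err == nil {
		fmt.Println(com.HumaneFileSize(uint64(size)))
	}

	// Copy preserves mode and timestamps of regular files.
	if err := com.Copy("conf.ini", "conf.ini.bak"); err != nil {
		fmt.Println("copy failed:", err)
	}
}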
61 Godeps/_workspace/src/github.com/Unknwon/com/file_test.go (generated, vendored, Normal file)
@@ -0,0 +1,61 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
)
|
||||
|
||||
func TestIsFile(t *testing.T) {
|
||||
if !IsFile("file.go") {
|
||||
t.Errorf("IsExist:\n Expect => %v\n Got => %v\n", true, false)
|
||||
}
|
||||
|
||||
if IsFile("testdata") {
|
||||
t.Errorf("IsExist:\n Expect => %v\n Got => %v\n", false, true)
|
||||
}
|
||||
|
||||
if IsFile("files.go") {
|
||||
t.Errorf("IsExist:\n Expect => %v\n Got => %v\n", false, true)
|
||||
}
|
||||
}
|
||||
|
||||
func TestIsExist(t *testing.T) {
|
||||
Convey("Check if file or directory exists", t, func() {
|
||||
Convey("Pass a file name that exists", func() {
|
||||
So(IsExist("file.go"), ShouldEqual, true)
|
||||
})
|
||||
Convey("Pass a directory name that exists", func() {
|
||||
So(IsExist("testdata"), ShouldEqual, true)
|
||||
})
|
||||
Convey("Pass a directory name that does not exist", func() {
|
||||
So(IsExist(".hg"), ShouldEqual, false)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
func BenchmarkIsFile(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
IsFile("file.go")
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIsExist(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
IsExist("file.go")
|
||||
}
|
||||
}
|
||||
60 Godeps/_workspace/src/github.com/Unknwon/com/html.go (generated, vendored, Normal file)
@@ -0,0 +1,60 @@
// Copyright 2013 com authors
//
// Licensed under the Apache License, Version 2.0 (the "License"): you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.

package com

import (
	"html"
	"regexp"
	"strings"
)

// Html2JS converts []byte type of HTML content into JS format.
func Html2JS(data []byte) []byte {
	s := string(data)
	s = strings.Replace(s, `\`, `\\`, -1)
	s = strings.Replace(s, "\n", `\n`, -1)
	s = strings.Replace(s, "\r", "", -1)
	s = strings.Replace(s, "\"", `\"`, -1)
	// Escape opening table tags so the result can be embedded safely.
	s = strings.Replace(s, "<table>", "&lt;table>", -1)
	return []byte(s)
}

// HtmlEncode encodes HTML characters in a string.
func HtmlEncode(str string) string {
	return html.EscapeString(str)
}

// HtmlDecode decodes HTML entities in a string back to characters.
func HtmlDecode(str string) string {
	return html.UnescapeString(str)
}

// StripTags strips HTML tags from a string.
func StripTags(src string) string {
	// Remove style blocks, script blocks, HTML tags, and comments.
	re := regexp.MustCompile(`(?s)<(?:style|script)[^<>]*>.*?</(?:style|script)>|</?[a-z][a-z0-9]*[^<>]*>|<!--.*?-->`)
	src = re.ReplaceAllString(src, "")

	// Collapse runs of two or more whitespace characters into "\n".
	re = regexp.MustCompile(`\s{2,}`)
	src = re.ReplaceAllString(src, "\n")

	return strings.TrimSpace(src)
}

// Nl2br replaces "\n" with "<br/>".
func Nl2br(str string) string {
	return strings.Replace(str, "\n", "<br/>", -1)
}
35 Godeps/_workspace/src/github.com/Unknwon/com/html_test.go (generated, vendored, Normal file)
@@ -0,0 +1,35 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestHtml2JS(t *testing.T) {
|
||||
htm := "<div id=\"button\" class=\"btn\">Click me</div>\n\r"
|
||||
js := string(Html2JS([]byte(htm)))
|
||||
jsR := `<div id=\"button\" class=\"btn\">Click me</div>\n`
|
||||
if js != jsR {
|
||||
t.Errorf("Html2JS:\n Expect => %s\n Got => %s\n", jsR, js)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkHtml2JS(b *testing.B) {
|
||||
htm := "<div id=\"button\" class=\"btn\">Click me</div>\n\r"
|
||||
for i := 0; i < b.N; i++ {
|
||||
Html2JS([]byte(htm))
|
||||
}
|
||||
}
|
||||
201 Godeps/_workspace/src/github.com/Unknwon/com/http.go (generated, vendored, Normal file)
@@ -0,0 +1,201 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"os"
|
||||
"path"
|
||||
)
|
||||
|
||||
type NotFoundError struct {
|
||||
Message string
|
||||
}
|
||||
|
||||
func (e NotFoundError) Error() string {
|
||||
return e.Message
|
||||
}
|
||||
|
||||
type RemoteError struct {
|
||||
Host string
|
||||
Err error
|
||||
}
|
||||
|
||||
func (e *RemoteError) Error() string {
|
||||
return e.Err.Error()
|
||||
}
|
||||
|
||||
var UserAgent = "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1541.0 Safari/537.36"
|
||||
|
||||
// HttpCall makes HTTP method call.
|
||||
func HttpCall(client *http.Client, method, url string, header http.Header, body io.Reader) (io.ReadCloser, error) {
|
||||
req, err := http.NewRequest(method, url, body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
req.Header.Set("User-Agent", UserAgent)
|
||||
for k, vs := range header {
|
||||
req.Header[k] = vs
|
||||
}
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if resp.StatusCode == 200 {
|
||||
return resp.Body, nil
|
||||
}
|
||||
resp.Body.Close()
|
||||
if resp.StatusCode == 404 { // 403 can be rate limit error. || resp.StatusCode == 403 {
|
||||
err = fmt.Errorf("resource not found: %s", url)
|
||||
} else {
|
||||
err = fmt.Errorf("%s %s -> %d", method, url, resp.StatusCode)
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// HttpGet gets the specified resource.
|
||||
// ErrNotFound is returned if the server responds with status 404.
|
||||
func HttpGet(client *http.Client, url string, header http.Header) (io.ReadCloser, error) {
|
||||
return HttpCall(client, "GET", url, header, nil)
|
||||
}
|
||||
|
||||
// HttpPost posts the specified resource.
|
||||
// ErrNotFound is returned if the server responds with status 404.
|
||||
func HttpPost(client *http.Client, url string, header http.Header, body []byte) (io.ReadCloser, error) {
|
||||
return HttpCall(client, "POST", url, header, bytes.NewBuffer(body))
|
||||
}
|
||||
|
||||
// HttpGetToFile gets the specified resource and writes to file.
|
||||
// ErrNotFound is returned if the server responds with status 404.
|
||||
func HttpGetToFile(client *http.Client, url string, header http.Header, fileName string) error {
|
||||
rc, err := HttpGet(client, url, header)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer rc.Close()
|
||||
|
||||
os.MkdirAll(path.Dir(fileName), os.ModePerm)
|
||||
f, err := os.Create(fileName)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
_, err = io.Copy(f, rc)
|
||||
return err
|
||||
}
|
||||
|
||||
// HttpGetBytes gets the specified resource. ErrNotFound is returned if the server
|
||||
// responds with status 404.
|
||||
func HttpGetBytes(client *http.Client, url string, header http.Header) ([]byte, error) {
|
||||
rc, err := HttpGet(client, url, header)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rc.Close()
|
||||
return ioutil.ReadAll(rc)
|
||||
}
|
||||
|
||||
// HttpGetJSON gets the specified resource and maps the result to a struct.
|
||||
// ErrNotFound is returned if the server responds with status 404.
|
||||
func HttpGetJSON(client *http.Client, url string, v interface{}) error {
|
||||
rc, err := HttpGet(client, url, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer rc.Close()
|
||||
err = json.NewDecoder(rc).Decode(v)
|
||||
if _, ok := err.(*json.SyntaxError); ok {
|
||||
return fmt.Errorf("JSON syntax error at %s", url)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// HttpPostJSON posts the specified resource with struct values,
|
||||
// and maps results to struct.
|
||||
// ErrNotFound is returned if the server responds with status 404.
|
||||
func HttpPostJSON(client *http.Client, url string, body, v interface{}) error {
|
||||
data, err := json.Marshal(body)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rc, err := HttpPost(client, url, http.Header{"content-type": []string{"application/json"}}, data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer rc.Close()
|
||||
err = json.NewDecoder(rc).Decode(v)
|
||||
if _, ok := err.(*json.SyntaxError); ok {
|
||||
return fmt.Errorf("JSON syntax error at %s", url)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// A RawFile describes a file that can be downloaded.
|
||||
type RawFile interface {
|
||||
Name() string
|
||||
RawUrl() string
|
||||
Data() []byte
|
||||
SetData([]byte)
|
||||
}
|
||||
|
||||
// FetchFiles fetches files specified by the rawURL field in parallel.
|
||||
func FetchFiles(client *http.Client, files []RawFile, header http.Header) error {
|
||||
ch := make(chan error, len(files))
|
||||
for i := range files {
|
||||
go func(i int) {
|
||||
p, err := HttpGetBytes(client, files[i].RawUrl(), nil)
|
||||
if err != nil {
|
||||
ch <- err
|
||||
return
|
||||
}
|
||||
files[i].SetData(p)
|
||||
ch <- nil
|
||||
}(i)
|
||||
}
|
||||
for _ = range files {
|
||||
if err := <-ch; err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// FetchFilesCurl uses the command `curl` to fetch files specified by the rawURL field in parallel.
|
||||
func FetchFilesCurl(files []RawFile, curlOptions ...string) error {
|
||||
ch := make(chan error, len(files))
|
||||
for i := range files {
|
||||
go func(i int) {
|
||||
stdout, _, err := ExecCmd("curl", append(curlOptions, files[i].RawUrl())...)
|
||||
if err != nil {
|
||||
ch <- err
|
||||
return
|
||||
}
|
||||
|
||||
files[i].SetData([]byte(stdout))
|
||||
ch <- nil
|
||||
}(i)
|
||||
}
|
||||
for _ = range files {
|
||||
if err := <-ch; err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
111 Godeps/_workspace/src/github.com/Unknwon/com/http_test.go (generated, vendored, Normal file)
@@ -0,0 +1,111 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
var examplePrefix = `<!doctype html>
|
||||
<html>
|
||||
<head>
|
||||
<title>Example Domain</title>
|
||||
`
|
||||
|
||||
func TestHttpGet(t *testing.T) {
|
||||
// 200.
|
||||
rc, err := HttpGet(&http.Client{}, "http://example.com", nil)
|
||||
if err != nil {
|
||||
t.Fatalf("HttpGet:\n Expect => %v\n Got => %s\n", nil, err)
|
||||
}
|
||||
p, err := ioutil.ReadAll(rc)
|
||||
if err != nil {
|
||||
t.Errorf("HttpGet:\n Expect => %v\n Got => %s\n", nil, err)
|
||||
}
|
||||
s := string(p)
|
||||
if !strings.HasPrefix(s, examplePrefix) {
|
||||
t.Errorf("HttpGet:\n Expect => %s\n Got => %s\n", examplePrefix, s)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHttpGetBytes(t *testing.T) {
|
||||
p, err := HttpGetBytes(&http.Client{}, "http://example.com", nil)
|
||||
if err != nil {
|
||||
t.Errorf("HttpGetBytes:\n Expect => %v\n Got => %s\n", nil, err)
|
||||
}
|
||||
s := string(p)
|
||||
if !strings.HasPrefix(s, examplePrefix) {
|
||||
t.Errorf("HttpGet:\n Expect => %s\n Got => %s\n", examplePrefix, s)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHttpGetJSON(t *testing.T) {
|
||||
|
||||
}
|
||||
|
||||
type rawFile struct {
|
||||
name string
|
||||
rawURL string
|
||||
data []byte
|
||||
}
|
||||
|
||||
func (rf *rawFile) Name() string {
|
||||
return rf.name
|
||||
}
|
||||
|
||||
func (rf *rawFile) RawUrl() string {
|
||||
return rf.rawURL
|
||||
}
|
||||
|
||||
func (rf *rawFile) Data() []byte {
|
||||
return rf.data
|
||||
}
|
||||
|
||||
func (rf *rawFile) SetData(p []byte) {
|
||||
rf.data = p
|
||||
}
|
||||
|
||||
func TestFetchFiles(t *testing.T) {
|
||||
files := []RawFile{
|
||||
&rawFile{rawURL: "http://example.com"},
|
||||
&rawFile{rawURL: "http://example.com"},
|
||||
}
|
||||
err := FetchFiles(&http.Client{}, files, nil)
|
||||
if err != nil {
|
||||
t.Errorf("FetchFiles:\n Expect => %v\n Got => %s\n", nil, err)
|
||||
} else if len(files[0].Data()) != 1270 {
|
||||
t.Errorf("FetchFiles:\n Expect => %d\n Got => %d\n", 1270, len(files[0].Data()))
|
||||
} else if len(files[1].Data()) != 1270 {
|
||||
t.Errorf("FetchFiles:\n Expect => %d\n Got => %d\n", 1270, len(files[1].Data()))
|
||||
}
|
||||
}
|
||||
|
||||
func TestFetchFilesCurl(t *testing.T) {
|
||||
files := []RawFile{
|
||||
&rawFile{rawURL: "http://example.com"},
|
||||
&rawFile{rawURL: "http://example.com"},
|
||||
}
|
||||
err := FetchFilesCurl(files)
|
||||
if err != nil {
|
||||
t.Errorf("FetchFilesCurl:\n Expect => %v\n Got => %s\n", nil, err)
|
||||
} else if len(files[0].Data()) != 1270 {
|
||||
t.Errorf("FetchFilesCurl:\n Expect => %d\n Got => %d\n", 1270, len(files[0].Data()))
|
||||
} else if len(files[1].Data()) != 1270 {
|
||||
t.Errorf("FetchFilesCurl:\n Expect => %d\n Got => %d\n", 1270, len(files[1].Data()))
|
||||
}
|
||||
}
|
||||
24 Godeps/_workspace/src/github.com/Unknwon/com/math.go (generated, vendored, Normal file)
@@ -0,0 +1,24 @@
|
||||
// Copyright 2014 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
// PowInt is int type of math.Pow function.
|
||||
func PowInt(x int, y int) int {
|
||||
num := 1
|
||||
for i := 0; i < y; i++ {
|
||||
num *= x
|
||||
}
|
||||
return num
|
||||
}
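
A quick usage note, since the loop form differs from math.Pow: the exponent is applied by repeated multiplication, so a negative exponent never enters the loop and the result is simply 1. A tiny sketch, assuming the package is imported from the path shown above:

```go
package main

import (
	"fmt"

	"github.com/Unknwon/com"
)

func main() {
	fmt.Println(com.PowInt(2, 10)) // 1024
	fmt.Println(com.PowInt(3, 0))  // 1
	fmt.Println(com.PowInt(2, -1)) // 1: negative exponents never enter the loop
}
```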
|
||||
80 Godeps/_workspace/src/github.com/Unknwon/com/path.go (generated, vendored, Normal file)
@@ -0,0 +1,80 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// GetGOPATHs returns all paths in GOPATH variable.
|
||||
func GetGOPATHs() []string {
|
||||
gopath := os.Getenv("GOPATH")
|
||||
var paths []string
|
||||
if runtime.GOOS == "windows" {
|
||||
gopath = strings.Replace(gopath, "\\", "/", -1)
|
||||
paths = strings.Split(gopath, ";")
|
||||
} else {
|
||||
paths = strings.Split(gopath, ":")
|
||||
}
|
||||
return paths
|
||||
}
|
||||
|
||||
// GetSrcPath returns the application's source code path.
// It only works when the import path exists under a src folder in GOPATH;
// it returns an error when it is unable to locate the source folder.
|
||||
func GetSrcPath(importPath string) (appPath string, err error) {
|
||||
paths := GetGOPATHs()
|
||||
for _, p := range paths {
|
||||
if IsExist(p + "/src/" + importPath + "/") {
|
||||
appPath = p + "/src/" + importPath + "/"
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if len(appPath) == 0 {
|
||||
return "", errors.New("Unable to locate source folder path")
|
||||
}
|
||||
|
||||
appPath = filepath.Dir(appPath) + "/"
|
||||
if runtime.GOOS == "windows" {
|
||||
// Replace all '\' to '/'.
|
||||
appPath = strings.Replace(appPath, "\\", "/", -1)
|
||||
}
|
||||
|
||||
return appPath, nil
|
||||
}
|
||||
|
||||
// HomeDir returns the home directory path ('~' on Linux, the user profile on Windows);
// it returns an error when the relevant environment variable is not set.
|
||||
func HomeDir() (home string, err error) {
|
||||
if runtime.GOOS == "windows" {
|
||||
home = os.Getenv("HOMEDRIVE") + os.Getenv("HOMEPATH")
|
||||
if home == "" {
|
||||
home = os.Getenv("USERPROFILE")
|
||||
}
|
||||
} else {
|
||||
home = os.Getenv("HOME")
|
||||
}
|
||||
|
||||
if len(home) == 0 {
|
||||
return "", errors.New("Cannot specify home directory because it's empty")
|
||||
}
|
||||
|
||||
return home, nil
|
||||
}
|
||||
67 Godeps/_workspace/src/github.com/Unknwon/com/path_test.go (generated, vendored, Normal file)
@@ -0,0 +1,67 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"os"
|
||||
"runtime"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestGetGOPATHs(t *testing.T) {
|
||||
var gpsR []string
|
||||
|
||||
if runtime.GOOS != "windows" {
|
||||
gpsR = []string{"path/to/gopath1", "path/to/gopath2", "path/to/gopath3"}
|
||||
os.Setenv("GOPATH", "path/to/gopath1:path/to/gopath2:path/to/gopath3")
|
||||
} else {
|
||||
gpsR = []string{"path/to/gopath1", "path/to/gopath2", "path/to/gopath3"}
|
||||
os.Setenv("GOPATH", "path\\to\\gopath1;path\\to\\gopath2;path\\to\\gopath3")
|
||||
}
|
||||
|
||||
gps := GetGOPATHs()
|
||||
if !CompareSliceStr(gps, gpsR) {
|
||||
t.Errorf("GetGOPATHs:\n Expect => %s\n Got => %s\n", gpsR, gps)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetSrcPath(t *testing.T) {
|
||||
|
||||
}
|
||||
|
||||
func TestHomeDir(t *testing.T) {
|
||||
_, err := HomeDir()
|
||||
if err != nil {
|
||||
t.Errorf("HomeDir:\n Expect => %v\n Got => %s\n", nil, err)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkGetGOPATHs(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
GetGOPATHs()
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkGetSrcPath(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
GetSrcPath("github.com/Unknwon/com")
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkHomeDir(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
HomeDir()
|
||||
}
|
||||
}
|
||||
56 Godeps/_workspace/src/github.com/Unknwon/com/regex.go (generated, vendored, Normal file)
@@ -0,0 +1,56 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import "regexp"
|
||||
|
||||
const (
|
||||
regex_email_pattern = `(?i)[A-Z0-9._%+-]+@(?:[A-Z0-9-]+\.)+[A-Z]{2,6}`
|
||||
regex_strict_email_pattern = `(?i)[A-Z0-9!#$%&'*+/=?^_{|}~-]+` +
|
||||
`(?:\.[A-Z0-9!#$%&'*+/=?^_{|}~-]+)*` +
|
||||
`@(?:[A-Z0-9](?:[A-Z0-9-]*[A-Z0-9])?\.)+` +
|
||||
`[A-Z0-9](?:[A-Z0-9-]*[A-Z0-9])?`
|
||||
regex_url_pattern = `(ftp|http|https):\/\/(\w+:{0,1}\w*@)?(\S+)(:[0-9]+)?(\/|\/([\w#!:.?+=&%@!\-\/]))?`
|
||||
)
|
||||
|
||||
var (
|
||||
regex_email *regexp.Regexp
|
||||
regex_strict_email *regexp.Regexp
|
||||
regex_url *regexp.Regexp
|
||||
)
|
||||
|
||||
func init() {
|
||||
regex_email = regexp.MustCompile(regex_email_pattern)
|
||||
regex_strict_email = regexp.MustCompile(regex_strict_email_pattern)
|
||||
regex_url = regexp.MustCompile(regex_url_pattern)
|
||||
}
|
||||
|
||||
// IsEmail reports whether the string is an email address.
// This basic validation matches about 99% of cases.
|
||||
func IsEmail(email string) bool {
|
||||
return regex_email.MatchString(email)
|
||||
}
|
||||
|
||||
// IsEmailRFC reports whether the string is an email address using the stricter pattern.
// This validation omits RFC 2822.
|
||||
func IsEmailRFC(email string) bool {
|
||||
return regex_strict_email.MatchString(email)
|
||||
}
|
||||
|
||||
// IsUrl reports whether the string is a URL.
// This simple validation matches about 99% of cases.
|
||||
func IsUrl(url string) bool {
|
||||
return regex_url.MatchString(url)
|
||||
}
|
||||
70 Godeps/_workspace/src/github.com/Unknwon/com/regex_test.go (generated, vendored, Normal file)
@@ -0,0 +1,70 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestIsEmail(t *testing.T) {
|
||||
emails := map[string]bool{
|
||||
`test@example.com`: true,
|
||||
`single-character@b.org`: true,
|
||||
`uncommon_address@test.museum`: true,
|
||||
`local@sld.UPPER`: true,
|
||||
`@missing.org`: false,
|
||||
`missing@.com`: false,
|
||||
`missing@qq.`: false,
|
||||
`wrong-ip@127.1.1.1.26`: false,
|
||||
}
|
||||
for e, r := range emails {
|
||||
b := IsEmail(e)
|
||||
if b != r {
|
||||
t.Errorf("IsEmail:\n Expect => %v\n Got => %v\n", r, b)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestIsUrl(t *testing.T) {
|
||||
urls := map[string]bool{
|
||||
"http://www.example.com": true,
|
||||
"http://example.com": true,
|
||||
"http://example.com?user=test&password=test": true,
|
||||
"http://example.com?user=test#login": true,
|
||||
"ftp://example.com": true,
|
||||
"https://example.com": true,
|
||||
"htp://example.com": false,
|
||||
"http//example.com": false,
|
||||
"http://example": true,
|
||||
}
|
||||
for u, r := range urls {
|
||||
b := IsUrl(u)
|
||||
if b != r {
|
||||
t.Errorf("IsUrl:\n Expect => %v\n Got => %v\n", r, b)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIsEmail(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
IsEmail("test@example.com")
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIsUrl(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
IsEmail("http://example.com")
|
||||
}
|
||||
}
|
||||
87 Godeps/_workspace/src/github.com/Unknwon/com/slice.go (generated, vendored, Normal file)
@@ -0,0 +1,87 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"strings"
|
||||
)
|
||||
|
||||
// AppendStr appends string to slice with no duplicates.
|
||||
func AppendStr(strs []string, str string) []string {
|
||||
for _, s := range strs {
|
||||
if s == str {
|
||||
return strs
|
||||
}
|
||||
}
|
||||
return append(strs, str)
|
||||
}
|
||||
|
||||
// CompareSliceStr compares two 'string' type slices.
|
||||
// It returns true if elements and order are both the same.
|
||||
func CompareSliceStr(s1, s2 []string) bool {
|
||||
if len(s1) != len(s2) {
|
||||
return false
|
||||
}
|
||||
|
||||
for i := range s1 {
|
||||
if s1[i] != s2[i] {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
// CompareSliceStrU compares two 'string' type slices.
// It returns true if the elements are the same, ignoring order.
|
||||
func CompareSliceStrU(s1, s2 []string) bool {
|
||||
if len(s1) != len(s2) {
|
||||
return false
|
||||
}
|
||||
|
||||
for i := range s1 {
|
||||
for j := len(s2) - 1; j >= 0; j-- {
|
||||
if s1[i] == s2[j] {
|
||||
s2 = append(s2[:j], s2[j+1:]...)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if len(s2) > 0 {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// IsSliceContainsStr returns true if the string exists in given slice, ignore case.
|
||||
func IsSliceContainsStr(sl []string, str string) bool {
|
||||
str = strings.ToLower(str)
|
||||
for _, s := range sl {
|
||||
if strings.ToLower(s) == str {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// IsSliceContainsInt64 returns true if the int64 exists in given slice.
|
||||
func IsSliceContainsInt64(sl []int64, i int64) bool {
|
||||
for _, s := range sl {
|
||||
if s == i {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
99 Godeps/_workspace/src/github.com/Unknwon/com/slice_test.go (generated, vendored, Normal file)
@@ -0,0 +1,99 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
)
|
||||
|
||||
func TestAppendStr(t *testing.T) {
|
||||
Convey("Append a string to a slice with no duplicates", t, func() {
|
||||
s := []string{"a"}
|
||||
|
||||
Convey("Append a string that does not exist in slice", func() {
|
||||
s = AppendStr(s, "b")
|
||||
So(len(s), ShouldEqual, 2)
|
||||
})
|
||||
|
||||
Convey("Append a string that does exist in slice", func() {
|
||||
s = AppendStr(s, "b")
|
||||
So(len(s), ShouldEqual, 2)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
func TestCompareSliceStr(t *testing.T) {
|
||||
Convey("Compares two 'string' type slices with elements and order", t, func() {
|
||||
Convey("Compare two slices that do have same elements and order", func() {
|
||||
So(CompareSliceStr(
|
||||
[]string{"1", "2", "3"}, []string{"1", "2", "3"}), ShouldBeTrue)
|
||||
})
|
||||
|
||||
Convey("Compare two slices that do have same elements but does not have same order", func() {
|
||||
So(!CompareSliceStr(
|
||||
[]string{"2", "1", "3"}, []string{"1", "2", "3"}), ShouldBeTrue)
|
||||
})
|
||||
|
||||
Convey("Compare two slices that have different number of elements", func() {
|
||||
So(!CompareSliceStr(
|
||||
[]string{"2", "1"}, []string{"1", "2", "3"}), ShouldBeTrue)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
func TestCompareSliceStrU(t *testing.T) {
|
||||
Convey("Compare two 'string' type slices with elements and ignore the order", t, func() {
|
||||
Convey("Compare two slices that do have same elements and order", func() {
|
||||
So(CompareSliceStrU(
|
||||
[]string{"1", "2", "3"}, []string{"1", "2", "3"}), ShouldBeTrue)
|
||||
})
|
||||
|
||||
Convey("Compare two slices that do have same elements but does not have same order", func() {
|
||||
So(CompareSliceStrU(
|
||||
[]string{"2", "1", "3"}, []string{"1", "2", "3"}), ShouldBeTrue)
|
||||
})
|
||||
|
||||
Convey("Compare two slices that have different number of elements", func() {
|
||||
So(!CompareSliceStrU(
|
||||
[]string{"2", "1"}, []string{"1", "2", "3"}), ShouldBeTrue)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
func BenchmarkAppendStr(b *testing.B) {
|
||||
s := []string{"a"}
|
||||
for i := 0; i < b.N; i++ {
|
||||
s = AppendStr(s, fmt.Sprint(b.N%3))
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkCompareSliceStr(b *testing.B) {
|
||||
s1 := []string{"1", "2", "3"}
|
||||
s2 := []string{"1", "2", "3"}
|
||||
for i := 0; i < b.N; i++ {
|
||||
CompareSliceStr(s1, s2)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkCompareSliceStrU(b *testing.B) {
|
||||
s1 := []string{"1", "4", "2", "3"}
|
||||
s2 := []string{"1", "2", "3", "4"}
|
||||
for i := 0; i < b.N; i++ {
|
||||
CompareSliceStrU(s1, s2)
|
||||
}
|
||||
}
|
||||
140 Godeps/_workspace/src/github.com/Unknwon/com/string.go (generated, vendored, Normal file)
@@ -0,0 +1,140 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"crypto/aes"
|
||||
"crypto/cipher"
|
||||
"crypto/rand"
|
||||
"encoding/base64"
|
||||
"errors"
|
||||
"io"
|
||||
r "math/rand"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// AESEncrypt encrypts text with the given key using AES (CFB mode).
|
||||
func AESEncrypt(key, text []byte) ([]byte, error) {
|
||||
block, err := aes.NewCipher(key)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
b := base64.StdEncoding.EncodeToString(text)
|
||||
ciphertext := make([]byte, aes.BlockSize+len(b))
|
||||
iv := ciphertext[:aes.BlockSize]
|
||||
if _, err := io.ReadFull(rand.Reader, iv); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
cfb := cipher.NewCFBEncrypter(block, iv)
|
||||
cfb.XORKeyStream(ciphertext[aes.BlockSize:], []byte(b))
|
||||
return ciphertext, nil
|
||||
}
|
||||
|
||||
// AESDecrypt decrypts text with the given key using AES (CFB mode).
|
||||
func AESDecrypt(key, text []byte) ([]byte, error) {
|
||||
block, err := aes.NewCipher(key)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(text) < aes.BlockSize {
|
||||
return nil, errors.New("ciphertext too short")
|
||||
}
|
||||
iv := text[:aes.BlockSize]
|
||||
text = text[aes.BlockSize:]
|
||||
cfb := cipher.NewCFBDecrypter(block, iv)
|
||||
cfb.XORKeyStream(text, text)
|
||||
data, err := base64.StdEncoding.DecodeString(string(text))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return data, nil
|
||||
}
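
A round-trip sketch of these two helpers; note that aes.NewCipher requires a key of 16, 24, or 32 bytes, so the sample key below is a 16-byte placeholder:

```go
package main

import (
	"fmt"

	"github.com/Unknwon/com"
)

func main() {
	key := []byte("0123456789abcdef") // 16-byte placeholder key (AES-128)

	ciphertext, err := com.AESEncrypt(key, []byte("secret message"))
	if err != nil {
		panic(err)
	}

	plaintext, err := com.AESDecrypt(key, ciphertext)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(plaintext)) // "secret message"
}
```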
|
||||
|
||||
// IsLetter returns true if the 'l' is an English letter.
|
||||
func IsLetter(l uint8) bool {
|
||||
n := (l | 0x20) - 'a'
|
||||
if n < 26 { // n is unsigned, so the lower bound is always satisfied
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Expand replaces {k} in template with match[k] or subs[atoi(k)] if k is not in match.
|
||||
func Expand(template string, match map[string]string, subs ...string) string {
|
||||
var p []byte
|
||||
var i int
|
||||
for {
|
||||
i = strings.Index(template, "{")
|
||||
if i < 0 {
|
||||
break
|
||||
}
|
||||
p = append(p, template[:i]...)
|
||||
template = template[i+1:]
|
||||
i = strings.Index(template, "}")
|
||||
if s, ok := match[template[:i]]; ok {
|
||||
p = append(p, s...)
|
||||
} else {
|
||||
j, _ := strconv.Atoi(template[:i])
|
||||
if j >= len(subs) {
|
||||
p = append(p, []byte("Missing")...)
|
||||
} else {
|
||||
p = append(p, subs[j]...)
|
||||
}
|
||||
}
|
||||
template = template[i+1:]
|
||||
}
|
||||
p = append(p, template...)
|
||||
return string(p)
|
||||
}
|
||||
|
||||
// Reverse reverses a string, with Unicode support.
|
||||
func Reverse(s string) string {
|
||||
n := len(s)
|
||||
runes := make([]rune, n)
|
||||
for _, rune := range s {
|
||||
n--
|
||||
runes[n] = rune
|
||||
}
|
||||
return string(runes[n:])
|
||||
}
|
||||
|
||||
// RandomCreateBytes generates a random []byte drawn from the specified characters (alphanumerics by default).
|
||||
func RandomCreateBytes(n int, alphabets ...byte) []byte {
|
||||
const alphanum = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
|
||||
var bytes = make([]byte, n)
|
||||
var randby bool
|
||||
if num, err := rand.Read(bytes); num != n || err != nil {
|
||||
r.Seed(time.Now().UnixNano())
|
||||
randby = true
|
||||
}
|
||||
for i, b := range bytes {
|
||||
if len(alphabets) == 0 {
|
||||
if randby {
|
||||
bytes[i] = alphanum[r.Intn(len(alphanum))]
|
||||
} else {
|
||||
bytes[i] = alphanum[b%byte(len(alphanum))]
|
||||
}
|
||||
} else {
|
||||
if randby {
|
||||
bytes[i] = alphabets[r.Intn(len(alphabets))]
|
||||
} else {
|
||||
bytes[i] = alphabets[b%byte(len(alphabets))]
|
||||
}
|
||||
}
|
||||
}
|
||||
return bytes
|
||||
}
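
A short sketch of the two calling styles: with no extra arguments the alphanumeric set above is used; otherwise bytes are drawn only from the supplied characters:

```go
package main

import (
	"fmt"

	"github.com/Unknwon/com"
)

func main() {
	// 16 random alphanumeric bytes, e.g. for a session token.
	fmt.Println(string(com.RandomCreateBytes(16)))

	// 6 random bytes drawn only from the digits 0-9.
	fmt.Println(string(com.RandomCreateBytes(6, []byte("0123456789")...)))
}
```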
|
||||
82 Godeps/_workspace/src/github.com/Unknwon/com/string_test.go (generated, vendored, Normal file)
@@ -0,0 +1,82 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestIsLetter(t *testing.T) {
|
||||
if IsLetter('1') {
|
||||
t.Errorf("IsLetter:\n Expect => %v\n Got => %v\n", false, true)
|
||||
}
|
||||
|
||||
if IsLetter('[') {
|
||||
t.Errorf("IsLetter:\n Expect => %v\n Got => %v\n", false, true)
|
||||
}
|
||||
|
||||
if !IsLetter('a') {
|
||||
t.Errorf("IsLetter:\n Expect => %v\n Got => %v\n", true, false)
|
||||
}
|
||||
|
||||
if !IsLetter('Z') {
|
||||
t.Errorf("IsLetter:\n Expect => %v\n Got => %v\n", true, false)
|
||||
}
|
||||
}
|
||||
|
||||
func TestExpand(t *testing.T) {
|
||||
match := map[string]string{
|
||||
"domain": "gowalker.org",
|
||||
"subdomain": "github.com",
|
||||
}
|
||||
s := "http://{domain}/{subdomain}/{0}/{1}"
|
||||
sR := "http://gowalker.org/github.com/Unknwon/gowalker"
|
||||
if Expand(s, match, "Unknwon", "gowalker") != sR {
|
||||
t.Errorf("Expand:\n Expect => %s\n Got => %s\n", sR, s)
|
||||
}
|
||||
}
|
||||
|
||||
func TestReverse(t *testing.T) {
|
||||
if Reverse("abcdefg") != "gfedcba" {
|
||||
t.Errorf("Reverse:\n Except => %s\n Got =>%s\n", "gfedcba", Reverse("abcdefg"))
|
||||
}
|
||||
if Reverse("上善若水厚德载物") != "物载德厚水若善上" {
|
||||
t.Errorf("Reverse:\n Except => %s\n Got =>%s\n", "物载德厚水若善上", Reverse("上善若水厚德载物"))
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIsLetter(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
IsLetter('a')
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkExpand(b *testing.B) {
|
||||
match := map[string]string{
|
||||
"domain": "gowalker.org",
|
||||
"subdomain": "github.com",
|
||||
}
|
||||
s := "http://{domain}/{subdomain}/{0}/{1}"
|
||||
for i := 0; i < b.N; i++ {
|
||||
Expand(s, match, "Unknwon", "gowalker")
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkReverse(b *testing.B) {
|
||||
s := "abscef中文"
|
||||
for i := 0; i < b.N; i++ {
|
||||
Reverse(s)
|
||||
}
|
||||
}
|
||||
1 Godeps/_workspace/src/github.com/Unknwon/com/testdata/SaveFile.txt (generated, vendored, Normal file)
@@ -0,0 +1 @@
TestSaveFile
1 Godeps/_workspace/src/github.com/Unknwon/com/testdata/SaveFileS.txt (generated, vendored, Normal file)
@@ -0,0 +1 @@
TestSaveFileS
0 Godeps/_workspace/src/github.com/Unknwon/com/testdata/sample_file.txt (generated, vendored, Normal file)
1 Godeps/_workspace/src/github.com/Unknwon/com/testdata/statDir/SaveFile.txt (generated, vendored, Normal file)
@@ -0,0 +1 @@
TestSaveFile
1 Godeps/_workspace/src/github.com/Unknwon/com/testdata/statDir/SaveFileS.txt (generated, vendored, Normal file)
@@ -0,0 +1 @@
TestSaveFileS
0 Godeps/_workspace/src/github.com/Unknwon/com/testdata/statDir/sample_file.txt (generated, vendored, Normal file)
1 Godeps/_workspace/src/github.com/Unknwon/com/testdata/statDir/secondLevel/SaveFile.txt (generated, vendored, Normal file)
@@ -0,0 +1 @@
TestSaveFile
1 Godeps/_workspace/src/github.com/Unknwon/com/testdata/statDir/secondLevel/SaveFileS.txt (generated, vendored, Normal file)
@@ -0,0 +1 @@
TestSaveFileS
0 Godeps/_workspace/src/github.com/Unknwon/com/testdata/statDir/secondLevel/sample_file.txt (generated, vendored, Normal file)
115 Godeps/_workspace/src/github.com/Unknwon/com/time.go (generated, vendored, Normal file)
@@ -0,0 +1,115 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Date formats a Unix timestamp (int64) to a string using the given format.
|
||||
func Date(ti int64, format string) string {
|
||||
t := time.Unix(int64(ti), 0)
|
||||
return DateT(t, format)
|
||||
}
|
||||
|
||||
// DateS formats a Unix timestamp given as a string, using the given format.
|
||||
func DateS(ts string, format string) string {
|
||||
i, _ := strconv.ParseInt(ts, 10, 64)
|
||||
return Date(i, format)
|
||||
}
|
||||
|
||||
// DateT formats a time.Time value to a string using these placeholders:
|
||||
// MM - month - 01
|
||||
// M - month - 1, single bit
|
||||
// DD - day - 02
|
||||
// D - day 2
|
||||
// YYYY - year - 2006
|
||||
// YY - year - 06
|
||||
// HH - 24 hours - 03
|
||||
// H - 24 hours - 3
|
||||
// hh - 12 hours - 03
|
||||
// h - 12 hours - 3
|
||||
// mm - minute - 04
|
||||
// m - minute - 4
|
||||
// ss - second - 05
|
||||
// s - second - 5
|
||||
func DateT(t time.Time, format string) string {
|
||||
res := strings.Replace(format, "MM", t.Format("01"), -1)
|
||||
res = strings.Replace(res, "M", t.Format("1"), -1)
|
||||
res = strings.Replace(res, "DD", t.Format("02"), -1)
|
||||
res = strings.Replace(res, "D", t.Format("2"), -1)
|
||||
res = strings.Replace(res, "YYYY", t.Format("2006"), -1)
|
||||
res = strings.Replace(res, "YY", t.Format("06"), -1)
|
||||
res = strings.Replace(res, "HH", fmt.Sprintf("%02d", t.Hour()), -1)
|
||||
res = strings.Replace(res, "H", fmt.Sprintf("%d", t.Hour()), -1)
|
||||
res = strings.Replace(res, "hh", t.Format("03"), -1)
|
||||
res = strings.Replace(res, "h", t.Format("3"), -1)
|
||||
res = strings.Replace(res, "mm", t.Format("04"), -1)
|
||||
res = strings.Replace(res, "m", t.Format("4"), -1)
|
||||
res = strings.Replace(res, "ss", t.Format("05"), -1)
|
||||
res = strings.Replace(res, "s", t.Format("5"), -1)
|
||||
return res
|
||||
}
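
Given the placeholder table above, a small sketch of Date and DateT; the timestamp and format strings are arbitrary, and the output depends on the local time zone:

```go
package main

import (
	"fmt"
	"time"

	"github.com/Unknwon/com"
)

func main() {
	// Format a Unix timestamp with the placeholder syntax documented above.
	fmt.Println(com.Date(1420070400, "YYYY-MM-DD HH:mm:ss"))

	// DateT works directly on a time.Time value.
	fmt.Println(com.DateT(time.Now(), "YY/M/D h:m:s"))
}
```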
|
||||
|
||||
// DateFormat pattern rules.
|
||||
var datePatterns = []string{
|
||||
// year
|
||||
"Y", "2006", // A full numeric representation of a year, 4 digits Examples: 1999 or 2003
|
||||
"y", "06", //A two digit representation of a year Examples: 99 or 03
|
||||
|
||||
// month
|
||||
"m", "01", // Numeric representation of a month, with leading zeros 01 through 12
|
||||
"n", "1", // Numeric representation of a month, without leading zeros 1 through 12
|
||||
"M", "Jan", // A short textual representation of a month, three letters Jan through Dec
|
||||
"F", "January", // A full textual representation of a month, such as January or March January through December
|
||||
|
||||
// day
|
||||
"d", "02", // Day of the month, 2 digits with leading zeros 01 to 31
|
||||
"j", "2", // Day of the month without leading zeros 1 to 31
|
||||
|
||||
// week
|
||||
"D", "Mon", // A textual representation of a day, three letters Mon through Sun
|
||||
"l", "Monday", // A full textual representation of the day of the week Sunday through Saturday
|
||||
|
||||
// time
|
||||
"g", "3", // 12-hour format of an hour without leading zeros 1 through 12
|
||||
"G", "15", // 24-hour format of an hour without leading zeros 0 through 23
|
||||
"h", "03", // 12-hour format of an hour with leading zeros 01 through 12
|
||||
"H", "15", // 24-hour format of an hour with leading zeros 00 through 23
|
||||
|
||||
"a", "pm", // Lowercase Ante meridiem and Post meridiem am or pm
|
||||
"A", "PM", // Uppercase Ante meridiem and Post meridiem AM or PM
|
||||
|
||||
"i", "04", // Minutes with leading zeros 00 to 59
|
||||
"s", "05", // Seconds, with leading zeros 00 through 59
|
||||
|
||||
// time zone
|
||||
"T", "MST",
|
||||
"P", "-07:00",
|
||||
"O", "-0700",
|
||||
|
||||
// RFC 2822
|
||||
"r", time.RFC1123Z,
|
||||
}
|
||||
|
||||
// DateParse parses a date string using a PHP-style time format.
|
||||
func DateParse(dateString, format string) (time.Time, error) {
|
||||
replacer := strings.NewReplacer(datePatterns...)
|
||||
format = replacer.Replace(format)
|
||||
return time.ParseInLocation(format, dateString, time.Local)
|
||||
}
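
Since datePatterns maps PHP-style tokens onto Go's reference time, DateParse can be sketched like this (the date string is arbitrary):

```go
package main

import (
	"fmt"

	"github.com/Unknwon/com"
)

func main() {
	// "Y-m-d H:i:s" expands to the Go layout "2006-01-02 15:04:05".
	t, err := com.DateParse("2015-01-02 15:04:05", "Y-m-d H:i:s")
	if err != nil {
		panic(err)
	}
	fmt.Println(t.Year(), t.Month(), t.Day())
}
```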
|
||||
41 Godeps/_workspace/src/github.com/Unknwon/com/url.go (generated, vendored, Normal file)
@@ -0,0 +1,41 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"net/url"
|
||||
)
|
||||
|
||||
// UrlEncode URL-encodes a string (spaces become '+', not '%20').
|
||||
func UrlEncode(str string) string {
|
||||
return url.QueryEscape(str)
|
||||
}
|
||||
|
||||
// UrlDecode URL-decodes a string.
|
||||
func UrlDecode(str string) (string, error) {
|
||||
return url.QueryUnescape(str)
|
||||
}
|
||||
|
||||
// Base64Encode base64-encodes a string.
|
||||
func Base64Encode(str string) string {
|
||||
return base64.StdEncoding.EncodeToString([]byte(str))
|
||||
}
|
||||
|
||||
// Base64Decode base64-decodes a string.
|
||||
func Base64Decode(str string) (string, error) {
|
||||
s, e := base64.StdEncoding.DecodeString(str)
|
||||
return string(s), e
|
||||
}
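
These four helpers are thin wrappers over net/url and encoding/base64; a short round-trip sketch:

```go
package main

import (
	"fmt"

	"github.com/Unknwon/com"
)

func main() {
	// Query escaping: spaces become '+', as noted above.
	enc := com.UrlEncode("hello world & more")
	dec, _ := com.UrlDecode(enc)
	fmt.Println(enc, "->", dec)

	// Base64 round trip.
	b64 := com.Base64Encode("hello")
	s, _ := com.Base64Decode(b64)
	fmt.Println(b64, "->", s)
}
```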
|
||||
2 Godeps/_workspace/src/github.com/Unknwon/macaron/.gitignore (generated, vendored, Normal file)
@@ -0,0 +1,2 @@
|
||||
macaron.sublime-project
|
||||
macaron.sublime-workspace
|
||||
191 Godeps/_workspace/src/github.com/Unknwon/macaron/LICENSE (generated, vendored, Normal file)
@@ -0,0 +1,191 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction, and
|
||||
distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by the copyright
|
||||
owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all other entities
|
||||
that control, are controlled by, or are under common control with that entity.
|
||||
For the purposes of this definition, "control" means (i) the power, direct or
|
||||
indirect, to cause the direction or management of such entity, whether by
|
||||
contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity exercising
|
||||
permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications, including
|
||||
but not limited to software source code, documentation source, and configuration
|
||||
files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical transformation or
|
||||
translation of a Source form, including but not limited to compiled object code,
|
||||
generated documentation, and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or Object form, made
|
||||
available under the License, as indicated by a copyright notice that is included
|
||||
in or attached to the work (an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object form, that
|
||||
is based on (or derived from) the Work and for which the editorial revisions,
|
||||
annotations, elaborations, or other modifications represent, as a whole, an
|
||||
original work of authorship. For the purposes of this License, Derivative Works
|
||||
shall not include works that remain separable from, or merely link (or bind by
|
||||
name) to the interfaces of, the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including the original version
|
||||
of the Work and any modifications or additions to that Work or Derivative Works
|
||||
thereof, that is intentionally submitted to Licensor for inclusion in the Work
|
||||
by the copyright owner or by an individual or Legal Entity authorized to submit
|
||||
on behalf of the copyright owner. For the purposes of this definition,
|
||||
"submitted" means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems, and
|
||||
issue tracking systems that are managed by, or on behalf of, the Licensor for
|
||||
the purpose of discussing and improving the Work, but excluding communication
|
||||
that is conspicuously marked or otherwise designated in writing by the copyright
|
||||
owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf
|
||||
of whom a Contribution has been received by Licensor and subsequently
|
||||
incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License.
|
||||
|
||||
Subject to the terms and conditions of this License, each Contributor hereby
|
||||
grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
|
||||
irrevocable copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the Work and such
|
||||
Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License.
|
||||
|
||||
Subject to the terms and conditions of this License, each Contributor hereby
|
||||
grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
|
||||
irrevocable (except as stated in this section) patent license to make, have
|
||||
made, use, offer to sell, sell, import, and otherwise transfer the Work, where
|
||||
such license applies only to those patent claims licensable by such Contributor
|
||||
that are necessarily infringed by their Contribution(s) alone or by combination
|
||||
of their Contribution(s) with the Work to which such Contribution(s) was
|
||||
submitted. If You institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work or a
|
||||
Contribution incorporated within the Work constitutes direct or contributory
|
||||
patent infringement, then any patent licenses granted to You under this License
|
||||
for that Work shall terminate as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution.
|
||||
|
||||
You may reproduce and distribute copies of the Work or Derivative Works thereof
|
||||
in any medium, with or without modifications, and in Source or Object form,
|
||||
provided that You meet the following conditions:
|
||||
|
||||
You must give any other recipients of the Work or Derivative Works a copy of
|
||||
this License; and
|
||||
You must cause any modified files to carry prominent notices stating that You
|
||||
changed the files; and
|
||||
You must retain, in the Source form of any Derivative Works that You distribute,
|
||||
all copyright, patent, trademark, and attribution notices from the Source form
|
||||
of the Work, excluding those notices that do not pertain to any part of the
|
||||
Derivative Works; and
|
||||
If the Work includes a "NOTICE" text file as part of its distribution, then any
|
||||
Derivative Works that You distribute must include a readable copy of the
|
||||
attribution notices contained within such NOTICE file, excluding those notices
|
||||
that do not pertain to any part of the Derivative Works, in at least one of the
|
||||
following places: within a NOTICE text file distributed as part of the
|
||||
Derivative Works; within the Source form or documentation, if provided along
|
||||
with the Derivative Works; or, within a display generated by the Derivative
|
||||
Works, if and wherever such third-party notices normally appear. The contents of
|
||||
the NOTICE file are for informational purposes only and do not modify the
|
||||
License. You may add Your own attribution notices within Derivative Works that
|
||||
You distribute, alongside or as an addendum to the NOTICE text from the Work,
|
||||
provided that such additional attribution notices cannot be construed as
|
||||
modifying the License.
|
||||
You may add Your own copyright statement to Your modifications and may provide
|
||||
additional or different license terms and conditions for use, reproduction, or
|
||||
distribution of Your modifications, or for any such Derivative Works as a whole,
|
||||
provided Your use, reproduction, and distribution of the Work otherwise complies
|
||||
with the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions.
|
||||
|
||||
Unless You explicitly state otherwise, any Contribution intentionally submitted
|
||||
for inclusion in the Work by You to the Licensor shall be under the terms and
|
||||
conditions of this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify the terms of
|
||||
any separate license agreement you may have executed with Licensor regarding
|
||||
such Contributions.
|
||||
|
||||
6. Trademarks.
|
||||
|
||||
This License does not grant permission to use the trade names, trademarks,
|
||||
service marks, or product names of the Licensor, except as required for
|
||||
reasonable and customary use in describing the origin of the Work and
|
||||
reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty.
|
||||
|
||||
Unless required by applicable law or agreed to in writing, Licensor provides the
|
||||
Work (and each Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied,
|
||||
including, without limitation, any warranties or conditions of TITLE,
|
||||
NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are
|
||||
solely responsible for determining the appropriateness of using or
|
||||
redistributing the Work and assume any risks associated with Your exercise of
|
||||
permissions under this License.
|
||||
|
||||
8. Limitation of Liability.
|
||||
|
||||
In no event and under no legal theory, whether in tort (including negligence),
|
||||
contract, or otherwise, unless required by applicable law (such as deliberate
|
||||
and grossly negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special, incidental,
|
||||
or consequential damages of any character arising as a result of this License or
|
||||
out of the use or inability to use the Work (including but not limited to
|
||||
damages for loss of goodwill, work stoppage, computer failure or malfunction, or
|
||||
any and all other commercial damages or losses), even if such Contributor has
|
||||
been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability.
|
||||
|
||||
While redistributing the Work or Derivative Works thereof, You may choose to
|
||||
offer, and charge a fee for, acceptance of support, warranty, indemnity, or
|
||||
other liability obligations and/or rights consistent with this License. However,
|
||||
in accepting such obligations, You may act only on Your own behalf and on Your
|
||||
sole responsibility, not on behalf of any other Contributor, and only if You
|
||||
agree to indemnify, defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason of your
|
||||
accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work
|
||||
|
||||
To apply the Apache License to your work, attach the following boilerplate
|
||||
notice, with the fields enclosed by brackets "[]" replaced with your own
|
||||
identifying information. (Don't include the brackets!) The text should be
|
||||
enclosed in the appropriate comment syntax for the file format. We also
|
||||
recommend that a file or class name and description of purpose be included on
|
||||
the same "printed page" as the copyright notice for easier identification within
|
||||
third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
94 Godeps/_workspace/src/github.com/Unknwon/macaron/README.md (generated, vendored, Normal file)
@@ -0,0 +1,94 @@
|
||||
Macaron [](https://drone.io/github.com/Unknwon/macaron/latest) [](http://gocover.io/github.com/Unknwon/macaron)
|
||||
=======================
|
||||
|
||||

|
||||
|
||||
Package macaron is a highly productive, modular web framework in Go.
|
||||
|
||||
##### Current version: 0.5.4
|
||||
|
||||
## Getting Started
|
||||
|
||||
To install Macaron:
|
||||
|
||||
go get github.com/Unknwon/macaron
|
||||
|
||||
The very basic usage of Macaron:
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import "github.com/Unknwon/macaron"
|
||||
|
||||
func main() {
|
||||
m := macaron.Classic()
|
||||
m.Get("/", func() string {
|
||||
return "Hello world!"
|
||||
})
|
||||
m.Run()
|
||||
}
|
||||
```
|
||||
|
||||
## Features
|
||||
|
||||
- Powerful routing with suburl.
|
||||
- Flexible route combinations.
|
||||
- Unlimited nested group routers.
|
||||
- Directly integrate with existing services.
|
||||
- Dynamically change template files at runtime.
|
||||
- Allows using in-memory templates and static files.
|
||||
- Easy to plug features in and out thanks to the modular design.
|
||||
- Handy dependency injection powered by [inject](https://github.com/codegangsta/inject).
|
||||
- Better router layer and less reflection for faster performance.
|
||||
|
||||
## Middlewares
|
||||
|
||||
Middlewares allow you to easily plug features in and out of your Macaron applications.
|
||||
|
||||
There are already many [middlewares](https://github.com/macaron-contrib) to simplify your work:
|
||||
|
||||
- gzip - Gzip compression to all requests
|
||||
- render - Go template engine
|
||||
- static - Serves static files
|
||||
- [binding](https://github.com/macaron-contrib/binding) - Request data binding and validation
|
||||
- [i18n](https://github.com/macaron-contrib/i18n) - Internationalization and Localization
|
||||
- [cache](https://github.com/macaron-contrib/cache) - Cache manager
|
||||
- [session](https://github.com/macaron-contrib/session) - Session manager
|
||||
- [csrf](https://github.com/macaron-contrib/csrf) - Generates and validates csrf tokens
|
||||
- [captcha](https://github.com/macaron-contrib/captcha) - Captcha service
|
||||
- [pongo2](https://github.com/macaron-contrib/pongo2) - Pongo2 template engine support
|
||||
- [sockets](https://github.com/macaron-contrib/sockets) - WebSockets channels binding
|
||||
- [bindata](https://github.com/macaron-contrib/bindata) - Embed binary data as static and template files
|
||||
- [toolbox](https://github.com/macaron-contrib/toolbox) - Health check, pprof, profile and statistic services
|
||||
- [oauth2](https://github.com/macaron-contrib/oauth2) - OAuth 2.0 backend
|
||||
- [switcher](https://github.com/macaron-contrib/switcher) - Multiple-site support
|
||||
- [method](https://github.com/macaron-contrib/method) - HTTP method override
|
||||
- [permissions2](https://github.com/xyproto/permissions2) - Cookies, users and permissions
|
||||
- [renders](https://github.com/macaron-contrib/renders) - Beego-like render engine(Macaron has built-in template engine, this is another option)
|
||||
|
||||
## Use Cases
|
||||
|
||||
- [Gogs](https://github.com/gogits/gogs): Go Git Service
|
||||
- [Gogs Web](https://github.com/gogits/gogsweb): Gogs official website
|
||||
- [Go Walker](https://gowalker.org): Go online API documentation
|
||||
- [Switch](https://github.com/gpmgo/switch): Gopm registry
|
||||
- [YouGam](http://yougam.com): Online Forum
|
||||
- [Car Girl](http://qcnl.gzsy.com/): Online campaign
|
||||
- [Critical Stack Intel](https://intel.criticalstack.com/): A 100% free intel marketplace from Critical Stack, Inc.
|
||||
|
||||
## Getting Help
|
||||
|
||||
- [API Reference](https://gowalker.org/github.com/Unknwon/macaron)
|
||||
- [Documentation](http://macaron.gogs.io)
|
||||
- [FAQs](http://macaron.gogs.io/docs/faqs)
|
||||
- [](https://gitter.im/Unknwon/macaron?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||
|
||||
## Credits
|
||||
|
||||
- Basic design of [Martini](https://github.com/go-martini/martini).
|
||||
- Router layer of [beego](https://github.com/astaxie/beego).
|
||||
- Logo is modified by [@insionng](https://github.com/insionng) based on [Tribal Dragon](http://xtremeyamazaki.deviantart.com/art/Tribal-Dragon-27005087).
|
||||
|
||||
## License
|
||||
|
||||
This project is under Apache v2 License. See the [LICENSE](LICENSE) file for the full license text.
|
||||
478
Godeps/_workspace/src/github.com/Unknwon/macaron/context.go
generated
vendored
Normal file
478
Godeps/_workspace/src/github.com/Unknwon/macaron/context.go
generated
vendored
Normal file
@@ -0,0 +1,478 @@
|
||||
// Copyright 2014 Unknwon
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package macaron
|
||||
|
||||
import (
|
||||
"crypto/md5"
|
||||
"encoding/hex"
|
||||
"html/template"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"mime/multipart"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/Unknwon/com"
|
||||
|
||||
"github.com/Unknwon/macaron/inject"
|
||||
)
|
||||
|
||||
// Locale represents a localization interface.
|
||||
type Locale interface {
|
||||
Language() string
|
||||
Tr(string, ...interface{}) string
|
||||
}
|
||||
|
||||
// RequestBody represents a request body.
|
||||
type RequestBody struct {
|
||||
reader io.ReadCloser
|
||||
}
|
||||
|
||||
// Bytes reads and returns content of request body in bytes.
|
||||
func (rb *RequestBody) Bytes() ([]byte, error) {
|
||||
return ioutil.ReadAll(rb.reader)
|
||||
}
|
||||
|
||||
// String reads and returns content of request body in string.
|
||||
func (rb *RequestBody) String() (string, error) {
|
||||
data, err := rb.Bytes()
|
||||
return string(data), err
|
||||
}
|
||||
|
||||
// ReadCloser returns a ReadCloser for request body.
|
||||
func (rb *RequestBody) ReadCloser() io.ReadCloser {
|
||||
return rb.reader
|
||||
}
|
||||
|
||||
// Request represents an HTTP request received by a server or to be sent by a client.
|
||||
type Request struct {
|
||||
*http.Request
|
||||
}
|
||||
|
||||
func (r *Request) Body() *RequestBody {
|
||||
return &RequestBody{r.Request.Body}
|
||||
}
|
||||
|
||||
// Context represents the runtime context of current request of Macaron instance.
|
||||
// It is the integration of most frequently used middlewares and helper methods.
|
||||
type Context struct {
|
||||
inject.Injector
|
||||
handlers []Handler
|
||||
action Handler
|
||||
index int
|
||||
|
||||
*Router
|
||||
Req Request
|
||||
Resp ResponseWriter
|
||||
params Params
|
||||
Render // Not nil only if you use macaron.Render middleware.
|
||||
Locale
|
||||
Data map[string]interface{}
|
||||
}
|
||||
|
||||
func (c *Context) handler() Handler {
|
||||
if c.index < len(c.handlers) {
|
||||
return c.handlers[c.index]
|
||||
}
|
||||
if c.index == len(c.handlers) {
|
||||
return c.action
|
||||
}
|
||||
panic("invalid index for context handler")
|
||||
}
|
||||
|
||||
func (c *Context) Next() {
|
||||
c.index += 1
|
||||
c.run()
|
||||
}
|
||||
|
||||
func (c *Context) Written() bool {
|
||||
return c.Resp.Written()
|
||||
}
|
||||
|
||||
func (c *Context) run() {
|
||||
for c.index <= len(c.handlers) {
|
||||
vals, err := c.Invoke(c.handler())
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
c.index += 1
|
||||
|
||||
// if the handler returned something, write it to the http response
|
||||
if len(vals) > 0 {
|
||||
ev := c.GetVal(reflect.TypeOf(ReturnHandler(nil)))
|
||||
handleReturn := ev.Interface().(ReturnHandler)
|
||||
handleReturn(c, vals)
|
||||
}
|
||||
|
||||
if c.Written() {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// RemoteAddr returns the client's real IP address when possible.
|
||||
func (ctx *Context) RemoteAddr() string {
|
||||
addr := ctx.Req.Header.Get("X-Real-IP")
|
||||
if len(addr) == 0 {
|
||||
addr = ctx.Req.Header.Get("X-Forwarded-For")
|
||||
if addr == "" {
|
||||
addr = ctx.Req.RemoteAddr
|
||||
if i := strings.LastIndex(addr, ":"); i > -1 {
|
||||
addr = addr[:i]
|
||||
}
|
||||
}
|
||||
}
|
||||
return addr
|
||||
}
|
||||
|
||||
func (ctx *Context) renderHTML(status int, setName, tplName string, data ...interface{}) {
|
||||
if ctx.Render == nil {
|
||||
panic("renderer middleware hasn't been registered")
|
||||
}
|
||||
if len(data) <= 0 {
|
||||
ctx.Render.HTMLSet(status, setName, tplName, ctx.Data)
|
||||
} else if len(data) == 1 {
|
||||
ctx.Render.HTMLSet(status, setName, tplName, data[0])
|
||||
} else {
|
||||
ctx.Render.HTMLSet(status, setName, tplName, data[0], data[1].(HTMLOptions))
|
||||
}
|
||||
}
|
||||
|
||||
// HTML calls Render.HTML but allows less arguments.
|
||||
func (ctx *Context) HTML(status int, name string, data ...interface{}) {
|
||||
ctx.renderHTML(status, _DEFAULT_TPL_SET_NAME, name, data...)
|
||||
}
|
||||
|
||||
// HTML calls Render.HTMLSet but allows less arguments.
|
||||
func (ctx *Context) HTMLSet(status int, setName, tplName string, data ...interface{}) {
|
||||
ctx.renderHTML(status, setName, tplName, data...)
|
||||
}
|
||||
|
||||
func (ctx *Context) Redirect(location string, status ...int) {
|
||||
code := http.StatusFound
|
||||
if len(status) == 1 {
|
||||
code = status[0]
|
||||
}
|
||||
|
||||
http.Redirect(ctx.Resp, ctx.Req.Request, location, code)
|
||||
}
|
||||
|
||||
// Query querys form parameter.
|
||||
func (ctx *Context) Query(name string) string {
|
||||
if ctx.Req.Form == nil {
|
||||
ctx.Req.ParseForm()
|
||||
}
|
||||
return ctx.Req.Form.Get(name)
|
||||
}
|
||||
|
||||
// QueryTrim querys and trims spaces form parameter.
|
||||
func (ctx *Context) QueryTrim(name string) string {
|
||||
return strings.TrimSpace(ctx.Query(name))
|
||||
}
|
||||
|
||||
// QueryStrings returns a list of results by given query name.
|
||||
func (ctx *Context) QueryStrings(name string) []string {
|
||||
if ctx.Req.Form == nil {
|
||||
ctx.Req.ParseForm()
|
||||
}
|
||||
|
||||
vals, ok := ctx.Req.Form[name]
|
||||
if !ok {
|
||||
return []string{}
|
||||
}
|
||||
return vals
|
||||
}
|
||||
|
||||
// QueryEscape returns escapred query result.
|
||||
func (ctx *Context) QueryEscape(name string) string {
|
||||
return template.HTMLEscapeString(ctx.Query(name))
|
||||
}
|
||||
|
||||
// QueryInt returns query result in int type.
|
||||
func (ctx *Context) QueryInt(name string) int {
|
||||
return com.StrTo(ctx.Query(name)).MustInt()
|
||||
}
|
||||
|
||||
// QueryInt64 returns query result in int64 type.
|
||||
func (ctx *Context) QueryInt64(name string) int64 {
|
||||
return com.StrTo(ctx.Query(name)).MustInt64()
|
||||
}
|
||||
|
||||
// QueryFloat64 returns query result in float64 type.
|
||||
func (ctx *Context) QueryFloat64(name string) float64 {
|
||||
v, _ := strconv.ParseFloat(ctx.Query(name), 64)
|
||||
return v
|
||||
}
|
||||
|
||||
// Params returns value of given param name.
|
||||
// e.g. ctx.Params(":uid") or ctx.Params("uid")
|
||||
func (ctx *Context) Params(name string) string {
|
||||
if len(name) == 0 {
|
||||
return ""
|
||||
}
|
||||
if name[0] != '*' && name[0] != ':' {
|
||||
name = ":" + name
|
||||
}
|
||||
return ctx.params[name]
|
||||
}
|
||||
|
||||
// SetParams sets value of param with given name.
|
||||
func (ctx *Context) SetParams(name, val string) {
|
||||
if !strings.HasPrefix(name, ":") {
|
||||
name = ":" + name
|
||||
}
|
||||
ctx.params[name] = val
|
||||
}
|
||||
|
||||
// ParamsEscape returns escapred params result.
|
||||
// e.g. ctx.ParamsEscape(":uname")
|
||||
func (ctx *Context) ParamsEscape(name string) string {
|
||||
return template.HTMLEscapeString(ctx.Params(name))
|
||||
}
|
||||
|
||||
// ParamsInt returns params result in int type.
|
||||
// e.g. ctx.ParamsInt(":uid")
|
||||
func (ctx *Context) ParamsInt(name string) int {
|
||||
return com.StrTo(ctx.Params(name)).MustInt()
|
||||
}
|
||||
|
||||
// ParamsInt64 returns params result in int64 type.
|
||||
// e.g. ctx.ParamsInt64(":uid")
|
||||
func (ctx *Context) ParamsInt64(name string) int64 {
|
||||
return com.StrTo(ctx.Params(name)).MustInt64()
|
||||
}
|
||||
|
||||
// ParamsFloat64 returns params result in int64 type.
|
||||
// e.g. ctx.ParamsFloat64(":uid")
|
||||
func (ctx *Context) ParamsFloat64(name string) float64 {
|
||||
v, _ := strconv.ParseFloat(ctx.Params(name), 64)
|
||||
return v
|
||||
}
|
||||
|
||||
// GetFile returns information about user upload file by given form field name.
|
||||
func (ctx *Context) GetFile(name string) (multipart.File, *multipart.FileHeader, error) {
|
||||
return ctx.Req.FormFile(name)
|
||||
}
|
||||
|
||||
// SetCookie sets given cookie value to response header.
|
||||
// FIXME: IE support? http://golanghome.com/post/620#reply2
|
||||
func (ctx *Context) SetCookie(name string, value string, others ...interface{}) {
|
||||
cookie := http.Cookie{}
|
||||
cookie.Name = name
|
||||
cookie.Value = url.QueryEscape(value)
|
||||
|
||||
if len(others) > 0 {
|
||||
switch v := others[0].(type) {
|
||||
case int:
|
||||
cookie.MaxAge = v
|
||||
case int64:
|
||||
cookie.MaxAge = int(v)
|
||||
case int32:
|
||||
cookie.MaxAge = int(v)
|
||||
}
|
||||
}
|
||||
|
||||
cookie.Path = "/"
|
||||
if len(others) > 1 {
|
||||
if v, ok := others[1].(string); ok && len(v) > 0 {
|
||||
cookie.Path = v
|
||||
}
|
||||
}
|
||||
|
||||
if len(others) > 2 {
|
||||
if v, ok := others[2].(string); ok && len(v) > 0 {
|
||||
cookie.Domain = v
|
||||
}
|
||||
}
|
||||
|
||||
if len(others) > 3 {
|
||||
switch v := others[3].(type) {
|
||||
case bool:
|
||||
cookie.Secure = v
|
||||
default:
|
||||
if others[3] != nil {
|
||||
cookie.Secure = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(others) > 4 {
|
||||
if v, ok := others[4].(bool); ok && v {
|
||||
cookie.HttpOnly = true
|
||||
}
|
||||
}
|
||||
|
||||
ctx.Resp.Header().Add("Set-Cookie", cookie.String())
|
||||
}
|
||||
|
||||
// GetCookie returns given cookie value from request header.
|
||||
func (ctx *Context) GetCookie(name string) string {
|
||||
cookie, err := ctx.Req.Cookie(name)
|
||||
if err != nil {
|
||||
return ""
|
||||
}
|
||||
val, _ := url.QueryUnescape(cookie.Value)
|
||||
return val
|
||||
}
|
||||
|
||||
// GetCookieInt returns cookie result in int type.
|
||||
func (ctx *Context) GetCookieInt(name string) int {
|
||||
return com.StrTo(ctx.GetCookie(name)).MustInt()
|
||||
}
|
||||
|
||||
// GetCookieInt64 returns cookie result in int64 type.
|
||||
func (ctx *Context) GetCookieInt64(name string) int64 {
|
||||
return com.StrTo(ctx.GetCookie(name)).MustInt64()
|
||||
}
|
||||
|
||||
// GetCookieFloat64 returns cookie result in float64 type.
|
||||
func (ctx *Context) GetCookieFloat64(name string) float64 {
|
||||
v, _ := strconv.ParseFloat(ctx.GetCookie(name), 64)
|
||||
return v
|
||||
}
|
||||
|
||||
var defaultCookieSecret string
|
||||
|
||||
// SetDefaultCookieSecret sets global default secure cookie secret.
|
||||
func (m *Macaron) SetDefaultCookieSecret(secret string) {
|
||||
defaultCookieSecret = secret
|
||||
}
|
||||
|
||||
// SetSecureCookie sets given cookie value to response header with default secret string.
|
||||
func (ctx *Context) SetSecureCookie(name, value string, others ...interface{}) {
|
||||
ctx.SetSuperSecureCookie(defaultCookieSecret, name, value, others...)
|
||||
}
|
||||
|
||||
// GetSecureCookie returns given cookie value from request header with default secret string.
|
||||
func (ctx *Context) GetSecureCookie(key string) (string, bool) {
|
||||
return ctx.GetSuperSecureCookie(defaultCookieSecret, key)
|
||||
}
|
||||
|
||||
// SetSuperSecureCookie sets given cookie value to response header with secret string.
|
||||
func (ctx *Context) SetSuperSecureCookie(secret, name, value string, others ...interface{}) {
|
||||
m := md5.Sum([]byte(secret))
|
||||
secret = hex.EncodeToString(m[:])
|
||||
text, err := com.AESEncrypt([]byte(secret), []byte(value))
|
||||
if err != nil {
|
||||
panic("error encrypting cookie: " + err.Error())
|
||||
}
|
||||
ctx.SetCookie(name, hex.EncodeToString(text), others...)
|
||||
}
|
||||
|
||||
// GetSuperSecureCookie returns given cookie value from request header with secret string.
|
||||
func (ctx *Context) GetSuperSecureCookie(secret, key string) (string, bool) {
|
||||
val := ctx.GetCookie(key)
|
||||
if val == "" {
|
||||
return "", false
|
||||
}
|
||||
|
||||
data, err := hex.DecodeString(val)
|
||||
if err != nil {
|
||||
return "", false
|
||||
}
|
||||
|
||||
m := md5.Sum([]byte(secret))
|
||||
secret = hex.EncodeToString(m[:])
|
||||
text, err := com.AESDecrypt([]byte(secret), data)
|
||||
return string(text), err == nil
|
||||
}
|
||||
|
||||
func (ctx *Context) setRawContentHeader() {
|
||||
ctx.Resp.Header().Set("Content-Description", "Raw content")
|
||||
ctx.Resp.Header().Set("Content-Type", "text/plain")
|
||||
ctx.Resp.Header().Set("Expires", "0")
|
||||
ctx.Resp.Header().Set("Cache-Control", "must-revalidate")
|
||||
ctx.Resp.Header().Set("Pragma", "public")
|
||||
}
|
||||
|
||||
// ServeContent serves given content to response.
|
||||
func (ctx *Context) ServeContent(name string, r io.ReadSeeker, params ...interface{}) {
|
||||
modtime := time.Now()
|
||||
for _, p := range params {
|
||||
switch v := p.(type) {
|
||||
case time.Time:
|
||||
modtime = v
|
||||
}
|
||||
}
|
||||
|
||||
ctx.setRawContentHeader()
|
||||
http.ServeContent(ctx.Resp, ctx.Req.Request, name, modtime, r)
|
||||
}
|
||||
|
||||
// ServeFileContent serves given file as content to response.
|
||||
func (ctx *Context) ServeFileContent(file string, names ...string) {
|
||||
var name string
|
||||
if len(names) > 0 {
|
||||
name = names[0]
|
||||
} else {
|
||||
name = path.Base(file)
|
||||
}
|
||||
|
||||
f, err := os.Open(file)
|
||||
if err != nil {
|
||||
if Env == PROD {
|
||||
http.Error(ctx.Resp, "Internal Server Error", 500)
|
||||
} else {
|
||||
http.Error(ctx.Resp, err.Error(), 500)
|
||||
}
|
||||
return
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
ctx.setRawContentHeader()
|
||||
http.ServeContent(ctx.Resp, ctx.Req.Request, name, time.Now(), f)
|
||||
}
|
||||
|
||||
// ServeFile serves given file to response.
|
||||
func (ctx *Context) ServeFile(file string, names ...string) {
|
||||
var name string
|
||||
if len(names) > 0 {
|
||||
name = names[0]
|
||||
} else {
|
||||
name = path.Base(file)
|
||||
}
|
||||
ctx.Resp.Header().Set("Content-Description", "File Transfer")
|
||||
ctx.Resp.Header().Set("Content-Type", "application/octet-stream")
|
||||
ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+name)
|
||||
ctx.Resp.Header().Set("Content-Transfer-Encoding", "binary")
|
||||
ctx.Resp.Header().Set("Expires", "0")
|
||||
ctx.Resp.Header().Set("Cache-Control", "must-revalidate")
|
||||
ctx.Resp.Header().Set("Pragma", "public")
|
||||
http.ServeFile(ctx.Resp, ctx.Req.Request, file)
|
||||
}
|
||||
|
||||
// ChangeStaticPath changes static path from old to new one.
|
||||
func (ctx *Context) ChangeStaticPath(oldPath, newPath string) {
|
||||
if !filepath.IsAbs(oldPath) {
|
||||
oldPath = filepath.Join(Root, oldPath)
|
||||
}
|
||||
dir := statics.Get(oldPath)
|
||||
if dir != nil {
|
||||
statics.Delete(oldPath)
|
||||
|
||||
if !filepath.IsAbs(newPath) {
|
||||
newPath = filepath.Join(Root, newPath)
|
||||
}
|
||||
*dir = http.Dir(newPath)
|
||||
statics.Set(dir)
|
||||
}
|
||||
}
|
||||
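For orientation, the Context helpers above combine routing params, query parsing, cookies, and rendering into a single handler argument. The following is a minimal usage sketch and not part of this changeset; it assumes the Renderer middleware with its default template directory, and the route, template, and cookie names are illustrative:

package main

import "github.com/Unknwon/macaron"

func main() {
	m := macaron.Classic()
	m.Use(macaron.Renderer())             // required before ctx.HTML can render templates
	m.SetDefaultCookieSecret("change-me") // enables SetSecureCookie / GetSecureCookie

	m.Get("/users/:uid", func(ctx *macaron.Context) {
		uid := ctx.ParamsInt64(":uid") // ":uid" and "uid" are treated the same
		page := ctx.QueryInt("page")   // missing or non-numeric values become 0

		// Round-trip an AES-encrypted cookie using the default secret.
		ctx.SetSecureCookie("last_uid", ctx.Params(":uid"))
		if last, ok := ctx.GetSecureCookie("last_uid"); ok {
			ctx.Data["LastUID"] = last
		}

		ctx.Data["UID"] = uid
		ctx.Data["Page"] = page
		ctx.HTML(200, "user") // renders the "user" template with ctx.Data
	})

	m.Run()
}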
370 Godeps/_workspace/src/github.com/Unknwon/macaron/context_test.go generated vendored Normal file
@@ -0,0 +1,370 @@
// Copyright 2014 Unknwon
//
// Licensed under the Apache License, Version 2.0 (the "License"): you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.

package macaron

import (
	"bytes"
	"io/ioutil"
	"net/http"
	"net/http/httptest"
	"net/url"
	"strings"
	"testing"
	"time"

	"github.com/Unknwon/com"

	. "github.com/smartystreets/goconvey/convey"
)

func Test_Context(t *testing.T) {
	Convey("Do advanced encapsulation operations", t, func() {
		m := Classic()
		m.Use(Renderers(RenderOptions{
			Directory: "fixtures/basic",
		}, "fixtures/basic2"))

		Convey("Get request body", func() {
			m.Get("/body1", func(ctx *Context) {
				data, err := ioutil.ReadAll(ctx.Req.Body().ReadCloser())
				So(err, ShouldBeNil)
				So(string(data), ShouldEqual, "This is my request body")
			})
			m.Get("/body2", func(ctx *Context) {
				data, err := ctx.Req.Body().Bytes()
				So(err, ShouldBeNil)
				So(string(data), ShouldEqual, "This is my request body")
			})
			m.Get("/body3", func(ctx *Context) {
				data, err := ctx.Req.Body().String()
				So(err, ShouldBeNil)
				So(data, ShouldEqual, "This is my request body")
			})

			for i := 1; i <= 3; i++ {
				resp := httptest.NewRecorder()
				req, err := http.NewRequest("GET", "/body"+com.ToStr(i), nil)
				req.Body = ioutil.NopCloser(bytes.NewBufferString("This is my request body"))
				So(err, ShouldBeNil)
				m.ServeHTTP(resp, req)
			}
		})

		Convey("Get remote IP address", func() {
			m.Get("/remoteaddr", func(ctx *Context) string {
				return ctx.RemoteAddr()
			})

			resp := httptest.NewRecorder()
			req, err := http.NewRequest("GET", "/remoteaddr", nil)
			req.RemoteAddr = "127.0.0.1:3333"
			So(err, ShouldBeNil)
			m.ServeHTTP(resp, req)
			So(resp.Body.String(), ShouldEqual, "127.0.0.1")
		})

		Convey("Render HTML", func() {

			Convey("Normal HTML", func() {
				m.Get("/html", func(ctx *Context) {
					ctx.HTML(304, "hello", "Unknwon") // 304 for logger test.
				})

				resp := httptest.NewRecorder()
				req, err := http.NewRequest("GET", "/html", nil)
				So(err, ShouldBeNil)
				m.ServeHTTP(resp, req)
				So(resp.Body.String(), ShouldEqual, "<h1>Hello Unknwon</h1>")
			})

			Convey("HTML template set", func() {
				m.Get("/html2", func(ctx *Context) {
					ctx.Data["Name"] = "Unknwon"
					ctx.HTMLSet(200, "basic2", "hello2")
				})

				resp := httptest.NewRecorder()
				req, err := http.NewRequest("GET", "/html2", nil)
				So(err, ShouldBeNil)
				m.ServeHTTP(resp, req)
				So(resp.Body.String(), ShouldEqual, "<h1>Hello Unknwon</h1>")
			})

			Convey("With layout", func() {
				m.Get("/layout", func(ctx *Context) {
					ctx.HTML(200, "hello", "Unknwon", HTMLOptions{"layout"})
				})

				resp := httptest.NewRecorder()
				req, err := http.NewRequest("GET", "/layout", nil)
				So(err, ShouldBeNil)
				m.ServeHTTP(resp, req)
				So(resp.Body.String(), ShouldEqual, "head<h1>Hello Unknwon</h1>foot")
			})
		})

		Convey("Parse from and query", func() {
			m.Get("/query", func(ctx *Context) string {
				var buf bytes.Buffer
				buf.WriteString(ctx.QueryTrim("name") + " ")
				buf.WriteString(ctx.QueryEscape("name") + " ")
				buf.WriteString(com.ToStr(ctx.QueryInt("int")) + " ")
				buf.WriteString(com.ToStr(ctx.QueryInt64("int64")) + " ")
				buf.WriteString(com.ToStr(ctx.QueryFloat64("float64")) + " ")
				return buf.String()
			})
			m.Get("/query2", func(ctx *Context) string {
				var buf bytes.Buffer
				buf.WriteString(strings.Join(ctx.QueryStrings("list"), ",") + " ")
				buf.WriteString(strings.Join(ctx.QueryStrings("404"), ",") + " ")
				return buf.String()
			})

			resp := httptest.NewRecorder()
			req, err := http.NewRequest("GET", "/query?name=Unknwon&int=12&int64=123&float64=1.25", nil)
			So(err, ShouldBeNil)
			m.ServeHTTP(resp, req)
			So(resp.Body.String(), ShouldEqual, "Unknwon Unknwon 12 123 1.25 ")

			resp = httptest.NewRecorder()
			req, err = http.NewRequest("GET", "/query2?list=item1&list=item2", nil)
			So(err, ShouldBeNil)
			m.ServeHTTP(resp, req)
			So(resp.Body.String(), ShouldEqual, "item1,item2 ")
		})

		Convey("URL parameter", func() {
			m.Get("/:name/:int/:int64/:float64", func(ctx *Context) string {
				var buf bytes.Buffer
				ctx.SetParams("name", ctx.Params("name"))
				buf.WriteString(ctx.Params(""))
				buf.WriteString(ctx.Params(":name") + " ")
				buf.WriteString(ctx.ParamsEscape(":name") + " ")
				buf.WriteString(com.ToStr(ctx.ParamsInt(":int")) + " ")
				buf.WriteString(com.ToStr(ctx.ParamsInt64(":int64")) + " ")
				buf.WriteString(com.ToStr(ctx.ParamsFloat64(":float64")) + " ")
				return buf.String()
			})

			resp := httptest.NewRecorder()
			req, err := http.NewRequest("GET", "/user/1/13/1.24", nil)
			So(err, ShouldBeNil)
			m.ServeHTTP(resp, req)
			So(resp.Body.String(), ShouldEqual, "user user 1 13 1.24 ")
		})

		Convey("Get file", func() {
			m.Get("/getfile", func(ctx *Context) {
				ctx.GetFile("hi")
			})

			resp := httptest.NewRecorder()
			req, err := http.NewRequest("GET", "/getfile", nil)
			So(err, ShouldBeNil)
			m.ServeHTTP(resp, req)
		})

		Convey("Set and get cookie", func() {
			m.Get("/set", func(ctx *Context) {
				ctx.SetCookie("user", "Unknwon", 1, "/", "localhost", true, true)
				ctx.SetCookie("user", "Unknwon", int32(1), "/", "localhost", 1)
				ctx.SetCookie("user", "Unknwon", int64(1))
			})

			resp := httptest.NewRecorder()
			req, err := http.NewRequest("GET", "/set", nil)
			So(err, ShouldBeNil)
			m.ServeHTTP(resp, req)
			So(resp.Header().Get("Set-Cookie"), ShouldEqual, "user=Unknwon; Path=/; Domain=localhost; Max-Age=1; HttpOnly; Secure")

			m.Get("/get", func(ctx *Context) string {
				ctx.GetCookie("404")
				So(ctx.GetCookieInt("uid"), ShouldEqual, 1)
				So(ctx.GetCookieInt64("uid"), ShouldEqual, 1)
				So(ctx.GetCookieFloat64("balance"), ShouldEqual, 1.25)
				return ctx.GetCookie("user")
			})

			resp = httptest.NewRecorder()
			req, err = http.NewRequest("GET", "/get", nil)
			So(err, ShouldBeNil)
			req.Header.Set("Cookie", "user=Unknwon; uid=1; balance=1.25")
			m.ServeHTTP(resp, req)
			So(resp.Body.String(), ShouldEqual, "Unknwon")
		})

		Convey("Set and get secure cookie", func() {
			m.SetDefaultCookieSecret("macaron")
			m.Get("/set", func(ctx *Context) {
				ctx.SetSecureCookie("user", "Unknwon", 1)
			})

			resp := httptest.NewRecorder()
			req, err := http.NewRequest("GET", "/set", nil)
			So(err, ShouldBeNil)
			m.ServeHTTP(resp, req)

			cookie := resp.Header().Get("Set-Cookie")

			m.Get("/get", func(ctx *Context) string {
				name, ok := ctx.GetSecureCookie("user")
				So(ok, ShouldBeTrue)
				return name
			})

			resp = httptest.NewRecorder()
			req, err = http.NewRequest("GET", "/get", nil)
			So(err, ShouldBeNil)
			req.Header.Set("Cookie", cookie)
			m.ServeHTTP(resp, req)
			So(resp.Body.String(), ShouldEqual, "Unknwon")
		})

		Convey("Serve files", func() {
			m.Get("/file", func(ctx *Context) {
				ctx.ServeFile("fixtures/custom_funcs/index.tmpl")
			})

			resp := httptest.NewRecorder()
			req, err := http.NewRequest("GET", "/file", nil)
			So(err, ShouldBeNil)
			m.ServeHTTP(resp, req)
			So(resp.Body.String(), ShouldEqual, "{{ myCustomFunc }}")

			m.Get("/file2", func(ctx *Context) {
				ctx.ServeFile("fixtures/custom_funcs/index.tmpl", "ok.tmpl")
			})

			resp = httptest.NewRecorder()
			req, err = http.NewRequest("GET", "/file2", nil)
			So(err, ShouldBeNil)
			m.ServeHTTP(resp, req)
			So(resp.Body.String(), ShouldEqual, "{{ myCustomFunc }}")
		})

		Convey("Serve file content", func() {
			m.Get("/file", func(ctx *Context) {
				ctx.ServeFileContent("fixtures/custom_funcs/index.tmpl")
			})

			resp := httptest.NewRecorder()
			req, err := http.NewRequest("GET", "/file", nil)
			So(err, ShouldBeNil)
			m.ServeHTTP(resp, req)
			So(resp.Body.String(), ShouldEqual, "{{ myCustomFunc }}")

			m.Get("/file2", func(ctx *Context) {
				ctx.ServeFileContent("fixtures/custom_funcs/index.tmpl", "ok.tmpl")
			})

			resp = httptest.NewRecorder()
			req, err = http.NewRequest("GET", "/file2", nil)
			So(err, ShouldBeNil)
			m.ServeHTTP(resp, req)
			So(resp.Body.String(), ShouldEqual, "{{ myCustomFunc }}")

			m.Get("/file3", func(ctx *Context) {
				ctx.ServeFileContent("404.tmpl")
			})

			resp = httptest.NewRecorder()
			req, err = http.NewRequest("GET", "/file3", nil)
			So(err, ShouldBeNil)
			m.ServeHTTP(resp, req)
			So(resp.Body.String(), ShouldEqual, "open 404.tmpl: no such file or directory\n")
			So(resp.Code, ShouldEqual, 500)
		})

		Convey("Serve content", func() {
			m.Get("/content", func(ctx *Context) {
				ctx.ServeContent("content1", bytes.NewReader([]byte("Hello world!")))
			})

			resp := httptest.NewRecorder()
			req, err := http.NewRequest("GET", "/content", nil)
			So(err, ShouldBeNil)
			m.ServeHTTP(resp, req)
			So(resp.Body.String(), ShouldEqual, "Hello world!")

			m.Get("/content2", func(ctx *Context) {
				ctx.ServeContent("content1", bytes.NewReader([]byte("Hello world!")), time.Now())
			})

			resp = httptest.NewRecorder()
			req, err = http.NewRequest("GET", "/content2", nil)
			So(err, ShouldBeNil)
			m.ServeHTTP(resp, req)
			So(resp.Body.String(), ShouldEqual, "Hello world!")
		})
	})
}

func Test_Context_Render(t *testing.T) {
	Convey("Invalid render", t, func() {
		defer func() {
			So(recover(), ShouldNotBeNil)
		}()

		m := New()
		m.Get("/", func(ctx *Context) {
			ctx.HTML(200, "hey")
		})

		resp := httptest.NewRecorder()
		req, err := http.NewRequest("GET", "/", nil)
		So(err, ShouldBeNil)
		m.ServeHTTP(resp, req)
	})
}

func Test_Context_Redirect(t *testing.T) {
	Convey("Context with default redirect", t, func() {
		url, err := url.Parse("http://localhost/path/one")
		So(err, ShouldBeNil)
		resp := httptest.NewRecorder()
		req := http.Request{
			Method: "GET",
			URL:    url,
		}
		ctx := &Context{
			Req:  Request{&req},
			Resp: NewResponseWriter(resp),
			Data: make(map[string]interface{}),
		}
		ctx.Redirect("two")

		So(resp.Code, ShouldEqual, http.StatusFound)
		So(resp.HeaderMap["Location"][0], ShouldEqual, "/path/two")
	})

	Convey("Context with custom redirect", t, func() {
		url, err := url.Parse("http://localhost/path/one")
		So(err, ShouldBeNil)
		resp := httptest.NewRecorder()
		req := http.Request{
			Method: "GET",
			URL:    url,
		}
		ctx := &Context{
			Req:  Request{&req},
			Resp: NewResponseWriter(resp),
			Data: make(map[string]interface{}),
		}
		ctx.Redirect("two", 307)

		So(resp.Code, ShouldEqual, http.StatusTemporaryRedirect)
		So(resp.HeaderMap["Location"][0], ShouldEqual, "/path/two")
	})
}
1 Godeps/_workspace/src/github.com/Unknwon/macaron/fixtures/basic/admin/index.tmpl generated vendored Normal file
@@ -0,0 +1 @@
<h1>Admin {{.}}</h1>
1 Godeps/_workspace/src/github.com/Unknwon/macaron/fixtures/basic/another_layout.tmpl generated vendored Normal file
@@ -0,0 +1 @@
another head{{ yield }}another foot
1 Godeps/_workspace/src/github.com/Unknwon/macaron/fixtures/basic/content.tmpl generated vendored Normal file
@@ -0,0 +1 @@
<h1>{{ . }}</h1>
1 Godeps/_workspace/src/github.com/Unknwon/macaron/fixtures/basic/current_layout.tmpl generated vendored Normal file
@@ -0,0 +1 @@
{{ current }} head{{ yield }}{{ current }} foot
1 Godeps/_workspace/src/github.com/Unknwon/macaron/fixtures/basic/delims.tmpl generated vendored Normal file
@@ -0,0 +1 @@
<h1>Hello {[{.}]}</h1>
1 Godeps/_workspace/src/github.com/Unknwon/macaron/fixtures/basic/hello.tmpl generated vendored Normal file
@@ -0,0 +1 @@
<h1>Hello {{.}}</h1>
1 Godeps/_workspace/src/github.com/Unknwon/macaron/fixtures/basic/hypertext.html generated vendored Normal file
@@ -0,0 +1 @@
Hypertext!
1 Godeps/_workspace/src/github.com/Unknwon/macaron/fixtures/basic/layout.tmpl generated vendored Normal file
@@ -0,0 +1 @@
head{{ yield }}foot
1 Godeps/_workspace/src/github.com/Unknwon/macaron/fixtures/basic2/hello.tmpl generated vendored Normal file
@@ -0,0 +1 @@
<h1>What's up, {{.}}</h1>
1 Godeps/_workspace/src/github.com/Unknwon/macaron/fixtures/basic2/hello2.tmpl generated vendored Normal file
@@ -0,0 +1 @@
<h1>Hello {{.Name}}</h1>
1 Godeps/_workspace/src/github.com/Unknwon/macaron/fixtures/custom_funcs/index.tmpl generated vendored Normal file
@@ -0,0 +1 @@
{{ myCustomFunc }}
81 Godeps/_workspace/src/github.com/Unknwon/macaron/gzip.go generated vendored Normal file
@@ -0,0 +1,81 @@
// Copyright 2013 Martini Authors
// Copyright 2014 Unknwon
//
// Licensed under the Apache License, Version 2.0 (the "License"): you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.

package macaron

import (
	"bufio"
	"compress/gzip"
	"fmt"
	"net"
	"net/http"
	"strings"
)

const (
	HeaderAcceptEncoding  = "Accept-Encoding"
	HeaderContentEncoding = "Content-Encoding"
	HeaderContentLength   = "Content-Length"
	HeaderContentType     = "Content-Type"
	HeaderVary            = "Vary"
)

// Gziper returns a Handler that adds gzip compression to all requests.
// Make sure to include the Gzip middleware above other middleware
// that alter the response body (like the render middleware).
func Gziper() Handler {
	return func(ctx *Context) {
		if !strings.Contains(ctx.Req.Header.Get(HeaderAcceptEncoding), "gzip") {
			return
		}

		headers := ctx.Resp.Header()
		headers.Set(HeaderContentEncoding, "gzip")
		headers.Set(HeaderVary, HeaderAcceptEncoding)

		gz := gzip.NewWriter(ctx.Resp)
		defer gz.Close()

		gzw := gzipResponseWriter{gz, ctx.Resp}
		ctx.Resp = gzw
		ctx.MapTo(gzw, (*http.ResponseWriter)(nil))

		ctx.Next()

		// delete content length after we know we have been written to
		gzw.Header().Del("Content-Length")
	}
}

type gzipResponseWriter struct {
	w *gzip.Writer
	ResponseWriter
}

func (grw gzipResponseWriter) Write(p []byte) (int, error) {
	if len(grw.Header().Get(HeaderContentType)) == 0 {
		grw.Header().Set(HeaderContentType, http.DetectContentType(p))
	}

	return grw.w.Write(p)
}

func (grw gzipResponseWriter) Hijack() (net.Conn, *bufio.ReadWriter, error) {
	hijacker, ok := grw.ResponseWriter.(http.Hijacker)
	if !ok {
		return nil, nil, fmt.Errorf("the ResponseWriter doesn't support the Hijacker interface")
	}
	return hijacker.Hijack()
}
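As the comment on Gziper notes, the gzip middleware has to be registered before any middleware that writes the response body, otherwise those writes bypass the gzip.Writer. A minimal ordering sketch, not part of this changeset; the handler body and route are illustrative assumptions:

package main

import "github.com/Unknwon/macaron"

func main() {
	m := macaron.New()
	m.Use(macaron.Gziper())   // register before anything that writes the response body
	m.Use(macaron.Renderer()) // rendered output now passes through the gzip writer

	m.Get("/", func() string {
		// Returned strings go through the ReturnHandler and the wrapped gzip ResponseWriter.
		return "hello, compressed world"
	})

	m.Run()
}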
65 Godeps/_workspace/src/github.com/Unknwon/macaron/gzip_test.go generated vendored Normal file
@@ -0,0 +1,65 @@
// Copyright 2013 Martini Authors
// Copyright 2014 Unknwon
//
// Licensed under the Apache License, Version 2.0 (the "License"): you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.

package macaron

import (
	"net/http"
	"net/http/httptest"
	"strings"
	"testing"

	. "github.com/smartystreets/goconvey/convey"
)

func Test_Gzip(t *testing.T) {
	Convey("Gzip response content", t, func() {
		before := false

		m := New()
		m.Use(Gziper())
		m.Use(func(r http.ResponseWriter) {
			r.(ResponseWriter).Before(func(rw ResponseWriter) {
				before = true
			})
		})
		m.Get("/", func() string { return "hello wolrd!" })

		// Not yet gzip.
		resp := httptest.NewRecorder()
		req, err := http.NewRequest("GET", "/", nil)
		So(err, ShouldBeNil)
		m.ServeHTTP(resp, req)

		_, ok := resp.HeaderMap[HeaderContentEncoding]
		So(ok, ShouldBeFalse)

		ce := resp.Header().Get(HeaderContentEncoding)
		So(strings.EqualFold(ce, "gzip"), ShouldBeFalse)

		// Gzip now.
		resp = httptest.NewRecorder()
		req.Header.Set(HeaderAcceptEncoding, "gzip")
		m.ServeHTTP(resp, req)

		_, ok = resp.HeaderMap[HeaderContentEncoding]
		So(ok, ShouldBeTrue)

		ce = resp.Header().Get(HeaderContentEncoding)
		So(strings.EqualFold(ce, "gzip"), ShouldBeTrue)

		So(before, ShouldBeTrue)
	})
}
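The test above only asserts the Content-Encoding header; to check the body itself one would decompress the recorder output with the standard library gzip reader. A hypothetical helper, not part of the vendored test file, sketched here for illustration:

package macaron

import (
	"compress/gzip"
	"io/ioutil"
	"net/http/httptest"
)

// decodeGzipBody is a hypothetical test helper: it decompresses the body captured by an
// httptest.ResponseRecorder so the plain text can be asserted directly.
func decodeGzipBody(resp *httptest.ResponseRecorder) (string, error) {
	r, err := gzip.NewReader(resp.Body)
	if err != nil {
		return "", err
	}
	defer r.Close()

	data, err := ioutil.ReadAll(r)
	if err != nil {
		return "", err
	}
	return string(data), nil
}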
4 Godeps/_workspace/src/github.com/Unknwon/macaron/inject/README.md generated vendored Normal file
@@ -0,0 +1,4 @@
inject
======

Dependency injection for go
187 Godeps/_workspace/src/github.com/Unknwon/macaron/inject/inject.go generated vendored Normal file
@@ -0,0 +1,187 @@
// Package inject provides utilities for mapping and injecting dependencies in various ways.
package inject

import (
	"fmt"
	"reflect"
)

// Injector represents an interface for mapping and injecting dependencies into structs
// and function arguments.
type Injector interface {
	Applicator
	Invoker
	TypeMapper
	// SetParent sets the parent of the injector. If the injector cannot find a
	// dependency in its Type map it will check its parent before returning an
	// error.
	SetParent(Injector)
}

// Applicator represents an interface for mapping dependencies to a struct.
type Applicator interface {
	// Maps dependencies in the Type map to each field in the struct
	// that is tagged with 'inject'. Returns an error if the injection
	// fails.
	Apply(interface{}) error
}

// Invoker represents an interface for calling functions via reflection.
type Invoker interface {
	// Invoke attempts to call the interface{} provided as a function,
	// providing dependencies for function arguments based on Type. Returns
	// a slice of reflect.Value representing the returned values of the function.
	// Returns an error if the injection fails.
	Invoke(interface{}) ([]reflect.Value, error)
}

// TypeMapper represents an interface for mapping interface{} values based on type.
type TypeMapper interface {
	// Maps the interface{} value based on its immediate type from reflect.TypeOf.
	Map(interface{}) TypeMapper
	// Maps the interface{} value based on the pointer of an Interface provided.
	// This is really only useful for mapping a value as an interface, as interfaces
	// cannot at this time be referenced directly without a pointer.
	MapTo(interface{}, interface{}) TypeMapper
	// Provides a possibility to directly insert a mapping based on type and value.
	// This makes it possible to directly map type arguments not possible to instantiate
	// with reflect like unidirectional channels.
	Set(reflect.Type, reflect.Value) TypeMapper
	// Returns the Value that is mapped to the current type. Returns a zeroed Value if
	// the Type has not been mapped.
	GetVal(reflect.Type) reflect.Value
}

type injector struct {
	values map[reflect.Type]reflect.Value
	parent Injector
}

// InterfaceOf dereferences a pointer to an Interface type.
// It panics if value is not a pointer to an interface.
func InterfaceOf(value interface{}) reflect.Type {
	t := reflect.TypeOf(value)

	for t.Kind() == reflect.Ptr {
		t = t.Elem()
	}

	if t.Kind() != reflect.Interface {
		panic("Called inject.InterfaceOf with a value that is not a pointer to an interface. (*MyInterface)(nil)")
	}

	return t
}

// New returns a new Injector.
func New() Injector {
	return &injector{
		values: make(map[reflect.Type]reflect.Value),
	}
}

// Invoke attempts to call the interface{} provided as a function,
// providing dependencies for function arguments based on Type.
// Returns a slice of reflect.Value representing the returned values of the function.
// Returns an error if the injection fails.
// It panics if f is not a function.
func (inj *injector) Invoke(f interface{}) ([]reflect.Value, error) {
	t := reflect.TypeOf(f)

	var in = make([]reflect.Value, t.NumIn()) // Panics if t is not of kind Func.
	for i := 0; i < t.NumIn(); i++ {
		argType := t.In(i)
		val := inj.GetVal(argType)
		if !val.IsValid() {
			return nil, fmt.Errorf("Value not found for type %v", argType)
		}

		in[i] = val
	}

	return reflect.ValueOf(f).Call(in), nil
}

// Maps dependencies in the Type map to each field in the struct
// that is tagged with 'inject'.
// Returns an error if the injection fails.
func (inj *injector) Apply(val interface{}) error {
	v := reflect.ValueOf(val)

	for v.Kind() == reflect.Ptr {
		v = v.Elem()
	}

	if v.Kind() != reflect.Struct {
		return nil // Should not panic here ?
	}

	t := v.Type()

	for i := 0; i < v.NumField(); i++ {
		f := v.Field(i)
		structField := t.Field(i)
		if f.CanSet() && (structField.Tag == "inject" || structField.Tag.Get("inject") != "") {
			ft := f.Type()
			v := inj.GetVal(ft)
			if !v.IsValid() {
				return fmt.Errorf("Value not found for type %v", ft)
			}

			f.Set(v)
		}

	}

	return nil
}

// Maps the concrete value of val to its dynamic type using reflect.TypeOf.
// It returns the TypeMapper it was registered in.
func (i *injector) Map(val interface{}) TypeMapper {
	i.values[reflect.TypeOf(val)] = reflect.ValueOf(val)
	return i
}

func (i *injector) MapTo(val interface{}, ifacePtr interface{}) TypeMapper {
	i.values[InterfaceOf(ifacePtr)] = reflect.ValueOf(val)
	return i
}

// Maps the given reflect.Type to the given reflect.Value and returns
// the TypeMapper the mapping has been registered in.
func (i *injector) Set(typ reflect.Type, val reflect.Value) TypeMapper {
	i.values[typ] = val
	return i
}

func (i *injector) GetVal(t reflect.Type) reflect.Value {
	val := i.values[t]

	if val.IsValid() {
		return val
	}

	// no concrete types found, try to find implementors
	// if t is an interface
	if t.Kind() == reflect.Interface {
		for k, v := range i.values {
			if k.Implements(t) {
				val = v
				break
			}
		}
	}

	// Still no type found, try to look it up on the parent
	if !val.IsValid() && i.parent != nil {
		val = i.parent.GetVal(t)
	}

	return val

}

func (i *injector) SetParent(parent Injector) {
	i.parent = parent
}
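The inject package maps values by concrete type (Map), by interface (MapTo), and resolves function arguments via Invoke, while Apply fills struct fields tagged `inject`. A small standalone sketch using only this package; the Greeter interface, ConsoleGreeter type, and mapped values are illustrative assumptions, not part of the vendored code:

package main

import (
	"fmt"

	"github.com/Unknwon/macaron/inject"
)

// Greeter and ConsoleGreeter are illustrative types, not part of the inject package.
type Greeter interface {
	Greet(name string) string
}

type ConsoleGreeter struct{}

func (ConsoleGreeter) Greet(name string) string { return "Hello, " + name }

func main() {
	inj := inject.New()
	inj.Map("Unknwon")                           // mapped under its concrete type, string
	inj.MapTo(ConsoleGreeter{}, (*Greeter)(nil)) // mapped under the Greeter interface type

	// Invoke resolves every argument of the function from the type map.
	vals, err := inj.Invoke(func(name string, g Greeter) string {
		return g.Greet(name)
	})
	if err != nil {
		panic(err)
	}
	fmt.Println(vals[0].String()) // prints "Hello, Unknwon"
}

This is the same mechanism Macaron itself relies on when it injects *Context, the mapped *log.Logger, and the ResponseWriter into route handlers.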
1 Godeps/_workspace/src/github.com/Unknwon/macaron/inject/inject.goconvey generated vendored Normal file
@@ -0,0 +1 @@
ignore
174 Godeps/_workspace/src/github.com/Unknwon/macaron/inject/inject_test.go generated vendored Normal file
@@ -0,0 +1,174 @@
// Copyright 2013 Martini Authors
// Copyright 2014 Unknwon
//
// Licensed under the Apache License, Version 2.0 (the "License"): you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.

package inject_test

import (
	"fmt"
	"reflect"
	"testing"

	"github.com/Unknwon/macaron/inject"
)

type SpecialString interface {
}

type TestStruct struct {
	Dep1 string        `inject:"t" json:"-"`
	Dep2 SpecialString `inject`
	Dep3 string
}

type Greeter struct {
	Name string
}

func (g *Greeter) String() string {
	return "Hello, My name is" + g.Name
}

/* Test Helpers */
func expect(t *testing.T, a interface{}, b interface{}) {
	if a != b {
		t.Errorf("Expected %v (type %v) - Got %v (type %v)", b, reflect.TypeOf(b), a, reflect.TypeOf(a))
	}
}

func refute(t *testing.T, a interface{}, b interface{}) {
	if a == b {
		t.Errorf("Did not expect %v (type %v) - Got %v (type %v)", b, reflect.TypeOf(b), a, reflect.TypeOf(a))
	}
}

func Test_InjectorInvoke(t *testing.T) {
	injector := inject.New()
	expect(t, injector == nil, false)

	dep := "some dependency"
	injector.Map(dep)
	dep2 := "another dep"
	injector.MapTo(dep2, (*SpecialString)(nil))
	dep3 := make(chan *SpecialString)
	dep4 := make(chan *SpecialString)
	typRecv := reflect.ChanOf(reflect.RecvDir, reflect.TypeOf(dep3).Elem())
	typSend := reflect.ChanOf(reflect.SendDir, reflect.TypeOf(dep4).Elem())
	injector.Set(typRecv, reflect.ValueOf(dep3))
	injector.Set(typSend, reflect.ValueOf(dep4))

	_, err := injector.Invoke(func(d1 string, d2 SpecialString, d3 <-chan *SpecialString, d4 chan<- *SpecialString) {
		expect(t, d1, dep)
		expect(t, d2, dep2)
		expect(t, reflect.TypeOf(d3).Elem(), reflect.TypeOf(dep3).Elem())
		expect(t, reflect.TypeOf(d4).Elem(), reflect.TypeOf(dep4).Elem())
		expect(t, reflect.TypeOf(d3).ChanDir(), reflect.RecvDir)
		expect(t, reflect.TypeOf(d4).ChanDir(), reflect.SendDir)
	})

	expect(t, err, nil)
}

func Test_InjectorInvokeReturnValues(t *testing.T) {
	injector := inject.New()
	expect(t, injector == nil, false)

	dep := "some dependency"
	injector.Map(dep)
	dep2 := "another dep"
	injector.MapTo(dep2, (*SpecialString)(nil))

	result, err := injector.Invoke(func(d1 string, d2 SpecialString) string {
		expect(t, d1, dep)
		expect(t, d2, dep2)
		return "Hello world"
	})

	expect(t, result[0].String(), "Hello world")
	expect(t, err, nil)
}

func Test_InjectorApply(t *testing.T) {
	injector := inject.New()

	injector.Map("a dep").MapTo("another dep", (*SpecialString)(nil))

	s := TestStruct{}
	err := injector.Apply(&s)
	expect(t, err, nil)

	expect(t, s.Dep1, "a dep")
	expect(t, s.Dep2, "another dep")
}

func Test_InterfaceOf(t *testing.T) {
	iType := inject.InterfaceOf((*SpecialString)(nil))
	expect(t, iType.Kind(), reflect.Interface)

	iType = inject.InterfaceOf((**SpecialString)(nil))
	expect(t, iType.Kind(), reflect.Interface)

	// Expecting nil
	defer func() {
		rec := recover()
		refute(t, rec, nil)
	}()
	iType = inject.InterfaceOf((*testing.T)(nil))
}

func Test_InjectorSet(t *testing.T) {
	injector := inject.New()
	typ := reflect.TypeOf("string")
	typSend := reflect.ChanOf(reflect.SendDir, typ)
	typRecv := reflect.ChanOf(reflect.RecvDir, typ)

	// instantiating unidirectional channels is not possible using reflect
	// http://golang.org/src/pkg/reflect/value.go?s=60463:60504#L2064
	chanRecv := reflect.MakeChan(reflect.ChanOf(reflect.BothDir, typ), 0)
	chanSend := reflect.MakeChan(reflect.ChanOf(reflect.BothDir, typ), 0)

	injector.Set(typSend, chanSend)
	injector.Set(typRecv, chanRecv)

	expect(t, injector.GetVal(typSend).IsValid(), true)
	expect(t, injector.GetVal(typRecv).IsValid(), true)
	expect(t, injector.GetVal(chanSend.Type()).IsValid(), false)
}

func Test_InjectorGet(t *testing.T) {
	injector := inject.New()

	injector.Map("some dependency")

	expect(t, injector.GetVal(reflect.TypeOf("string")).IsValid(), true)
	expect(t, injector.GetVal(reflect.TypeOf(11)).IsValid(), false)
}

func Test_InjectorSetParent(t *testing.T) {
	injector := inject.New()
	injector.MapTo("another dep", (*SpecialString)(nil))

	injector2 := inject.New()
	injector2.SetParent(injector)

	expect(t, injector2.GetVal(inject.InterfaceOf((*SpecialString)(nil))).IsValid(), true)
}

func TestInjectImplementors(t *testing.T) {
	injector := inject.New()
	g := &Greeter{"Jeremy"}
	injector.Map(g)

	expect(t, injector.GetVal(inject.InterfaceOf((*fmt.Stringer)(nil))).IsValid(), true)
}
61 Godeps/_workspace/src/github.com/Unknwon/macaron/logger.go generated vendored Normal file
@@ -0,0 +1,61 @@
// Copyright 2013 Martini Authors
// Copyright 2014 Unknwon
//
// Licensed under the Apache License, Version 2.0 (the "License"): you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.

package macaron

import (
	"fmt"
	"log"
	"net/http"
	"runtime"
	"time"
)

var ColorLog = true

func init() {
	ColorLog = runtime.GOOS != "windows"
}

// Logger returns a middleware handler that logs the request as it goes in and the response as it goes out.
func Logger() Handler {
	return func(ctx *Context, log *log.Logger) {
		start := time.Now()

		log.Printf("Started %s %s for %s", ctx.Req.Method, ctx.Req.RequestURI, ctx.RemoteAddr())

		rw := ctx.Resp.(ResponseWriter)
		ctx.Next()

		content := fmt.Sprintf("Completed %s %v %s in %v", ctx.Req.RequestURI, rw.Status(), http.StatusText(rw.Status()), time.Since(start))
		if ColorLog {
			switch rw.Status() {
			case 200, 201, 202:
				content = fmt.Sprintf("\033[1;32m%s\033[0m", content)
			case 301, 302:
				content = fmt.Sprintf("\033[1;37m%s\033[0m", content)
			case 304:
				content = fmt.Sprintf("\033[1;33m%s\033[0m", content)
			case 401, 403:
				content = fmt.Sprintf("\033[4;31m%s\033[0m", content)
			case 404:
				content = fmt.Sprintf("\033[1;31m%s\033[0m", content)
			case 500:
				content = fmt.Sprintf("\033[1;36m%s\033[0m", content)
			}
		}
		log.Println(content)
	}
}
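Logger receives a *log.Logger through injection, so one must be present in the injector before the middleware runs; mapping one explicitly, as in the sketch below, makes that dependency visible and overrides any default the constructor registered. This wiring example is not part of the changeset, and the prefix and route are illustrative assumptions:

package main

import (
	"log"
	"os"

	"github.com/Unknwon/macaron"
)

func main() {
	m := macaron.New()
	// Map a *log.Logger so the Logger middleware's handler can be invoked.
	m.Map(log.New(os.Stdout, "[macaron] ", 0))
	m.Use(macaron.Logger())

	m.Get("/ping", func() string { return "pong" })
	m.Run()
}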
Some files were not shown because too many files have changed in this diff.